File: `airmozilla/main/migrations/0001_initial.py`
Repo: RAMilewski/airmozilla @ `70d52295` — Python, 39,570 bytes, BSD-3-Clause, 1 star (also carried in the fork Acidburn0zzz/airmozilla)

```python
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'UserProfile'
db.create_table('main_userprofile', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('contributor', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('main', ['UserProfile'])
# Adding model 'Participant'
db.create_table('main_participant', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=65, blank=True)),
('photo', self.gf('sorl.thumbnail.fields.ImageField')(max_length=100, blank=True)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
('department', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
('team', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
('irc', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
('topic_url', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
('blog_url', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
('twitter', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
('role', self.gf('django.db.models.fields.CharField')(max_length=25)),
('cleared', self.gf('django.db.models.fields.CharField')(default='no', max_length=15, db_index=True)),
('clear_token', self.gf('django.db.models.fields.CharField')(max_length=36, blank=True)),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='participant_creator', null=True, on_delete=models.SET_NULL, to=orm['auth.User'])),
))
db.send_create_signal('main', ['Participant'])
# Adding model 'Category'
db.create_table('main_category', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
))
db.send_create_signal('main', ['Category'])
# Adding model 'Channel'
db.create_table('main_channel', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=100)),
('image', self.gf('sorl.thumbnail.fields.ImageField')(max_length=100, blank=True)),
('image_is_banner', self.gf('django.db.models.fields.BooleanField')(default=False)),
('description', self.gf('django.db.models.fields.TextField')()),
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2013, 7, 25, 0, 0))),
))
db.send_create_signal('main', ['Channel'])
# Adding model 'Tag'
db.create_table('main_tag', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
))
db.send_create_signal('main', ['Tag'])
# Adding model 'Template'
db.create_table('main_template', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('content', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal('main', ['Template'])
# Adding model 'Location'
db.create_table('main_location', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=300)),
('timezone', self.gf('django.db.models.fields.CharField')(max_length=250)),
))
db.send_create_signal('main', ['Location'])
# Adding model 'Event'
db.create_table('main_event', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=215, blank=True)),
('template', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Template'], null=True, on_delete=models.SET_NULL, blank=True)),
('template_environment', self.gf('airmozilla.main.fields.EnvironmentField')(blank=True)),
('status', self.gf('django.db.models.fields.CharField')(default='initiated', max_length=20, db_index=True)),
('placeholder_img', self.gf('sorl.thumbnail.fields.ImageField')(max_length=100)),
('description', self.gf('django.db.models.fields.TextField')()),
('short_description', self.gf('django.db.models.fields.TextField')(blank=True)),
('start_time', self.gf('django.db.models.fields.DateTimeField')(db_index=True)),
('archive_time', self.gf('django.db.models.fields.DateTimeField')(db_index=True, null=True, blank=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Location'], null=True, on_delete=models.SET_NULL, blank=True)),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Category'], null=True, on_delete=models.SET_NULL, blank=True)),
('call_info', self.gf('django.db.models.fields.TextField')(blank=True)),
('additional_links', self.gf('django.db.models.fields.TextField')(blank=True)),
('remote_presenters', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('privacy', self.gf('django.db.models.fields.CharField')(default='public', max_length=40, db_index=True)),
('featured', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='creator', null=True, on_delete=models.SET_NULL, to=orm['auth.User'])),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_user', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='modified_user', null=True, on_delete=models.SET_NULL, to=orm['auth.User'])),
('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('main', ['Event'])
# Adding M2M table for field participants on 'Event'
db.create_table('main_event_participants', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('event', models.ForeignKey(orm['main.event'], null=False)),
('participant', models.ForeignKey(orm['main.participant'], null=False))
))
db.create_unique('main_event_participants', ['event_id', 'participant_id'])
# Adding M2M table for field tags on 'Event'
db.create_table('main_event_tags', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('event', models.ForeignKey(orm['main.event'], null=False)),
('tag', models.ForeignKey(orm['main.tag'], null=False))
))
db.create_unique('main_event_tags', ['event_id', 'tag_id'])
# Adding M2M table for field channels on 'Event'
db.create_table('main_event_channels', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('event', models.ForeignKey(orm['main.event'], null=False)),
('channel', models.ForeignKey(orm['main.channel'], null=False))
))
db.create_unique('main_event_channels', ['event_id', 'channel_id'])
# Adding model 'SuggestedEvent'
db.create_table('main_suggestedevent', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=215, blank=True)),
('placeholder_img', self.gf('sorl.thumbnail.fields.ImageField')(max_length=100)),
('description', self.gf('django.db.models.fields.TextField')()),
('short_description', self.gf('django.db.models.fields.TextField')(blank=True)),
('start_time', self.gf('django.db.models.fields.DateTimeField')(db_index=True, null=True, blank=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Location'], null=True, on_delete=models.SET_NULL, blank=True)),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Category'], null=True, on_delete=models.SET_NULL, blank=True)),
('call_info', self.gf('django.db.models.fields.TextField')(blank=True)),
('additional_links', self.gf('django.db.models.fields.TextField')(blank=True)),
('remote_presenters', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('privacy', self.gf('django.db.models.fields.CharField')(default='public', max_length=40)),
('featured', self.gf('django.db.models.fields.BooleanField')(default=False)),
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2013, 7, 25, 0, 0))),
('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('submitted', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('accepted', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Event'], null=True, blank=True)),
('review_comments', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
))
db.send_create_signal('main', ['SuggestedEvent'])
# Adding M2M table for field tags on 'SuggestedEvent'
db.create_table('main_suggestedevent_tags', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('suggestedevent', models.ForeignKey(orm['main.suggestedevent'], null=False)),
('tag', models.ForeignKey(orm['main.tag'], null=False))
))
db.create_unique('main_suggestedevent_tags', ['suggestedevent_id', 'tag_id'])
# Adding M2M table for field channels on 'SuggestedEvent'
db.create_table('main_suggestedevent_channels', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('suggestedevent', models.ForeignKey(orm['main.suggestedevent'], null=False)),
('channel', models.ForeignKey(orm['main.channel'], null=False))
))
db.create_unique('main_suggestedevent_channels', ['suggestedevent_id', 'channel_id'])
# Adding M2M table for field participants on 'SuggestedEvent'
db.create_table('main_suggestedevent_participants', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('suggestedevent', models.ForeignKey(orm['main.suggestedevent'], null=False)),
('participant', models.ForeignKey(orm['main.participant'], null=False))
))
db.create_unique('main_suggestedevent_participants', ['suggestedevent_id', 'participant_id'])
# Adding model 'SuggestedEventComment'
db.create_table('main_suggestedeventcomment', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('suggested_event', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.SuggestedEvent'])),
('comment', self.gf('django.db.models.fields.TextField')()),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, on_delete=models.SET_NULL, blank=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2013, 7, 25, 0, 0))),
))
db.send_create_signal('main', ['SuggestedEventComment'])
# Adding model 'EventOldSlug'
db.create_table('main_eventoldslug', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('event', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Event'])),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=215)),
))
db.send_create_signal('main', ['EventOldSlug'])
# Adding model 'EventTweet'
db.create_table('main_eventtweet', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('event', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Event'])),
('text', self.gf('django.db.models.fields.CharField')(max_length=140)),
('include_placeholder', self.gf('django.db.models.fields.BooleanField')(default=False)),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, on_delete=models.SET_NULL, blank=True)),
('send_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2013, 7, 25, 0, 0))),
('sent_date', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('error', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('tweet_id', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
))
db.send_create_signal('main', ['EventTweet'])
# Adding model 'Approval'
db.create_table('main_approval', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('event', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Event'])),
('group', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.Group'], null=True, on_delete=models.SET_NULL, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, on_delete=models.SET_NULL, blank=True)),
('approved', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
('processed', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
('processed_time', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('comment', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal('main', ['Approval'])
# Adding model 'VidlySubmission'
db.create_table('main_vidlysubmission', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('event', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Event'])),
('url', self.gf('django.db.models.fields.URLField')(max_length=200)),
('submission_time', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2013, 7, 25, 0, 0))),
('tag', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True)),
('token_protection', self.gf('django.db.models.fields.BooleanField')(default=False)),
('hd', self.gf('django.db.models.fields.BooleanField')(default=False)),
('submission_error', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
))
db.send_create_signal('main', ['VidlySubmission'])
# Adding model 'URLMatch'
db.create_table('main_urlmatch', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('string', self.gf('django.db.models.fields.CharField')(max_length=200)),
('use_count', self.gf('django.db.models.fields.IntegerField')(default=0)),
('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('main', ['URLMatch'])
# Adding model 'URLTransform'
db.create_table('main_urltransform', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('match', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.URLMatch'])),
('find', self.gf('django.db.models.fields.CharField')(max_length=200)),
('replace_with', self.gf('django.db.models.fields.CharField')(max_length=200)),
('order', self.gf('django.db.models.fields.IntegerField')(default=1)),
))
db.send_create_signal('main', ['URLTransform'])
# Adding model 'EventHitStats'
db.create_table('main_eventhitstats', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('event', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Event'], unique=True)),
('total_hits', self.gf('django.db.models.fields.IntegerField')()),
('shortcode', self.gf('django.db.models.fields.CharField')(max_length=100)),
('modified', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2013, 7, 25, 0, 0))),
))
db.send_create_signal('main', ['EventHitStats'])
def backwards(self, orm):
# Deleting model 'UserProfile'
db.delete_table('main_userprofile')
# Deleting model 'Participant'
db.delete_table('main_participant')
# Deleting model 'Category'
db.delete_table('main_category')
# Deleting model 'Channel'
db.delete_table('main_channel')
# Deleting model 'Tag'
db.delete_table('main_tag')
# Deleting model 'Template'
db.delete_table('main_template')
# Deleting model 'Location'
db.delete_table('main_location')
# Deleting model 'Event'
db.delete_table('main_event')
# Removing M2M table for field participants on 'Event'
db.delete_table('main_event_participants')
# Removing M2M table for field tags on 'Event'
db.delete_table('main_event_tags')
# Removing M2M table for field channels on 'Event'
db.delete_table('main_event_channels')
# Deleting model 'SuggestedEvent'
db.delete_table('main_suggestedevent')
# Removing M2M table for field tags on 'SuggestedEvent'
db.delete_table('main_suggestedevent_tags')
# Removing M2M table for field channels on 'SuggestedEvent'
db.delete_table('main_suggestedevent_channels')
# Removing M2M table for field participants on 'SuggestedEvent'
db.delete_table('main_suggestedevent_participants')
# Deleting model 'SuggestedEventComment'
db.delete_table('main_suggestedeventcomment')
# Deleting model 'EventOldSlug'
db.delete_table('main_eventoldslug')
# Deleting model 'EventTweet'
db.delete_table('main_eventtweet')
# Deleting model 'Approval'
db.delete_table('main_approval')
# Deleting model 'VidlySubmission'
db.delete_table('main_vidlysubmission')
# Deleting model 'URLMatch'
db.delete_table('main_urlmatch')
# Deleting model 'URLTransform'
db.delete_table('main_urltransform')
# Deleting model 'EventHitStats'
db.delete_table('main_eventhitstats')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.approval': {
'Meta': {'object_name': 'Approval'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Event']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'processed_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
'main.category': {
'Meta': {'ordering': "['name']", 'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'main.channel': {
'Meta': {'ordering': "['name']", 'object_name': 'Channel'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 25, 0, 0)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'image_is_banner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'main.event': {
'Meta': {'object_name': 'Event'},
'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'archive_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Category']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Channel']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Location']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modified_user'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Participant']", 'symmetrical': 'False'}),
'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40', 'db_index': 'True'}),
'remote_presenters': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215', 'blank': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'initiated'", 'max_length': '20', 'db_index': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Template']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'template_environment': ('airmozilla.main.fields.EnvironmentField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'main.eventhitstats': {
'Meta': {'object_name': 'EventHitStats'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Event']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 25, 0, 0)'}),
'shortcode': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'total_hits': ('django.db.models.fields.IntegerField', [], {})
},
'main.eventoldslug': {
'Meta': {'object_name': 'EventOldSlug'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Event']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215'})
},
'main.eventtweet': {
'Meta': {'object_name': 'EventTweet'},
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Event']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_placeholder': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'send_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 25, 0, 0)'}),
'sent_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'tweet_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
'main.location': {
'Meta': {'object_name': 'Location'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'timezone': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
'main.participant': {
'Meta': {'object_name': 'Participant'},
'blog_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'clear_token': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'cleared': ('django.db.models.fields.CharField', [], {'default': "'no'", 'max_length': '15', 'db_index': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'participant_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'irc': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'photo': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '65', 'blank': 'True'}),
'team': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'topic_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
},
'main.suggestedevent': {
'Meta': {'object_name': 'SuggestedEvent'},
'accepted': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Event']", 'null': 'True', 'blank': 'True'}),
'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Category']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Channel']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 25, 0, 0)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Location']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Participant']", 'symmetrical': 'False'}),
'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40'}),
'remote_presenters': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'review_comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215', 'blank': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'submitted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'main.suggestedeventcomment': {
'Meta': {'object_name': 'SuggestedEventComment'},
'comment': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 25, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'suggested_event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.SuggestedEvent']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
'main.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'main.template': {
'Meta': {'object_name': 'Template'},
'content': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.urlmatch': {
'Meta': {'object_name': 'URLMatch'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'string': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'use_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'main.urltransform': {
'Meta': {'object_name': 'URLTransform'},
'find': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'match': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.URLMatch']"}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'replace_with': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'main.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'contributor': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'main.vidlysubmission': {
'Meta': {'object_name': 'VidlySubmission'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Event']"}),
'hd': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'submission_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'submission_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 25, 0, 0)'}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'token_protection': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
}
}
    complete_apps = ['main']
```
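This file is a South (pre-Django 1.7) schema migration, so it is applied through South's `migrate` management command rather than Django's built-in migration framework. A minimal sketch of applying it programmatically, assuming a configured settings module (the `airmozilla.settings` path below is an assumption) with `south` and `airmozilla.main` in `INSTALLED_APPS`:

```python
import os
from django.core.management import call_command

# Hypothetical settings path; point this at the project's actual settings module.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "airmozilla.settings")

# With South installed, `migrate` is South's command; this is equivalent to
# running `python manage.py migrate main 0001_initial` from the shell.
call_command("migrate", "main", "0001_initial")
```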
File: `pybrain/supervised/trainers/__init__.py`
Repo: sveilleux1/pybrain @ `1e1de731` — Python, 185 bytes, BSD-3-Clause, 2,208 stars, 786 forks

```python
from pybrain.supervised.trainers.trainer import Trainer
from pybrain.supervised.trainers.backprop import BackpropTrainer
from pybrain.supervised.trainers.rprop import RPropMinusTrainer
```
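This `__init__.py` just re-exports the trainer classes so they can be imported from `pybrain.supervised.trainers` directly. A minimal sketch of training a small network on XOR with `BackpropTrainer`, using pybrain's standard helpers (assumes pybrain is installed):

```python
from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

net = buildNetwork(2, 3, 1)   # 2 inputs, 3 hidden units, 1 output
ds = SupervisedDataSet(2, 1)  # each sample: 2 inputs, 1 target
for inp, target in [((0, 0), (0,)), ((0, 1), (1,)), ((1, 0), (1,)), ((1, 1), (0,))]:
    ds.addSample(inp, target)

trainer = BackpropTrainer(net, ds)
for epoch in range(100):
    error = trainer.train()   # one pass over the dataset; returns the average error
```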
File: `nyoka/tests/test_lgbm_to_pmml_UnitTest.py`
Repo: maxibor/nyoka @ `19f480ee` — Python, 17,059 bytes, Apache-2.0, 1 star

```python
import sys, os
import unittest
import pandas as pd
from sklearn import datasets
from sklearn.pipeline import Pipeline
from sklearn_pandas import DataFrameMapper
from sklearn.preprocessing import StandardScaler
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import train_test_split
from lightgbm import LGBMRegressor, LGBMClassifier
from nyoka import lgb_to_pmml
from nyoka import PMML44 as pml
import json
class TestMethods(unittest.TestCase):
def test_lgbm_01(self):
iris = datasets.load_iris()
irisd = pd.DataFrame(iris.data, columns=iris.feature_names)
irisd['Species'] = iris.target
features = irisd.columns.drop('Species').to_numpy()
target = 'Species'
f_name = "lgbmc_pmml.pmml"
model = LGBMClassifier()
pipeline_obj = Pipeline([
('lgbmc', model)
])
pipeline_obj.fit(irisd[features], irisd[target])
lgb_to_pmml(pipeline_obj, features, target, f_name, model_name="MyLGBM",
description="A Model for test")
pmml_obj = pml.parse(f_name, True)
pmml_value_list = []
model_value_list = []
pmml_score_list = []
model_score_list = []
list_seg_score1 = []
list_seg_score2 = []
list_seg_score3 = []
list_seg_val1 = []
list_seg_val2 = []
list_seg_val3 = []
seg_tab = pmml_obj.MiningModel[0].Segmentation.Segment
for seg in seg_tab:
if int(seg.id) <= 3:
for segment in seg.MiningModel.Segmentation.Segment:
node_tab = segment.TreeModel.Node.Node
if not node_tab:
pmml_score_list.append(segment.TreeModel.Node.score)
else:
for node in node_tab:
varlen = node.get_Node().__len__()
if varlen > 0:
pmml_value_list.append(node.SimplePredicate.value)
self.extractValues(node, pmml_value_list, pmml_score_list)
else:
pmml_value_list.append(node.SimplePredicate.value)
pmml_score_list.append(node.score)
main_key_value = []
lgb_dump = model.booster_.dump_model()
for i in range(len(lgb_dump['tree_info'])):
tree = lgb_dump['tree_info'][i]['tree_structure']
main_key_value.append(tree)
n = 1
for i in range(len(main_key_value)):
list_score_temp = []
list_val_temp = []
node_list = main_key_value[i]
if (n == 1):
n = 2
self.create_node(node_list, list_score_temp, list_val_temp)
list_seg_score1 = list_seg_score1 + list_score_temp
list_seg_val1 = list_seg_val1 + list_val_temp
list_val_temp.clear()
list_score_temp.clear()
elif (n == 2):
n = 3
self.create_node(node_list, list_score_temp, list_val_temp)
list_seg_score2 = list_seg_score2 + list_score_temp
list_seg_val2 = list_seg_val2 + list_val_temp
list_val_temp.clear()
list_score_temp.clear()
elif (n == 3):
n = 1
self.create_node(node_list, list_score_temp, list_val_temp)
list_seg_score3 = list_seg_score3 + list_score_temp
list_seg_val3 = list_seg_val3 + list_val_temp
list_val_temp.clear()
list_score_temp.clear()
model_score_list = list_seg_score1 + list_seg_score2 + list_seg_score3
model_value_list = list_seg_val1 + list_seg_val2 + list_seg_val3
##1
for model_val, pmml_val in zip(model_score_list, pmml_score_list):
self.assertEqual(model_val, float(pmml_val))
##2
for model_val, pmml_val in zip(model_value_list, pmml_value_list):
self.assertEqual(model_val, pmml_val)
##3
self.assertEqual(os.path.isfile(f_name), True)
def test_lgbm_02(self):
auto = pd.read_csv('nyoka/tests/auto-mpg.csv')
feature_names = [name for name in auto.columns if name not in ('mpg', 'car name')]
target_name = 'mpg'
f_name = "lgbmr_pmml.pmml"
model = LGBMRegressor()
pipeline_obj = Pipeline([
('lgbmr', model)
])
pipeline_obj.fit(auto[feature_names], auto[target_name])
lgb_to_pmml(pipeline_obj, feature_names, target_name, f_name)
pmml_obj = pml.parse(f_name, True)
pmml_value_list = []
model_value_list = []
pmml_score_list = []
model_score_list = []
seg_tab = pmml_obj.MiningModel[0].Segmentation.Segment
for seg in seg_tab:
for node in seg.TreeModel.Node.Node:
varlen = node.get_Node().__len__()
if varlen > 0:
pmml_value_list.append(node.SimplePredicate.value)
self.extractValues(node, pmml_value_list, pmml_score_list)
else:
pmml_value_list.append(node.SimplePredicate.value)
pmml_score_list.append(node.score)
main_key_value = []
lgb_dump = model.booster_.dump_model()
for i in range(len(lgb_dump['tree_info'])):
tree = lgb_dump['tree_info'][i]['tree_structure']
main_key_value.append(tree)
for i in range(len(main_key_value)):
list_score_temp = []
list_val_temp = []
node_list = main_key_value[i]
self.create_node(node_list, list_score_temp, list_val_temp)
model_score_list = model_score_list + list_score_temp
model_value_list = model_value_list + list_val_temp
list_val_temp.clear()
list_score_temp.clear()
##1
for model_val, pmml_val in zip(model_score_list, pmml_score_list):
self.assertEqual(model_val, float(pmml_val))
##2
for model_val, pmml_val in zip(model_value_list, pmml_value_list):
self.assertEqual(model_val, pmml_val)
##3
self.assertEqual(os.path.isfile(f_name), True)
def test_lgbm_03(self):
iris = datasets.load_iris()
irisd = pd.DataFrame(iris.data, columns=iris.feature_names)
irisd['Species'] = iris.target
features = irisd.columns.drop('Species')
target = 'Species'
f_name = "lgbmc_pmml_preprocess.pmml"
model = LGBMClassifier(n_estimators=5)
pipeline_obj = Pipeline([
('scaling', StandardScaler()),
('LGBMC', model)
])
pipeline_obj.fit(irisd[features], irisd[target])
lgb_to_pmml(pipeline_obj, features, target, f_name)
pmml_obj = pml.parse(f_name, True)
pmml_value_list = []
model_value_list = []
pmml_score_list = []
model_score_list = []
list_seg_score1 = []
list_seg_score2 = []
list_seg_score3 = []
list_seg_val1 = []
list_seg_val2 = []
list_seg_val3 = []
seg_tab = pmml_obj.MiningModel[0].Segmentation.Segment
for seg in seg_tab:
if int(seg.id) <= 3:
for segment in seg.MiningModel.Segmentation.Segment:
node_tab = segment.TreeModel.Node.Node
if not node_tab:
pmml_score_list.append(segment.TreeModel.Node.score)
else:
for node in node_tab:
varlen = node.get_Node().__len__()
if varlen > 0:
pmml_value_list.append(node.SimplePredicate.value)
self.extractValues(node, pmml_value_list, pmml_score_list)
else:
pmml_value_list.append(node.SimplePredicate.value)
pmml_score_list.append(node.score)
main_key_value = []
lgb_dump = model.booster_.dump_model()
for i in range(len(lgb_dump['tree_info'])):
tree = lgb_dump['tree_info'][i]['tree_structure']
main_key_value.append(tree)
n = 1
for i in range(len(main_key_value)):
list_score_temp = []
list_val_temp = []
node_list = main_key_value[i]
if (n == 1):
n = 2
self.create_node(node_list, list_score_temp, list_val_temp)
list_seg_score1 = list_seg_score1 + list_score_temp
list_seg_val1 = list_seg_val1 + list_val_temp
list_val_temp.clear()
list_score_temp.clear()
elif (n == 2):
n = 3
self.create_node(node_list, list_score_temp, list_val_temp)
list_seg_score2 = list_seg_score2 + list_score_temp
list_seg_val2 = list_seg_val2 + list_val_temp
list_val_temp.clear()
list_score_temp.clear()
elif (n == 3):
n = 1
self.create_node(node_list, list_score_temp, list_val_temp)
list_seg_score3 = list_seg_score3 + list_score_temp
list_seg_val3 = list_seg_val3 + list_val_temp
list_val_temp.clear()
list_score_temp.clear()
model_score_list = list_seg_score1 + list_seg_score2 + list_seg_score3
model_value_list = list_seg_val1 + list_seg_val2 + list_seg_val3
##1
for model_val, pmml_val in zip(model_score_list, pmml_score_list):
self.assertEqual(model_val, float(pmml_val))
##2
for model_val, pmml_val in zip(model_value_list, pmml_value_list):
self.assertEqual(model_val, pmml_val)
##3
self.assertEqual(os.path.isfile(f_name), True)
def test_lgbm_04(self):
auto = pd.read_csv('nyoka/tests/auto-mpg.csv')
X = auto.drop(['mpg'], axis=1)
y = auto['mpg']
feature_names = [name for name in auto.columns if name not in ('mpg')]
target_name = 'mpg'
x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=101)
f_name = "lgbmr_pmml_preprocess2.pmml"
model = LGBMRegressor()
pipeline_obj = Pipeline([
('mapper', DataFrameMapper([
('car name', CountVectorizer()),
(['displacement'], [StandardScaler()])
])),
('lgbmr', model)
])
pipeline_obj.fit(x_train, y_train)
lgb_to_pmml(pipeline_obj, feature_names, target_name, f_name)
pmml_obj = pml.parse(f_name, True)
pmml_value_list = []
model_value_list = []
pmml_score_list = []
model_score_list = []
seg_tab = pmml_obj.MiningModel[0].Segmentation.Segment
for seg in seg_tab:
for node in seg.TreeModel.Node.Node:
varlen = node.get_Node().__len__()
if varlen > 0:
pmml_value_list.append(node.SimplePredicate.value)
self.extractValues(node, pmml_value_list, pmml_score_list)
else:
pmml_value_list.append(node.SimplePredicate.value)
pmml_score_list.append(node.score)
main_key_value = []
lgb_dump = model.booster_.dump_model()
for i in range(len(lgb_dump['tree_info'])):
tree = lgb_dump['tree_info'][i]['tree_structure']
main_key_value.append(tree)
for i in range(len(main_key_value)):
list_score_temp = []
list_val_temp = []
node_list = main_key_value[i]
self.create_node(node_list, list_score_temp, list_val_temp)
model_score_list = model_score_list + list_score_temp
model_value_list = model_value_list + list_val_temp
list_val_temp.clear()
list_score_temp.clear()
##1
for model_val, pmml_val in zip(model_score_list, pmml_score_list):
self.assertEqual(model_val, float(pmml_val))
##2
for model_val, pmml_val in zip(model_value_list, pmml_value_list):
self.assertEqual(model_val, pmml_val)
##3
self.assertEqual(os.path.isfile(f_name), True)
def test_lgbm_05(self):
iris = datasets.load_iris()
irisd = pd.DataFrame(iris.data, columns=iris.feature_names)
irisd['target'] = [i % 2 for i in range(iris.data.shape[0])]
features = irisd.columns.drop('target')
target = 'target'
f_name = "lgbc_bin_pmml.pmml"
model = LGBMClassifier()
pipeline_obj = Pipeline([
('lgbmc', model)
])
pipeline_obj.fit(irisd[features], irisd[target])
lgb_to_pmml(pipeline_obj, features, target, f_name)
# self.assertEqual(os.path.isfile("lgbc_bin_pmml.pmml"), True)
pmml_obj = pml.parse(f_name, True)
pmml_value_list = []
model_value_list = []
pmml_score_list = []
model_score_list = []
seg_tab = pmml_obj.MiningModel[0].Segmentation.Segment
for seg in seg_tab:
if int(seg.id) == 1:
for segment in seg.MiningModel.Segmentation.Segment:
node_tab = segment.TreeModel.Node.Node
if not node_tab:
pmml_score_list.append(segment.TreeModel.Node.score)
else:
for node in node_tab:
varlen = node.get_Node().__len__()
if varlen > 0:
pmml_value_list.append(node.SimplePredicate.value)
self.extractValues(node, pmml_value_list, pmml_score_list)
else:
pmml_value_list.append(node.SimplePredicate.value)
pmml_score_list.append(node.score)
main_key_value = []
lgb_dump = model.booster_.dump_model()
for i in range(len(lgb_dump['tree_info'])):
tree = lgb_dump['tree_info'][i]['tree_structure']
main_key_value.append(tree)
for i in range(len(main_key_value)):
list_score_temp = []
list_val_temp = []
node_list = main_key_value[i]
self.create_node(node_list, list_score_temp, list_val_temp)
model_score_list = model_score_list + list_score_temp
model_value_list = model_value_list + list_val_temp
list_val_temp.clear()
list_score_temp.clear()
##1
for model_val, pmml_val in zip(model_score_list, pmml_score_list):
self.assertEqual(model_val, float(pmml_val))
##2
for model_val, pmml_val in zip(model_value_list, pmml_value_list):
self.assertEqual(model_val, pmml_val)
##3
self.assertEqual(os.path.isfile(f_name), True)
def test_lgbm_06(self):
iris = datasets.load_iris()
irisd = pd.DataFrame(iris.data, columns=iris.feature_names)
irisd['target'] = [i % 2 for i in range(iris.data.shape[0])]
features = irisd.columns.drop('target')
target = 'target'
model = LGBMClassifier()
model.fit(irisd[features], irisd[target])
with self.assertRaises(TypeError):
lgb_to_pmml(model, features, target, "lgbc_bin_pmml.pmml")
def extractValues(self, node, pmml_value_list, pmml_score_list):
for nsample in (node.Node):
varlen = nsample.get_Node().__len__()
if varlen > 0:
pmml_value_list.append(nsample.SimplePredicate.value)
self.extractValues(nsample, pmml_value_list, pmml_score_list)
else:
pmml_value_list.append(nsample.SimplePredicate.value)
pmml_score_list.append(nsample.score)
def create_node(self, obj, list_score_temp, list_val_temp):
if 'leaf_index' in obj:
list_score_temp.append(obj['leaf_value'])
else:
self.create_left_node(obj, list_score_temp, list_val_temp)
self.create_right_node(obj, list_score_temp, list_val_temp)
def create_left_node(self, obj, list_score_temp, list_val_temp):
value = "{:.16f}".format(obj['threshold'])
list_val_temp.append(value)
self.create_node(obj['left_child'], list_score_temp, list_val_temp)
def create_right_node(self, obj, list_score_temp, list_val_temp):
value = "{:.16f}".format(obj['threshold'])
list_val_temp.append(value)
self.create_node(obj['right_child'], list_score_temp, list_val_temp)
if __name__ == '__main__':
unittest.main(warnings='ignore')
```
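Every test above exercises the same conversion pattern: fit a LightGBM estimator inside a scikit-learn `Pipeline`, then hand the pipeline, feature names, target name, and output path to `lgb_to_pmml`. Distilled from `test_lgbm_01`, a minimal sketch (assumes nyoka, lightgbm, and scikit-learn are installed):

```python
import pandas as pd
from sklearn import datasets
from sklearn.pipeline import Pipeline
from lightgbm import LGBMClassifier
from nyoka import lgb_to_pmml

iris = datasets.load_iris()
irisd = pd.DataFrame(iris.data, columns=iris.feature_names)
irisd['Species'] = iris.target

# Fit inside a Pipeline, then export the fitted model as a PMML document.
pipeline = Pipeline([('lgbmc', LGBMClassifier())])
pipeline.fit(irisd[iris.feature_names], irisd['Species'])
lgb_to_pmml(pipeline, iris.feature_names, 'Species', "lgbmc_iris.pmml")
```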
File: `tests/conftest.py`
Repo: supaiku0/python-crypto @ `112bfe2f` — Python, 8,546 bytes, MIT

```python
import pytest
@pytest.fixture
def transaction_type_0():
"""Transaction of type "transfer"
"""
data = {
'version': 1,
'network': 30,
'type': 0,
'timestamp': 0,
'senderPublicKey': '03cb7bca143376721d0e9e3f3ccb0dc2e7e8470c06e630c3cef73f03e309b558ad',
'fee': 0,
'amount': 12500000000000000,
'expiration': 0,
'recipientId': 'DGihocTkwDygiFvmg6aG8jThYTic47GzU9',
'signature': '3044022016ecdf3039e69514c7d75861b22fc076496b61c07a1fcf793dc4f5c76fa0532b0220579c4c0c9d13720f9db5d9df29ed8ceab0adc266c6c160d612d4894dc5867eb1', # noqa
'id': 'e40ce11cab82736da1cc91191716f3c1f446ca7b6a9f4f93b7120ef105ba06e8',
'serialized': 'ff011e000000000003cb7bca143376721d0e9e3f3ccb0dc2e7e8470c06e630c3cef73f03e309b558ad0000000000000000000040b10baf682c00000000001e7f048c40fd8a0442ffe79e0aa804f27fd5db15943044022016ecdf3039e69514c7d75861b22fc076496b61c07a1fcf793dc4f5c76fa0532b0220579c4c0c9d13720f9db5d9df29ed8ceab0adc266c6c160d612d4894dc5867eb1' # noqa
}
return data
@pytest.fixture
def transaction_type_1():
"""Transaction of type "second signature registration"
"""
data = {
'version': 1,
'network': 30,
'type': 1,
'timestamp': 4895203,
'senderPublicKey': '03a02b9d5fdd1307c2ee4652ba54d492d1fd11a7d1bb3f3a44c4a05e79f19de933',
'fee': 500000000,
'asset': {
'signature': {
'publicKey': '0292d580f200d041861d78b3de5ff31c6665b7a092ac3890d9132593beb9aa8513'
}
},
'signature': '3045022100e4fe1f3fb2845ad5f6ab377f247ffb797661d7516626bdc1d2f0f73eca582b4d02200ada103bdbff439d57c7aaa266f30ce74ff4385f0c77a486070033061b71650c', # noqa
'amount': 0,
'recipientId': 'D7seWn8JLVwX4nHd9hh2Lf7gvZNiRJ7qLk',
'id': '62c36be3e5176771a476d813f64082a8f4e3861c0356438bdf1cc91eebcc9b0d',
'serialized': 'ff011e01e3b14a0003a02b9d5fdd1307c2ee4652ba54d492d1fd11a7d1bb3f3a44c4a05e79f19de9330065cd1d00000000000292d580f200d041861d78b3de5ff31c6665b7a092ac3890d9132593beb9aa85133045022100e4fe1f3fb2845ad5f6ab377f247ffb797661d7516626bdc1d2f0f73eca582b4d02200ada103bdbff439d57c7aaa266f30ce74ff4385f0c77a486070033061b71650c' # noqa
}
return data
@pytest.fixture
def transaction_type_2():
"""Transaction of type "delegate registration"
"""
data = {
'version': 1,
'network': 30,
'type': 2,
'timestamp': 0,
'senderPublicKey': '03e5b39a83e6c7c952c5908089d4524bb8dda93acc2b2b953247e43dc4fe9aa3d1',
'fee': 0,
'asset': {
'delegate': {
'username': 'genesis_1'
}
},
'signature': '3045022100e3e38811778023e6f17fefd447f179d45ab92c398c7cfb1e34e2f6e1b167c95a022070c36439ecec0fc3c43850070f29515910435d389e059579878d61b5ff2ea337', # noqa
'amount': 0,
'id': 'eb0146ac79afc228f0474a5ae1c4771970ae7880450b998c401029f522cd8a21',
'serialized': 'ff011e020000000003e5b39a83e6c7c952c5908089d4524bb8dda93acc2b2b953247e43dc4fe9aa3d10000000000000000000967656e657369735f313045022100e3e38811778023e6f17fefd447f179d45ab92c398c7cfb1e34e2f6e1b167c95a022070c36439ecec0fc3c43850070f29515910435d389e059579878d61b5ff2ea337' # noqa
}
return data
@pytest.fixture
def transaction_type_3():
"""Transaction of type "vote"
"""
data = {
'version': 1,
'network': 30,
'type': 3,
'timestamp': 4349399,
'senderPublicKey': '0374e9a97611540a9ce4812b0980e62d3c5141ea964c2cab051f14a78284570dcd',
'fee': 100000000,
'asset': {
'votes': ['+02dcb94d73fb54e775f734762d26975d57f18980314f3b67bc52beb393893bc706']
},
'signature': '3045022100af1e5d6f3c9eff8699192ad1b827e7cf7c60040bd2f704360a1f1fbadf6bc1cf022048238b7175369861436d895adaeeeb31ceb453e543dbf20218a4a5b688650482', # noqa
'amount': 0,
'recipientId': 'DRac35wghMcmUSe5jDMLBDLWkVVjyKZFxK',
'id': 'a430dbe34172d205ec251875b14438e58e4bd6cf4efc1ebb3da4c206b002115b',
'serialized': 'ff011e03d75d42000374e9a97611540a9ce4812b0980e62d3c5141ea964c2cab051f14a78284570dcd00e1f5050000000000010102dcb94d73fb54e775f734762d26975d57f18980314f3b67bc52beb393893bc7063045022100af1e5d6f3c9eff8699192ad1b827e7cf7c60040bd2f704360a1f1fbadf6bc1cf022048238b7175369861436d895adaeeeb31ceb453e543dbf20218a4a5b688650482' # noqa
}
return data
@pytest.fixture
def transaction_type_4():
"""Transaction of type "multi signature registration"
"""
data = {
'version': 1,
'network': 23,
'id': 'cbd6862966bb1b03ba742397b7e5a88d6eefb393a362ead0d605723b840db2af',
'blockid': '1844069042066945391',
'type': 4,
'timestamp': 10112114,
'amount': 0,
'fee': 2000000000,
'senderId': 'AMw3TiLrmVmwmFVwRzn96kkUsUpFTqsAEX',
'senderPublicKey': '036928c98ee53a1f52ed01dd87db10ffe1980eb47cd7c0a7d688321f47b5d7d760',
'signature': '30440220324d89c5792e4a54ae70b4f1e27e2f87a8b7169cc6f2f7b2c83dba894960f987022053b8d0ae23ff9d1769364db7b6fd03216d93753c82a711c3558045e787bc01a5', # noqa
'signSignature': '304402201fcd54a9ac9c0269b8cec213566ddf43207798e2cf9ca1ce3c5d315d66321c6902201aa94c4ed3e5e479a12220aa886b259e488eb89b697c711f91e8c03b9620e0b1', # noqa
'signatures': [
'304502210097f17c8eecf36f86a967cc52a83fa661e4ffc70cc4ea08df58673669406d424c0220798f5710897b75dda42f6548f841afbe4ed1fa262097112cf5a1b3f7dade60e4', # noqa
'304402201a4a4c718bfdc699bbb891b2e89be018027d2dcd10640b5ddf07802424dab78e02204ec7c7d505d2158c3b51fdd3843d16aecd2eaaa4c6c7a555ef123c5e59fd41fb', # noqa
'304402207e660489bced5ce80c33d45c86781b63898775ab4a231bb48780f97b40073a63022026f0cefd0d83022d822522ab4366a82e3b89085c328817919939f2efeabd913d' # noqa
],
'asset': {
'multisignature': {
'min': 2,
'keysgroup': [
'+03543c6cc3545be6bac09c82721973a052c690658283472e88f24d14739f75acc8',
'+0276dc5b8706a85ca9fdc46e571ac84e52fbb48e13ec7a165a80731b44ae89f1fc',
'+02e8d5d17eb17bbc8d7bf1001d29a2d25d1249b7bb7a5b7ad8b7422063091f4b31'
],
'lifetime': 24
}
},
'serialized': 'ff011704724c9a00036928c98ee53a1f52ed01dd87db10ffe1980eb47cd7c0a7d688321f47b5d7d76000943577000000000002031803543c6cc3545be6bac09c82721973a052c690658283472e88f24d14739f75acc80276dc5b8706a85ca9fdc46e571ac84e52fbb48e13ec7a165a80731b44ae89f1fc02e8d5d17eb17bbc8d7bf1001d29a2d25d1249b7bb7a5b7ad8b7422063091f4b3130440220324d89c5792e4a54ae70b4f1e27e2f87a8b7169cc6f2f7b2c83dba894960f987022053b8d0ae23ff9d1769364db7b6fd03216d93753c82a711c3558045e787bc01a5304402201fcd54a9ac9c0269b8cec213566ddf43207798e2cf9ca1ce3c5d315d66321c6902201aa94c4ed3e5e479a12220aa886b259e488eb89b697c711f91e8c03b9620e0b1ff304502210097f17c8eecf36f86a967cc52a83fa661e4ffc70cc4ea08df58673669406d424c0220798f5710897b75dda42f6548f841afbe4ed1fa262097112cf5a1b3f7dade60e4304402201a4a4c718bfdc699bbb891b2e89be018027d2dcd10640b5ddf07802424dab78e02204ec7c7d505d2158c3b51fdd3843d16aecd2eaaa4c6c7a555ef123c5e59fd41fb304402207e660489bced5ce80c33d45c86781b63898775ab4a231bb48780f97b40073a63022026f0cefd0d83022d822522ab4366a82e3b89085c328817919939f2efeabd913d' # noqa
}
return data
@pytest.fixture
def transaction_type_5():
"""Transaction of type "ipfs"
"""
data = {
'data': {},
        'serialized': ''
}
return data
@pytest.fixture
def transaction_type_6():
"""Transaction of type "timelock transfer"
"""
data = {
'data': {},
        'serialized': ''
}
return data
@pytest.fixture
def transaction_type_7():
"""Transaction of type "multi payment"
"""
data = {
'data': {},
        'serialized': ''
}
return data
@pytest.fixture
def transaction_type_8():
"""Transaction of type "delegate resignation"
"""
data = {
'data': {},
        'serialized': ''
}
return data
@pytest.fixture
def message():
data = {
'data': {
'public_key': '034151a3ec46b5670a682b0a63394f863587d1bc97483b1b6c70eb58e7f0aed192',
'signature': '304402200fb4adddd1f1d652b544ea6ab62828a0a65b712ed447e2538db0caebfa68929e02205ecb2e1c63b29879c2ecf1255db506d671c8b3fa6017f67cfd1bf07e6edd1cc8', # noqa
'message': 'Hello World'
},
'passphrase': 'this is a top secret passphrase'
}
return data
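# Hedged usage sketch (illustration only, not part of the original conftest):
# pytest injects each fixture into a test by parameter name, e.g.
#
#     def test_transfer_type(transaction_type_0):
#         assert transaction_type_0['type'] == 0
#         assert transaction_type_0['amount'] == 12500000000000000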
| 44.978947 | 1,040 | 0.749941 | 313 | 8,546 | 20.412141 | 0.354633 | 0.020347 | 0.025043 | 0.038034 | 0.101737 | 0.096885 | 0.08139 | 0.069807 | 0.027704 | 0.027704 | 0 | 0.45939 | 0.178797 | 8,546 | 189 | 1,041 | 45.216931 | 0.450983 | 0.055114 | 0 | 0.403974 | 0 | 0 | 0.684966 | 0.597629 | 0 | 1 | 0 | 0 | 0 | 1 | 0.066225 | false | 0.006623 | 0.006623 | 0 | 0.139073 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d63815ffda821be24c860aa53273950606f6ab55 | 228 | py | Python | CLI/Tables/VendorType.py | mdrummond1/budget-app | 5c016687b3282e4fd850af0515940095e88d4a95 | [
"MIT"
] | null | null | null | CLI/Tables/VendorType.py | mdrummond1/budget-app | 5c016687b3282e4fd850af0515940095e88d4a95 | [
"MIT"
] | null | null | null | CLI/Tables/VendorType.py | mdrummond1/budget-app | 5c016687b3282e4fd850af0515940095e88d4a95 | [
"MIT"
] | null | null | null | class VendorType:
def __init__(self, row: list):
self.vendor_type_id = row[0]
self.vendor_type = row[1]
def __str__(self):
return f"id: {self.vendor_type_id} vendor category: {self.vendor_type}"
| 28.5 | 79 | 0.649123 | 33 | 228 | 4.060606 | 0.484848 | 0.298507 | 0.41791 | 0.238806 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011429 | 0.232456 | 228 | 7 | 80 | 32.571429 | 0.754286 | 0 | 0 | 0 | 0 | 0 | 0.267544 | 0.092105 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.166667 | 0.666667 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
c38bf8dec413b406a20ba68580f4ead7e87b75e0 | 5,640 | py | Python | model/combine.py | JungHeeKim29/DiffHDRsyn | 01fa64e94a9edf34a029a04adeb75180b3989786 | [
"MIT"
] | 15 | 2021-01-07T03:37:31.000Z | 2021-12-29T02:40:26.000Z | model/combine.py | JungHeeKim29/DiffHDRsyn | 01fa64e94a9edf34a029a04adeb75180b3989786 | [
"MIT"
] | 2 | 2021-03-09T08:37:15.000Z | 2021-12-29T06:48:39.000Z | model/combine.py | JungHeeKim29/DiffHDRsyn | 01fa64e94a9edf34a029a04adeb75180b3989786 | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function
import numpy as np
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as T
from utils.block import ConvBlock, DoubleBlock
class CombineNet_up(nn.Module):
    """U-Net style encoder-decoder: ndown conv + avg-pool stages, nintermediate
    bottleneck convs, then mirrored bilinear-upsampling stages with skip
    connections. The forward pass averages the decoder output with input
    channels 3:6, so callers must pass in_ch >= 6 despite the default of 2."""
    def __init__(self, in_ch=2, nf=16, ndown=4, nintermediate=2):
super(CombineNet_up, self).__init__()
init_layers = []
init_layers +=[ConvBlock(in_ch, nf,
kernel_size = 3, stride=1, padding=1,
norm = 'none', activation = 'swish')]
down_layers = []
for down in range(ndown):
down_layers +=[DoubleBlock(nf, nf*2,
first_kernel=3, second_kernel= 3,
stride=1, padding=1,
first_norm = 'none', second_norm = 'none',
first_act= 'swish', second_act='swish')]
down_layers += [nn.AvgPool2d(2)]
nf = nf*2
inter_layers = []
for inter in range(nintermediate):
inter_layers += [ConvBlock(nf, nf,kernel_size=3, stride=1, padding=1, norm = 'none', activation='swish')]
up_layers = []
for up in range(ndown):
up_layers += [nn.Upsample(scale_factor=2, mode='bilinear')]
up_layers += [DoubleBlock(nf*2, nf//2,
first_kernel=3, second_kernel= 3,
stride=1, padding=1,
first_norm = 'none', second_norm = 'in',
first_act= 'swish', second_act='swish')]
nf //=2
self.out_conv = ConvBlock(nf*2, 3,
kernel_size = 3, stride = 1, padding = 1,
norm='none', activation='tanh')
self.init_layers = nn.Sequential(*init_layers)
self.down_layers = nn.Sequential(*down_layers)
self.inter_layers = nn.Sequential(*inter_layers)
self.up_layers = nn.Sequential(*up_layers)
def forward(self, in_x) :
out = self.init_layers(in_x)
down_list = list()
down_list.append(out)
for down_block in self.down_layers:
if isinstance(down_block, nn.AvgPool2d):
out = down_block(out)
down_list.append(out)
else :
out = down_block(out)
down_list.reverse()
out = self.inter_layers(out)
i = 0
for up_block in self.up_layers:
if isinstance(up_block, nn.Upsample):
skip = down_list[i]
out = torch.cat([out, skip], 1)
out = up_block(out)
i += 1
else:
out = up_block(out)
out = torch.cat([out, down_list[i]], 1)
out = self.out_conv(out)
out = (out+in_x[:,3:6,:])/2
return out
class CombineNet_down(nn.Module):
    """Same architecture as CombineNet_up; defined separately, presumably so
    the two combine stages can learn independent weights. See CombineNet_up
    for the in_ch >= 6 requirement."""
    def __init__(self, in_ch=2, nf=16, ndown=4, nintermediate=2):
super(CombineNet_down, self).__init__()
init_layers = []
init_layers +=[ConvBlock(in_ch, nf,
kernel_size = 3, stride=1, padding=1,
norm = 'none', activation = 'swish')]
down_layers = []
for down in range(ndown):
down_layers +=[DoubleBlock(nf, nf*2,
first_kernel=3, second_kernel= 3,
stride=1, padding=1,
first_norm = 'none', second_norm = 'none',
first_act= 'swish', second_act='swish')]
down_layers += [nn.AvgPool2d(2)]
nf = nf*2
inter_layers = []
for inter in range(nintermediate):
inter_layers += [ConvBlock(nf, nf,kernel_size=3, stride=1, padding=1, norm = 'none', activation='swish')]
up_layers = []
for up in range(ndown):
up_layers += [nn.Upsample(scale_factor=2, mode='bilinear')]
up_layers += [DoubleBlock(nf*2, nf//2,
first_kernel=3, second_kernel= 3,
stride=1, padding=1,
first_norm = 'none', second_norm = 'in',
first_act= 'swish', second_act='swish')]
nf //=2
self.out_conv = ConvBlock(nf*2, 3,
kernel_size = 3, stride = 1, padding = 1,
norm='none', activation='tanh')
self.init_layers = nn.Sequential(*init_layers)
self.down_layers = nn.Sequential(*down_layers)
self.inter_layers = nn.Sequential(*inter_layers)
self.up_layers = nn.Sequential(*up_layers)
def forward(self, in_x) :
out = self.init_layers(in_x)
down_list = list()
down_list.append(out)
for down_block in self.down_layers:
if isinstance(down_block, nn.AvgPool2d):
out = down_block(out)
down_list.append(out)
else :
out = down_block(out)
down_list.reverse()
out = self.inter_layers(out)
i = 0
for up_block in self.up_layers:
if isinstance(up_block, nn.Upsample):
skip = down_list[i]
out = torch.cat([out, skip], 1)
out = up_block(out)
i += 1
else:
out = up_block(out)
out = torch.cat([out, down_list[i]], 1)
out = self.out_conv(out)
out = (out + in_x[:,3:6,:])/2
return out
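# Hedged usage sketch (not part of the original module). Assumes ConvBlock and
# DoubleBlock preserve spatial size (kernel 3, stride 1, padding 1) and that the
# input height/width are divisible by 2**ndown; in_ch must be >= 6 because the
# forward pass blends the output with input channels 3:6.
if __name__ == '__main__':
    net = CombineNet_up(in_ch=6)
    x = torch.randn(1, 6, 64, 64)   # illustrative batch of one 6-channel image
    y = net(x)
    print(y.shape)                  # expected: torch.Size([1, 3, 64, 64])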
| 35.923567 | 117 | 0.507447 | 658 | 5,640 | 4.130699 | 0.121581 | 0.03532 | 0.029433 | 0.055188 | 0.910964 | 0.910964 | 0.910964 | 0.910964 | 0.910964 | 0.910964 | 0 | 0.022923 | 0.381206 | 5,640 | 156 | 118 | 36.153846 | 0.755874 | 0 | 0 | 0.90625 | 0 | 0 | 0.024118 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03125 | false | 0 | 0.0625 | 0 | 0.125 | 0.007813 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c3c9deef9b5f73a12c083e5d4a3f335bd7f398b8 | 227 | py | Python | cemc/wanglandau/__init__.py | davidkleiven/WangLandau | 0b253dd98033c53560fe95c76f5e38257834bdf6 | [
"MIT"
] | 2 | 2022-02-10T00:38:53.000Z | 2022-03-17T22:08:40.000Z | cemc/wanglandau/__init__.py | davidkleiven/CEMC | 0b253dd98033c53560fe95c76f5e38257834bdf6 | [
"MIT"
] | 30 | 2018-05-21T14:52:00.000Z | 2021-02-24T07:45:09.000Z | cemc/wanglandau/__init__.py | davidkleiven/WangLandau | 0b253dd98033c53560fe95c76f5e38257834bdf6 | [
"MIT"
] | 3 | 2018-10-09T14:03:32.000Z | 2022-02-09T05:36:05.000Z | # Re-exports the public Wang-Landau classes for the cemc.wanglandau package.
from cemc.wanglandau.wang_landau_db_manager import WangLandauDBManager
from cemc.wanglandau.wang_landau_initializer import WangLandauInit, AtomExistsError
from cemc.wanglandau.wang_landau_sampler import WangLandau
| 45.4 | 83 | 0.894273 | 28 | 227 | 7 | 0.535714 | 0.122449 | 0.27551 | 0.336735 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070485 | 227 | 4 | 84 | 56.75 | 0.92891 | 0.044053 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
4ede4d97c533bb7df63fc346fee20f9b7bffa4b2 | 42 | py | Python | my_assist/api/reminders/__init__.py | sriramsv/assist | 3940457f7efe224bbb4dfc2325e805d040d4626b | [
"BSD-3-Clause"
] | null | null | null | my_assist/api/reminders/__init__.py | sriramsv/assist | 3940457f7efe224bbb4dfc2325e805d040d4626b | [
"BSD-3-Clause"
] | 1 | 2021-02-08T20:19:51.000Z | 2021-02-08T20:19:51.000Z | my_assist/api/reminders/__init__.py | sriramsv/assist | 3940457f7efe224bbb4dfc2325e805d040d4626b | [
"BSD-3-Clause"
] | null | null | null | from . import views
from . import actions
| 14 | 21 | 0.761905 | 6 | 42 | 5.333333 | 0.666667 | 0.625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.190476 | 42 | 2 | 22 | 21 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f607e07d5bd9143f8f1dc62ebbe97d3dd410ed80 | 21,663 | py | Python | tests/test_holereachability.py | mamysa/PyPltRedex | d99f809c0519efa0f451f2bc0078a50d98efdf6e | [
"MIT"
] | null | null | null | tests/test_holereachability.py | mamysa/PyPltRedex | d99f809c0519efa0f451f2bc0078a50d98efdf6e | [
"MIT"
] | null | null | null | tests/test_holereachability.py | mamysa/PyPltRedex | d99f809c0519efa0f451f2bc0078a50d98efdf6e | [
"MIT"
] | null | null | null | import unittest
from src.preprocess.pattern import NumberOfHoles, DefineLanguage_HoleReachabilitySolver, NtGraphBuilder
from src.model.pattern import PatSequence, BuiltInPat, Nt, Repeat, Lit, LitKind, BuiltInPatKind, RepeatMatchMode, InHole, PatternAttribute
from src.model.tlform import DefineLanguage, Module
from src.context import CompilationContext
from src.parser import parse_string
from src.util import CompilationError
def result(lang, nt):
return lang.nts[nt].nt.getattribute(PatternAttribute.NumberOfHoles)
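# Reading guide (added comment): result() returns the solver's
# (minimum, maximum) pair of NumberOfHoles values for a nonterminal. For
# example, under (E ::= (E n) hole) every E term bottoms out in exactly one
# hole, so result(lang, 'E') == (NumberOfHoles.One, NumberOfHoles.One).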
class TestDefineLanguageHoleReachabilitySolver(unittest.TestCase):
# (n ::= number)
# (P ::= (E))
# (E ::= (E n) hole)
# n = (zero, zero) P = (one one) E = (one one)
def test_holereachability0(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
PatSequence([Nt('E', 'E_2'), Nt('n', 'n_3')]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.One , NumberOfHoles.One ))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.One , NumberOfHoles.One ))
# (P ::= (E))
# (E ::= P)
# P = (zero, zero) E = (zero zero)
def test_holereachability1(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
# (n ::= number) (zero zero)
# (P ::= (E)) (one one)
# (E ::= P (E n) hole) (one one)
    # The algorithm does not deal with infinite cycles very well - for example, we can
    # have the term (((( (E) )))) that is infinite and thus ideally should match zero
    # holes. Since the algorithm simply propagates hole counts throughout the graph, it
    # does not take into account inner-node cycles such as (E) -> (E n) -> (E). Perhaps
    # for each edge in such a cycle we should enforce a minimum value of Zero holes?
    # It might be fine for our purposes of checking in-hole patterns, which involve
    # checking whether a given expression matches exactly one hole.
def test_holereachability2(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
PatSequence([Nt('E', 'E_2'), Nt('n', 'n_3')]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.One , NumberOfHoles.One ))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.One , NumberOfHoles.One ))
# (n ::= number) (zero zero)
# (P ::= (E)) (one many)
# (E ::= P (E n) (E E) hole) (one many)
def test_holereachability3(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
PatSequence([Nt('E', 'E_3'), Nt('E', 'E_4')]),
PatSequence([Nt('E', 'E_5'), Nt('n', 'n_6')]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.One , NumberOfHoles.Many))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.One , NumberOfHoles.Many))
# (n ::= number) (zero zero)
# (P ::= (E)) (zero many)
# (E ::= P n (E n) (E E) hole) (zero many) zero because n
def test_holereachability4(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
Nt('n', 'n_2'),
PatSequence([Nt('E', 'E_3'), Nt('E', 'E_4')]),
PatSequence([Nt('E', 'E_5'), Nt('n', 'n_6')]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.Many))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Zero, NumberOfHoles.Many))
# (n ::= number) (zero zero)
# (P ::= (E)) (zero many)
# (E ::= P (E n) (hole ...)) (zero many) hole under ellipsis
def test_holereachability5(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
PatSequence([Nt('E', 'E_5'), Nt('n', 'n_6')]),
PatSequence([Repeat(BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'))]),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.Many))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Zero, NumberOfHoles.Many))
# (n ::= number) (zero zero)
    # (P ::= (E)) (many many)
    # (E ::= P (E hole)) (many many) (((...) hole) hole)
def test_holereachability6(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
PatSequence([Nt('E', 'E_5'), BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole')]),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Many, NumberOfHoles.Many))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Many, NumberOfHoles.Many))
# (n ::= number) (zero zero)
# (P ::= (E)) (zero many)
# (E ::= P n (E hole)) (zero many) because n
def test_holereachability7(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
Nt('n', 'n_2'),
PatSequence([Nt('E', 'E_5'), BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole')]),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.Many))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Zero, NumberOfHoles.Many))
# (n ::= number) (zero zero)
# (P ::= (E E)) (many many)
# (E ::= P (E n) hole) (one many)
def test_holereachability8(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0'), Nt('E', 'E_1')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
PatSequence([Nt('E', 'E_5'), Nt('n', 'n_2')]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.One , NumberOfHoles.Many))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Many, NumberOfHoles.Many))
# (n ::= number) (zero zero)
# (P ::= (E E) hole) (one many)
# (E ::= P (E n)) (one many)
def test_holereachability9(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0'), Nt('E', 'E_1')]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
PatSequence([Nt('E', 'E_5'), Nt('n', 'n_2')]),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.One , NumberOfHoles.Many))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.One , NumberOfHoles.Many))
# (n ::= number) (zero zero)
# (Z ::= P) (zero many)
# (P ::= (E)) (zero many)
# (E ::= P ((Z) ... n) hole (zero many) because Z under ellipsis
def test_holereachability10(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('Z', 'Z'), [
Nt('P', 'P')
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
PatSequence([
Repeat(PatSequence([Nt('Z', 'Z'), ])),
Nt('n', 'n_2'),
]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.Many))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Zero, NumberOfHoles.Many))
self.assertEqual(result(lang, 'Z'), (NumberOfHoles.Zero, NumberOfHoles.Many))
# (n ::= number) (zero zero)
# (P ::= (E)) (zero many)
# (E ::= P ((P) ... ()) hole (zero many)
def test_holereachability11(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
PatSequence([
Repeat(PatSequence([Nt('P', 'P'), ])),
PatSequence([]),
]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.Many))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Zero, NumberOfHoles.Many))
# (n ::= number) (zero zero)
# (P ::= (E)) (zero many)
# (E ::= P (in-hole P n) hole (zero many)
# Think we should disallow in-hole patterns in language grammar definition.
def test_holereachability12(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
InHole(Nt('P', 'P'), Nt('n', 'n')),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
try:
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.fail()
except CompilationError as ex:
self.assertEqual(str(ex), 'in-hole pattern in define-language')
# (n ::= number) (zero zero)
# (P ::= (E)) (zero many)
# (E ::= P ((in-hole P n) ...) hole (zero many)
# Think we should disallow in-hole patterns in language grammar definition.
def test_holereachability13(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
PatSequence([Repeat(InHole(Nt('P', 'P'), Nt('n', 'n'))) ]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
try:
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.fail()
except CompilationError as ex:
self.assertEqual(str(ex), 'in-hole pattern in define-language')
# n ::number (zero, zero)
# E ::= (E hole)(E E) n (zero, many)
def test_holereachability14(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
PatSequence([Nt('E', 'E'), Nt('E', 'E') ]),
PatSequence([Nt('E', 'E'), BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole') ]),
Nt('n', 'n'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.Many))
# n ::number (zero, zero)
# E ::= (E hole)(E E) n (zero, many)
def test_holereachability15(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('n', 'n'), [
BuiltInPat(BuiltInPatKind.Number, 'number', 'number'),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
PatSequence([Nt('E', 'E'), BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole') ]),
PatSequence([Nt('E', 'E'), Nt('E', 'E') ]),
Nt('n', 'n'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'n'), (NumberOfHoles.Zero, NumberOfHoles.Zero))
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.Many))
# (P ::= (E)) (zero one)
# (E ::= P hole (()) ) (zero one)
def test_holereachability16(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
PatSequence([
PatSequence([]),
]),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.One))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Zero, NumberOfHoles.One))
# (P ::= (E)) (one one)
# (E ::= P hole) (one one)
def test_holereachability17(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Nt('P', 'P_1'),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'E'), (NumberOfHoles.One, NumberOfHoles.One))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.One, NumberOfHoles.One))
# (P ::= (E)) (zero one)
# (E ::= 44 hole) (zero one)
def test_holereachability18(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
Lit(44, LitKind.Integer),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.One))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Zero, NumberOfHoles.One))
# (P ::= (E)) (zero one)
# (E ::= (44) hole) (zero one)
def test_holereachability19(self):
lang = DefineLanguage('Lang', [
DefineLanguage.NtDefinition(Nt('P', 'P'), [
PatSequence([Nt('E', 'E_0')]),
]),
DefineLanguage.NtDefinition(Nt('E', 'E'), [
PatSequence([Lit(44, LitKind.Integer)]),
BuiltInPat(BuiltInPatKind.Hole, 'hole', 'hole'),
]),
])
graph = NtGraphBuilder(lang).run()
DefineLanguage_HoleReachabilitySolver(lang, graph).run()
self.assertEqual(result(lang, 'E'), (NumberOfHoles.Zero, NumberOfHoles.One))
self.assertEqual(result(lang, 'P'), (NumberOfHoles.Zero, NumberOfHoles.One))
| 44.574074 | 138 | 0.519919 | 2,030 | 21,663 | 5.499015 | 0.076847 | 0.012721 | 0.021141 | 0.107498 | 0.855505 | 0.849234 | 0.839022 | 0.835528 | 0.827914 | 0.827645 | 0 | 0.006431 | 0.310945 | 21,663 | 485 | 139 | 44.665979 | 0.741408 | 0.143101 | 0 | 0.884409 | 0 | 0 | 0.048554 | 0 | 0 | 0 | 0 | 0 | 0.134409 | 1 | 0.056452 | false | 0 | 0.018817 | 0.002688 | 0.080645 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f660b70372cc5702f59cf20685de0f9bfedc96b7 | 109 | py | Python | taurex/stellar.py | rychallener/TauREx3_public | eb0eeeeca8f47e5e7d64d8d70b43a3af370b7677 | [
"BSD-3-Clause"
] | 10 | 2019-12-18T09:19:16.000Z | 2021-06-21T11:02:06.000Z | taurex/stellar.py | rychallener/TauREx3_public | eb0eeeeca8f47e5e7d64d8d70b43a3af370b7677 | [
"BSD-3-Clause"
] | 10 | 2020-03-24T18:02:15.000Z | 2021-08-23T20:32:09.000Z | taurex/stellar.py | rychallener/TauREx3_public | eb0eeeeca8f47e5e7d64d8d70b43a3af370b7677 | [
"BSD-3-Clause"
] | 8 | 2020-03-26T14:16:42.000Z | 2021-12-18T22:11:25.000Z | from taurex.data.stellar.star import Star, BlackbodyStar
from taurex.data.stellar.phoenix import PhoenixStar
| 36.333333 | 56 | 0.853211 | 15 | 109 | 6.2 | 0.6 | 0.215054 | 0.301075 | 0.451613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.082569 | 109 | 2 | 57 | 54.5 | 0.93 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
9c8a0aac5d7a489ac1ad160d6d60a77b05452e73 | 24,363 | py | Python | sdk/python/pulumi_auth0/action.py | kevinschoonover/pulumi-auth0 | 18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_auth0/action.py | kevinschoonover/pulumi-auth0 | 18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_auth0/action.py | kevinschoonover/pulumi-auth0 | 18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ActionArgs', 'Action']
@pulumi.input_type
class ActionArgs:
def __init__(__self__, *,
code: pulumi.Input[str],
supported_triggers: pulumi.Input['ActionSupportedTriggersArgs'],
dependencies: Optional[pulumi.Input[Sequence[pulumi.Input['ActionDependencyArgs']]]] = None,
deploy: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[str]] = None,
secrets: Optional[pulumi.Input[Sequence[pulumi.Input['ActionSecretArgs']]]] = None):
"""
The set of arguments for constructing a Action resource.
:param pulumi.Input[str] code: The source code of the action.
:param pulumi.Input['ActionSupportedTriggersArgs'] supported_triggers: List of triggers that this action supports. At this time, an action can only target a single trigger at a time
:param pulumi.Input[Sequence[pulumi.Input['ActionDependencyArgs']]] dependencies: List of third party npm modules, and their versions, that this action depends on
:param pulumi.Input[bool] deploy: Deploying an action will create a new immutable version of the action. If the action is currently bound to a trigger, then the system will begin executing the newly deployed version of the action immediately. Default is `false`.
:param pulumi.Input[str] name: Secret name.
:param pulumi.Input[str] runtime: The Node runtime. For example `node16`, defaults to `node12`
:param pulumi.Input[Sequence[pulumi.Input['ActionSecretArgs']]] secrets: List of secrets that are included in an action or a version of an action
"""
pulumi.set(__self__, "code", code)
pulumi.set(__self__, "supported_triggers", supported_triggers)
if dependencies is not None:
pulumi.set(__self__, "dependencies", dependencies)
if deploy is not None:
pulumi.set(__self__, "deploy", deploy)
if name is not None:
pulumi.set(__self__, "name", name)
if runtime is not None:
pulumi.set(__self__, "runtime", runtime)
if secrets is not None:
pulumi.set(__self__, "secrets", secrets)
@property
@pulumi.getter
def code(self) -> pulumi.Input[str]:
"""
The source code of the action.
"""
return pulumi.get(self, "code")
@code.setter
def code(self, value: pulumi.Input[str]):
pulumi.set(self, "code", value)
@property
@pulumi.getter(name="supportedTriggers")
def supported_triggers(self) -> pulumi.Input['ActionSupportedTriggersArgs']:
"""
List of triggers that this action supports. At this time, an action can only target a single trigger at a time
"""
return pulumi.get(self, "supported_triggers")
@supported_triggers.setter
def supported_triggers(self, value: pulumi.Input['ActionSupportedTriggersArgs']):
pulumi.set(self, "supported_triggers", value)
@property
@pulumi.getter
def dependencies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ActionDependencyArgs']]]]:
"""
List of third party npm modules, and their versions, that this action depends on
"""
return pulumi.get(self, "dependencies")
@dependencies.setter
def dependencies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ActionDependencyArgs']]]]):
pulumi.set(self, "dependencies", value)
@property
@pulumi.getter
def deploy(self) -> Optional[pulumi.Input[bool]]:
"""
Deploying an action will create a new immutable version of the action. If the action is currently bound to a trigger, then the system will begin executing the newly deployed version of the action immediately. Default is `false`.
"""
return pulumi.get(self, "deploy")
@deploy.setter
def deploy(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "deploy", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Secret name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def runtime(self) -> Optional[pulumi.Input[str]]:
"""
The Node runtime. For example `node16`, defaults to `node12`
"""
return pulumi.get(self, "runtime")
@runtime.setter
def runtime(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "runtime", value)
@property
@pulumi.getter
def secrets(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ActionSecretArgs']]]]:
"""
List of secrets that are included in an action or a version of an action
"""
return pulumi.get(self, "secrets")
@secrets.setter
def secrets(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ActionSecretArgs']]]]):
pulumi.set(self, "secrets", value)
@pulumi.input_type
class _ActionState:
def __init__(__self__, *,
code: Optional[pulumi.Input[str]] = None,
dependencies: Optional[pulumi.Input[Sequence[pulumi.Input['ActionDependencyArgs']]]] = None,
deploy: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[str]] = None,
secrets: Optional[pulumi.Input[Sequence[pulumi.Input['ActionSecretArgs']]]] = None,
supported_triggers: Optional[pulumi.Input['ActionSupportedTriggersArgs']] = None,
version_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Action resources.
:param pulumi.Input[str] code: The source code of the action.
:param pulumi.Input[Sequence[pulumi.Input['ActionDependencyArgs']]] dependencies: List of third party npm modules, and their versions, that this action depends on
:param pulumi.Input[bool] deploy: Deploying an action will create a new immutable version of the action. If the action is currently bound to a trigger, then the system will begin executing the newly deployed version of the action immediately. Default is `false`.
:param pulumi.Input[str] name: Secret name.
:param pulumi.Input[str] runtime: The Node runtime. For example `node16`, defaults to `node12`
:param pulumi.Input[Sequence[pulumi.Input['ActionSecretArgs']]] secrets: List of secrets that are included in an action or a version of an action
:param pulumi.Input['ActionSupportedTriggersArgs'] supported_triggers: List of triggers that this action supports. At this time, an action can only target a single trigger at a time
:param pulumi.Input[str] version_id: Version ID of the action. This value is available if `deploy` is set to true
"""
if code is not None:
pulumi.set(__self__, "code", code)
if dependencies is not None:
pulumi.set(__self__, "dependencies", dependencies)
if deploy is not None:
pulumi.set(__self__, "deploy", deploy)
if name is not None:
pulumi.set(__self__, "name", name)
if runtime is not None:
pulumi.set(__self__, "runtime", runtime)
if secrets is not None:
pulumi.set(__self__, "secrets", secrets)
if supported_triggers is not None:
pulumi.set(__self__, "supported_triggers", supported_triggers)
if version_id is not None:
pulumi.set(__self__, "version_id", version_id)
@property
@pulumi.getter
def code(self) -> Optional[pulumi.Input[str]]:
"""
The source code of the action.
"""
return pulumi.get(self, "code")
@code.setter
def code(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "code", value)
@property
@pulumi.getter
def dependencies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ActionDependencyArgs']]]]:
"""
List of third party npm modules, and their versions, that this action depends on
"""
return pulumi.get(self, "dependencies")
@dependencies.setter
def dependencies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ActionDependencyArgs']]]]):
pulumi.set(self, "dependencies", value)
@property
@pulumi.getter
def deploy(self) -> Optional[pulumi.Input[bool]]:
"""
Deploying an action will create a new immutable version of the action. If the action is currently bound to a trigger, then the system will begin executing the newly deployed version of the action immediately. Default is `false`.
"""
return pulumi.get(self, "deploy")
@deploy.setter
def deploy(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "deploy", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Secret name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def runtime(self) -> Optional[pulumi.Input[str]]:
"""
The Node runtime. For example `node16`, defaults to `node12`
"""
return pulumi.get(self, "runtime")
@runtime.setter
def runtime(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "runtime", value)
@property
@pulumi.getter
def secrets(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ActionSecretArgs']]]]:
"""
List of secrets that are included in an action or a version of an action
"""
return pulumi.get(self, "secrets")
@secrets.setter
def secrets(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ActionSecretArgs']]]]):
pulumi.set(self, "secrets", value)
@property
@pulumi.getter(name="supportedTriggers")
def supported_triggers(self) -> Optional[pulumi.Input['ActionSupportedTriggersArgs']]:
"""
List of triggers that this action supports. At this time, an action can only target a single trigger at a time
"""
return pulumi.get(self, "supported_triggers")
@supported_triggers.setter
def supported_triggers(self, value: Optional[pulumi.Input['ActionSupportedTriggersArgs']]):
pulumi.set(self, "supported_triggers", value)
@property
@pulumi.getter(name="versionId")
def version_id(self) -> Optional[pulumi.Input[str]]:
"""
Version ID of the action. This value is available if `deploy` is set to true
"""
return pulumi.get(self, "version_id")
@version_id.setter
def version_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "version_id", value)
class Action(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
code: Optional[pulumi.Input[str]] = None,
dependencies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionDependencyArgs']]]]] = None,
deploy: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[str]] = None,
secrets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionSecretArgs']]]]] = None,
supported_triggers: Optional[pulumi.Input[pulumi.InputType['ActionSupportedTriggersArgs']]] = None,
__props__=None):
"""
Actions are secure, tenant-specific, versioned functions written in Node.js that
execute at certain points during the Auth0 runtime. Actions are used to
customize and extend Auth0's capabilities with custom logic.
## Example Usage
```python
import pulumi
import pulumi_auth0 as auth0
my_action = auth0.Action("myAction",
code=\"\"\"/**
* Handler that will be called during the execution of a PostLogin flow.
*
* @param {Event} event - Details about the user and the context in which they are logging in.
* @param {PostLoginAPI} api - Interface whose methods can be used to change the behavior of the login.
*/
exports.onExecutePostLogin = async (event, api) => {
console.log(event)
};
\"\"\",
deploy=True,
supported_triggers=auth0.ActionSupportedTriggersArgs(
id="post-login",
version="v2",
))
```
## Import
An action can be imported using the action's ID, e.g.
```sh
$ pulumi import auth0:index/action:Action example ...
```
~> For security reasons importing `secrets` is not allowed. Therefore it is advised to import the action without secrets and adding them back after the
action has been imported.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] code: The source code of the action.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionDependencyArgs']]]] dependencies: List of third party npm modules, and their versions, that this action depends on
:param pulumi.Input[bool] deploy: Deploying an action will create a new immutable version of the action. If the action is currently bound to a trigger, then the system will begin executing the newly deployed version of the action immediately. Default is `false`.
:param pulumi.Input[str] name: Secret name.
:param pulumi.Input[str] runtime: The Node runtime. For example `node16`, defaults to `node12`
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionSecretArgs']]]] secrets: List of secrets that are included in an action or a version of an action
:param pulumi.Input[pulumi.InputType['ActionSupportedTriggersArgs']] supported_triggers: List of triggers that this action supports. At this time, an action can only target a single trigger at a time
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ActionArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Actions are secure, tenant-specific, versioned functions written in Node.js that
execute at certain points during the Auth0 runtime. Actions are used to
customize and extend Auth0's capabilities with custom logic.
## Example Usage
```python
import pulumi
import pulumi_auth0 as auth0
my_action = auth0.Action("myAction",
code=\"\"\"/**
* Handler that will be called during the execution of a PostLogin flow.
*
* @param {Event} event - Details about the user and the context in which they are logging in.
* @param {PostLoginAPI} api - Interface whose methods can be used to change the behavior of the login.
*/
exports.onExecutePostLogin = async (event, api) => {
console.log(event)
};
\"\"\",
deploy=True,
supported_triggers=auth0.ActionSupportedTriggersArgs(
id="post-login",
version="v2",
))
```
## Import
An action can be imported using the action's ID, e.g.
```sh
$ pulumi import auth0:index/action:Action example ...
```
~> For security reasons importing `secrets` is not allowed. Therefore it is advised to import the action without secrets and adding them back after the
action has been imported.
:param str resource_name: The name of the resource.
:param ActionArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ActionArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
code: Optional[pulumi.Input[str]] = None,
dependencies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionDependencyArgs']]]]] = None,
deploy: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[str]] = None,
secrets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionSecretArgs']]]]] = None,
supported_triggers: Optional[pulumi.Input[pulumi.InputType['ActionSupportedTriggersArgs']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ActionArgs.__new__(ActionArgs)
if code is None and not opts.urn:
raise TypeError("Missing required property 'code'")
__props__.__dict__["code"] = code
__props__.__dict__["dependencies"] = dependencies
__props__.__dict__["deploy"] = deploy
__props__.__dict__["name"] = name
__props__.__dict__["runtime"] = runtime
__props__.__dict__["secrets"] = secrets
if supported_triggers is None and not opts.urn:
raise TypeError("Missing required property 'supported_triggers'")
__props__.__dict__["supported_triggers"] = supported_triggers
__props__.__dict__["version_id"] = None
super(Action, __self__).__init__(
'auth0:index/action:Action',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
code: Optional[pulumi.Input[str]] = None,
dependencies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionDependencyArgs']]]]] = None,
deploy: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[str]] = None,
secrets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionSecretArgs']]]]] = None,
supported_triggers: Optional[pulumi.Input[pulumi.InputType['ActionSupportedTriggersArgs']]] = None,
version_id: Optional[pulumi.Input[str]] = None) -> 'Action':
"""
Get an existing Action resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] code: The source code of the action.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionDependencyArgs']]]] dependencies: List of third party npm modules, and their versions, that this action depends on
:param pulumi.Input[bool] deploy: Deploying an action will create a new immutable version of the action. If the action is currently bound to a trigger, then the system will begin executing the newly deployed version of the action immediately. Default is `false`.
:param pulumi.Input[str] name: Secret name.
:param pulumi.Input[str] runtime: The Node runtime. For example `node16`, defaults to `node12`
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ActionSecretArgs']]]] secrets: List of secrets that are included in an action or a version of an action
:param pulumi.Input[pulumi.InputType['ActionSupportedTriggersArgs']] supported_triggers: List of triggers that this action supports. At this time, an action can only target a single trigger at a time
:param pulumi.Input[str] version_id: Version ID of the action. This value is available if `deploy` is set to true
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ActionState.__new__(_ActionState)
__props__.__dict__["code"] = code
__props__.__dict__["dependencies"] = dependencies
__props__.__dict__["deploy"] = deploy
__props__.__dict__["name"] = name
__props__.__dict__["runtime"] = runtime
__props__.__dict__["secrets"] = secrets
__props__.__dict__["supported_triggers"] = supported_triggers
__props__.__dict__["version_id"] = version_id
return Action(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def code(self) -> pulumi.Output[str]:
"""
The source code of the action.
"""
return pulumi.get(self, "code")
@property
@pulumi.getter
def dependencies(self) -> pulumi.Output[Optional[Sequence['outputs.ActionDependency']]]:
"""
List of third party npm modules, and their versions, that this action depends on
"""
return pulumi.get(self, "dependencies")
@property
@pulumi.getter
def deploy(self) -> pulumi.Output[Optional[bool]]:
"""
Deploying an action will create a new immutable version of the action. If the action is currently bound to a trigger, then the system will begin executing the newly deployed version of the action immediately. Default is `false`.
"""
return pulumi.get(self, "deploy")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Secret name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def runtime(self) -> pulumi.Output[str]:
"""
The Node runtime. For example `node16`, defaults to `node12`
"""
return pulumi.get(self, "runtime")
@property
@pulumi.getter
def secrets(self) -> pulumi.Output[Optional[Sequence['outputs.ActionSecret']]]:
"""
List of secrets that are included in an action or a version of an action
"""
return pulumi.get(self, "secrets")
@property
@pulumi.getter(name="supportedTriggers")
def supported_triggers(self) -> pulumi.Output['outputs.ActionSupportedTriggers']:
"""
List of triggers that this action supports. At this time, an action can only target a single trigger at a time
"""
return pulumi.get(self, "supported_triggers")
@property
@pulumi.getter(name="versionId")
def version_id(self) -> pulumi.Output[str]:
"""
Version ID of the action. This value is available if `deploy` is set to true
"""
return pulumi.get(self, "version_id")
| 45.116667 | 270 | 0.647868 | 2,850 | 24,363 | 5.401754 | 0.088772 | 0.090744 | 0.075284 | 0.040013 | 0.885677 | 0.869503 | 0.845404 | 0.833517 | 0.82163 | 0.806626 | 0 | 0.002521 | 0.251119 | 24,363 | 539 | 271 | 45.200371 | 0.841272 | 0.384764 | 0 | 0.72242 | 1 | 0 | 0.116228 | 0.023836 | 0 | 0 | 0 | 0 | 0 | 1 | 0.160142 | false | 0.003559 | 0.024911 | 0 | 0.281139 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9cbd46bb6aa4225d46d3b849e876df7434486563 | 12,374 | py | Python | python_scripts/generate_keras_evaluation_data.py | GoldenholzLab/LPC-RCT | b14bc440ad3f8acfbf6f776ddd749475818af70d | [
"MIT"
] | null | null | null | python_scripts/generate_keras_evaluation_data.py | GoldenholzLab/LPC-RCT | b14bc440ad3f8acfbf6f776ddd749475818af70d | [
"MIT"
] | null | null | null | python_scripts/generate_keras_evaluation_data.py | GoldenholzLab/LPC-RCT | b14bc440ad3f8acfbf6f776ddd749475818af70d | [
"MIT"
] | null | null | null | from clinical_trial_generation import generate_true_and_false_drug_histograms_and_MPC_stat_significances
import numpy as np
import json
import time
import sys
def generate_evaluation_data(one_or_two,
num_baseline_months,
num_maintenance_months,
baseline_time_scale,
maintenance_time_scale,
minimum_cumulative_baseline_seizure_count,
placebo_percent_effect_mean_upper_bound,
placebo_percent_effect_mean_lower_bound,
drug_percent_effect_mean_upper_bound,
drug_percent_effect_mean_lower_bound,
placebo_percent_effect_std_dev,
drug_percent_effect_std_dev,
num_bins,
hist_range,
num_patients_per_trial_arm_step,
final_num_patients_trial_arm_step,
                             num_trials_per_stat_power_estim):
    """Simulate RCTs at increasing per-arm sizes and return the per-arm patient
    counts, the MPC statistical power (from true-drug trials) and the type-1
    error (from false-drug trials) at each size, plus every trial's per-arm
    seizure-count histograms for both true-drug and false-drug trials."""
num_patients_per_trial_arm_array = \
np.arange(num_patients_per_trial_arm_step,
final_num_patients_trial_arm_step + num_patients_per_trial_arm_step,
num_patients_per_trial_arm_step)
num_trial_sizes = int(final_num_patients_trial_arm_step/num_patients_per_trial_arm_step)
MPC_true_drug_stat_significance_matrix = np.zeros((num_trials_per_stat_power_estim, num_trial_sizes), dtype=bool)
MPC_false_drug_stat_significance_matrix = np.zeros((num_trials_per_stat_power_estim, num_trial_sizes), dtype=bool)
true_drug_placebo_arm_hists_over_all_trial_sizes = np.zeros((num_trial_sizes, num_trials_per_stat_power_estim, num_bins, 1))
true_drug_treatment_arm_hists_over_all_trial_sizes = np.zeros((num_trial_sizes, num_trials_per_stat_power_estim, num_bins, 1))
false_drug_placebo_arm_hists_over_all_trial_sizes = np.zeros((num_trial_sizes, num_trials_per_stat_power_estim, num_bins, 1))
false_drug_treatment_arm_hists_over_all_trial_sizes = np.zeros((num_trial_sizes, num_trials_per_stat_power_estim, num_bins, 1))
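    # Each histogram tensor is shaped (trial sizes, trials, bins, 1); the
    # trailing singleton axis presumably acts as the channel dimension for
    # the downstream Keras model this data is generated to evaluate.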
for trial_iter in range(num_trials_per_stat_power_estim):
for trial_size_index in range(num_trial_sizes):
num_patients_per_trial_arm = num_patients_per_trial_arm_array[trial_size_index]
num_placebo_arm_patients = num_patients_per_trial_arm
num_drug_arm_patients = num_patients_per_trial_arm
[MPC_true_drug_stat_significance_matrix[trial_iter, trial_size_index],
MPC_false_drug_stat_significance_matrix[trial_iter, trial_size_index],
true_drug_placebo_arm_hists_over_all_trial_sizes[trial_size_index, trial_iter, :, 0],
true_drug_treatment_arm_hists_over_all_trial_sizes[trial_size_index, trial_iter, :, 0],
false_drug_placebo_arm_hists_over_all_trial_sizes[trial_size_index, trial_iter, :, 0],
false_drug_treatment_arm_hists_over_all_trial_sizes[trial_size_index, trial_iter, :, 0]] = \
generate_true_and_false_drug_histograms_and_MPC_stat_significances(one_or_two,
num_placebo_arm_patients,
num_drug_arm_patients,
num_baseline_months,
num_maintenance_months,
baseline_time_scale,
maintenance_time_scale,
minimum_cumulative_baseline_seizure_count,
placebo_percent_effect_mean_upper_bound,
placebo_percent_effect_mean_lower_bound,
placebo_percent_effect_std_dev,
drug_percent_effect_mean_upper_bound,
drug_percent_effect_mean_lower_bound,
drug_percent_effect_std_dev,
num_bins,
hist_range)
        print('trial #' + str(trial_iter + 1) + ' of ' + str(num_trials_per_stat_power_estim) + ' completed for all trial sizes')
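    # Fraction of simulated trials reaching MPC statistical significance:
    # under the true drug this estimates statistical power; under the false
    # (ineffective) drug it estimates the type-1 error rate.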
MPC_stat_power_array = np.sum(MPC_true_drug_stat_significance_matrix, 0)/num_trials_per_stat_power_estim
MPC_type_1_error_array = np.sum(MPC_false_drug_stat_significance_matrix, 0)/num_trials_per_stat_power_estim
return [num_patients_per_trial_arm_array,
MPC_stat_power_array,
MPC_type_1_error_array,
true_drug_placebo_arm_hists_over_all_trial_sizes,
true_drug_treatment_arm_hists_over_all_trial_sizes,
false_drug_placebo_arm_hists_over_all_trial_sizes,
false_drug_treatment_arm_hists_over_all_trial_sizes]
def store_evaluation_data(num_patients_per_trial_arm_array,
MPC_stat_power_array,
MPC_type_1_error_array,
true_drug_placebo_arm_hists_over_all_trial_sizes,
true_drug_treatment_arm_hists_over_all_trial_sizes,
false_drug_placebo_arm_hists_over_all_trial_sizes,
false_drug_treatment_arm_hists_over_all_trial_sizes,
evaluation_data_dir,
evaluation_data_file_name):
evaluation_data_file_path = evaluation_data_dir + '/' + evaluation_data_file_name + '.json'
data = []
data.append(num_patients_per_trial_arm_array.tolist())
data.append(MPC_stat_power_array.tolist())
data.append(MPC_type_1_error_array.tolist())
data.append(true_drug_placebo_arm_hists_over_all_trial_sizes.tolist())
data.append(true_drug_treatment_arm_hists_over_all_trial_sizes.tolist())
data.append(false_drug_placebo_arm_hists_over_all_trial_sizes.tolist())
data.append(false_drug_treatment_arm_hists_over_all_trial_sizes.tolist())
with open(evaluation_data_file_path, 'w+') as json_file:
json.dump(data, json_file)
def get_inputs():
one_or_two = sys.argv[1]
placebo_percent_effect_mean_upper_bound = float(sys.argv[2])
placebo_percent_effect_mean_lower_bound = float(sys.argv[3])
drug_percent_effect_mean_upper_bound = float(sys.argv[4])
drug_percent_effect_mean_lower_bound = float(sys.argv[5])
placebo_percent_effect_std_dev = float(sys.argv[6])
drug_percent_effect_std_dev = float(sys.argv[7])
num_patients_per_trial_step = int(sys.argv[8])
final_num_patients_trial_step = int(sys.argv[9])
num_trials_per_stat_power_estim = int(sys.argv[10])
evaluation_data_file_name = sys.argv[11]
#======================================================================================================#
    if (num_patients_per_trial_step % 2 == 1) or (final_num_patients_trial_step % 2 == 1):
        raise ValueError('The number of patients per trial needs to be divisible by 2.')
num_patients_per_trial_arm_step = int(num_patients_per_trial_step/2)
final_num_patients_trial_arm_step = int(final_num_patients_trial_step/2)
num_baseline_months = 2
num_maintenance_months = 3
baseline_time_scale = 'weekly'
maintenance_time_scale = 'weekly'
minimum_cumulative_baseline_seizure_count = 4
num_bins = 80
hist_range = [-3, 1]
    evaluation_data_dir = os.getcwd()
return [one_or_two,
num_baseline_months,
num_maintenance_months,
baseline_time_scale,
maintenance_time_scale,
minimum_cumulative_baseline_seizure_count,
placebo_percent_effect_mean_upper_bound,
placebo_percent_effect_mean_lower_bound,
drug_percent_effect_mean_upper_bound,
drug_percent_effect_mean_lower_bound,
placebo_percent_effect_std_dev,
drug_percent_effect_std_dev,
num_bins,
hist_range,
num_patients_per_trial_arm_step,
final_num_patients_trial_arm_step,
num_trials_per_stat_power_estim,
evaluation_data_dir,
evaluation_data_file_name]
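
# Example invocation (the argument values here are illustrative only):
#   python generate_keras_evaluation_data.py one 0.4 0.1 0.5 0.2 0.05 0.05 \
#       20 200 100 keras_evaluation_data
# The positional arguments map, in order, to: one_or_two, the placebo
# percent-effect mean upper/lower bounds, the drug percent-effect mean
# upper/lower bounds, the placebo and drug percent-effect standard
# deviations, the per-trial patient step size and final step (both must be
# even), the number of trials per statistical power estimate, and the output
# file name (written as JSON into the current working directory).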
def main():
[one_or_two,
num_baseline_months,
num_maintenance_months,
baseline_time_scale,
maintenance_time_scale,
minimum_cumulative_baseline_seizure_count,
placebo_percent_effect_mean_upper_bound,
placebo_percent_effect_mean_lower_bound,
drug_percent_effect_mean_upper_bound,
drug_percent_effect_mean_lower_bound,
placebo_percent_effect_std_dev,
drug_percent_effect_std_dev,
num_bins,
hist_range,
num_patients_per_trial_arm_step,
final_num_patients_trial_arm_step,
num_trials_per_stat_power_estim,
evaluation_data_dir,
evaluation_data_file_name] = \
get_inputs()
[num_patients_per_trial_arm_array,
MPC_stat_power_array,
MPC_type_1_error_array,
true_drug_placebo_arm_hists_over_all_trial_sizes,
true_drug_treatment_arm_hists_over_all_trial_sizes,
false_drug_placebo_arm_hists_over_all_trial_sizes,
false_drug_treatment_arm_hists_over_all_trial_sizes] = \
generate_evaluation_data(one_or_two,
num_baseline_months,
num_maintenance_months,
baseline_time_scale,
maintenance_time_scale,
minimum_cumulative_baseline_seizure_count,
placebo_percent_effect_mean_upper_bound,
placebo_percent_effect_mean_lower_bound,
drug_percent_effect_mean_upper_bound,
drug_percent_effect_mean_lower_bound,
placebo_percent_effect_std_dev,
drug_percent_effect_std_dev,
num_bins,
hist_range,
num_patients_per_trial_arm_step,
final_num_patients_trial_arm_step,
num_trials_per_stat_power_estim)
store_evaluation_data(num_patients_per_trial_arm_array,
MPC_stat_power_array,
MPC_type_1_error_array,
true_drug_placebo_arm_hists_over_all_trial_sizes,
true_drug_treatment_arm_hists_over_all_trial_sizes,
false_drug_placebo_arm_hists_over_all_trial_sizes,
false_drug_treatment_arm_hists_over_all_trial_sizes,
evaluation_data_dir,
evaluation_data_file_name)
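    # Quick sanity checks: report the histogram tensor shapes and plot the
    # estimated statistical power and type-1 error (scaled to percent)
    # against the total number of patients per trial.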
print(true_drug_placebo_arm_hists_over_all_trial_sizes.shape)
print(true_drug_treatment_arm_hists_over_all_trial_sizes.shape)
print(false_drug_placebo_arm_hists_over_all_trial_sizes.shape)
print(false_drug_treatment_arm_hists_over_all_trial_sizes.shape)
import matplotlib.pyplot as plt
plt.figure()
plt.plot(2*num_patients_per_trial_arm_array, 100*MPC_stat_power_array)
plt.plot(2*num_patients_per_trial_arm_array, 100*MPC_type_1_error_array)
plt.show()
if __name__ == '__main__':
    start_time_in_seconds = time.time()
    main()
    total_runtime_in_minutes_str = str(np.round((time.time() - start_time_in_seconds)/60, 3)) + ' minutes'
    print(total_runtime_in_minutes_str)
| 48.909091 | 131 | 0.620737 | 1,431 | 12,374 | 4.682739 | 0.095038 | 0.059693 | 0.057305 | 0.071631 | 0.877183 | 0.827936 | 0.768244 | 0.742874 | 0.719594 | 0.678705 | 0 | 0.006858 | 0.32835 | 12,374 | 252 | 132 | 49.103175 | 0.799422 | 0.008243 | 0 | 0.49505 | 1 | 0 | 0.009209 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019802 | false | 0 | 0.034653 | 0 | 0.064356 | 0.029703 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1ad5a1631c1e61ddb301a8419d002aa1778db0c8 | 229 | py | Python | summarize/training/metrics/__init__.py | danieldeutsch/summarize | f36a86d58f381ff1f607f356dad3d6ef7b0e0224 | [
"Apache-2.0"
] | 15 | 2019-11-01T11:49:44.000Z | 2021-01-19T06:59:32.000Z | summarize/training/metrics/__init__.py | CogComp/summary-cloze | b38e3e8c7755903477fd92a4cff27125cbf5553d | [
"Apache-2.0"
] | 2 | 2020-03-30T07:54:01.000Z | 2021-11-15T16:27:42.000Z | summarize/training/metrics/__init__.py | CogComp/summary-cloze | b38e3e8c7755903477fd92a4cff27125cbf5553d | [
"Apache-2.0"
] | 3 | 2019-12-06T05:57:51.000Z | 2019-12-11T11:34:21.000Z | from summarize.training.metrics.binary_f1_measure import BinaryF1Measure
from summarize.training.metrics.python_rouge_metric import PythonRougeMetric
from summarize.training.metrics.cross_entropy_metric import CrossEntropyMetric
| 57.25 | 78 | 0.908297 | 27 | 229 | 7.481481 | 0.592593 | 0.193069 | 0.311881 | 0.415842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009217 | 0.052402 | 229 | 3 | 79 | 76.333333 | 0.921659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
2149a646473343d92bb3f3db8591101307b23f11 | 109,554 | py | Python | tests/compiler/push_down/test_push_down_agg.py | CCD-HRI/congregation | a552856b03a64a4295792184107c4e529ca3f4ae | [
"MIT"
] | 3 | 2020-10-05T16:30:15.000Z | 2021-01-22T13:38:02.000Z | tests/compiler/push_down/test_push_down_agg.py | CCD-HRI/congregation | a552856b03a64a4295792184107c4e529ca3f4ae | [
"MIT"
] | null | null | null | tests/compiler/push_down/test_push_down_agg.py | CCD-HRI/congregation | a552856b03a64a4295792184107c4e529ca3f4ae | [
"MIT"
] | 1 | 2021-02-19T12:40:57.000Z | 2021-02-19T12:40:57.000Z | from congregation.lang import *
from congregation.dag import Dag
from congregation.dag.nodes.internal import *
from congregation.comp import PushDown
from tests.utils import create_cols, compare_to_expected
import pytest
"""
Tests for correct propagation of the following relation-level
and column-level attributes after the PushDown() phase of the
compiler has been run:
- DAG node order
- node.requires_mpc() attribute
- relation-level stored_with sets
- column-level plaintext sets
- column-level trust_with sets
- column ordering in cases where columns
are reordered as part of the Op.
"""
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, AggregateCount, Create, AggregateCount, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, AggregateCount, Create, AggregateCount, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, AggregateCount, Create, AggregateCount, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, False, False, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateCount, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
)
])
def test_count(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
ac = aggregate_count(cc, "counted", [party_data[0]["col_names"][0]], "count_col")
collect(ac, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, AggregateCount, Create, AggregateCount, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}],
"col_names": ["a", "b"]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}],
"col_names": ["b", "count"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["d", "count"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "count"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "count"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "count"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateCount, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["c", "d"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "count"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "count"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1, 2}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, AggregateCount, Create, AggregateCount, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, False, False, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1, 2}],
"col_names": ["a", "b"]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "count"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["d", "count"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}],
"col_names": ["b", "count"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}],
"col_names": ["b", "count"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "count"]
}
]
}
),
])
def test_count_alt_key_col(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
ac = aggregate_count(cc, "counted", [party_data[0]["col_names"][1]], "count")
collect(ac, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
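    # Also verify column ordering: aggregating on the second input column
    # should surface it as the leading (key) column of every rewritten out_rel.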
zip_col_names = zip(d.top_sort(), [e["col_names"] for e in expected["ownership_data"]])
col_name_checks = [[c.name for c in z[0].out_rel.columns] == z[1] for z in zip_col_names]
assert all(col_name_checks)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, AggregateSum, AggregateSum, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, AggregateSum, AggregateSum, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, AggregateSum, AggregateSum, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateSum, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
)
])
def test_sum(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
asum = aggregate(cc, "sum", [party_data[0]["col_names"][0]], party_data[0]["col_names"][1], "sum")
collect(asum, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, AggregateSum, AggregateSum, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}],
"col_names": ["a", "b"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}],
"col_names": ["b", "sum"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["d", "sum"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "sum"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "sum"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "sum"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateSum, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["c", "d"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "sum"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "sum"]
}
]
}
),
])
def test_sum_alt_key_col(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
asum = aggregate(cc, "sum", [party_data[0]["col_names"][1]], party_data[0]["col_names"][0], "sum", "sum")
collect(asum, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
zip_col_names = zip(d.top_sort(), [e["col_names"] for e in expected["ownership_data"]])
col_name_checks = [[c.name for c in z[0].out_rel.columns] == z[1] for z in zip_col_names]
assert all(col_name_checks)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, AggregateSum, AggregateSum, Concat, AggregateSum, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}],
"col_names": ["a", "b"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}],
"trust_with_sets": [{1}],
"col_names": ["a"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}],
"trust_with_sets": [{2}],
"col_names": ["c"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()],
"col_names": ["a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()],
"col_names": ["sum"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}],
"col_names": ["sum"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateSum, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["c", "d"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()],
"col_names": ["sum"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}],
"col_names": ["sum"]
}
]
}
),
])
def test_sum_no_key_cols(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
asum = aggregate(cc, "sum", [], party_data[0]["col_names"][0], "sum", "sum")
collect(asum, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, AggregateSumCountCol, AggregateSumCountCol, Concat, AggregateMean, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, AggregateSumCountCol, AggregateSumCountCol, Concat, AggregateMean, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, AggregateSumCountCol, AggregateSumCountCol, Concat, AggregateMean, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1, 2}, {1}, {1, 2}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [{2}, set(), {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateMean, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
)
])
def test_mean(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
amean = aggregate(cc, "mean", [party_data[0]["col_names"][0]], party_data[0]["col_names"][1], "mean")
collect(amean, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, AggregateSumCountCol, AggregateSumCountCol, Concat, AggregateMean, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}],
"col_names": ["a", "b"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}],
"col_names": ["b", "a", "__COUNT__"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}],
"col_names": ["d", "c", "__COUNT__"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [set(), set(), set()],
"col_names": ["b", "a", "__COUNT__"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "a"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, Concat, AggregateMean, Collect],
"requires_mpc": [False, False, False, False, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["a", "b"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}],
"col_names": ["a", "b"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}],
"col_names": ["b", "a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "a"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateMean, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["c", "d"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "a"]
}
]
}
)
])
def test_mean_alt_key_col(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
amean = aggregate(cc, "mean", [party_data[0]["col_names"][1]], party_data[0]["col_names"][0], "mean")
collect(amean, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
zip_col_names = zip(d.top_sort(), [e["col_names"] for e in expected["ownership_data"]])
col_name_checks = [[c.name for c in z[0].out_rel.columns] == z[1] for z in zip_col_names]
assert all(col_name_checks)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, AggregateSumCountCol, AggregateSumCountCol, Concat, AggregateMean, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, AggregateSumCountCol, AggregateSumCountCol, Concat, AggregateMean, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, AggregateSumCountCol, AggregateSumCountCol, Concat, AggregateMean, Collect],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [{2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateMean, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
)
])
def test_mean_no_key_cols(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
amean = aggregate(cc, "mean", [], party_data[0]["col_names"][0], "mean")
collect(amean, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1, 2}, {1}, {1}, {1, 2}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [{2}, set(), set(), {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateStdDev, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
)
])
def test_std_dev(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
std_dev = aggregate(cc, "std_dev", [party_data[0]["col_names"][0]], party_data[0]["col_names"][1], "std_dev")
collect(std_dev, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}],
"col_names": ["a", "b"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}],
"col_names": ["b", "a", "__SQUARES__", "__COUNT__"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}],
"col_names": ["d", "c", "__SQUARES__", "__COUNT__"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()],
"col_names": ["b", "a", "__SQUARES__", "__COUNT__"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "a"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, Concat, AggregateStdDev, Collect],
"requires_mpc": [False, False, False, False, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["a", "b"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}],
"col_names": ["a", "b"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}],
"col_names": ["b", "a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "a"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateStdDev, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["c", "d"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "a"]
}
]
}
)
])
def test_std_dev_alt_key_col(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
std_dev = aggregate(cc, "std_dev", [party_data[0]["col_names"][1]], party_data[0]["col_names"][0], "std_dev")
collect(std_dev, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
zip_col_names = zip(d.top_sort(), [e["col_names"] for e in expected["ownership_data"]])
col_name_checks = [[c.name for c in z[0].out_rel.columns] == z[1] for z in zip_col_names]
assert all(col_name_checks)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [{2}, {2}, set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [{2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateStdDev, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
)
])
def test_std_dev_no_key_cols(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
std_dev = aggregate(cc, "std_dev", [], party_data[0]["col_names"][0], "std_dev")
collect(std_dev, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
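# The tests above and below rely on the same decomposition: std_dev/variance
# over a Concat of party inputs is rewritten so each party first computes a
# local AggregateSumSquaresAndCount, and only those small partials cross into
# MPC. A minimal, framework-free sketch of that algebra (names here are
# illustrative only, not part of the tested API):
def _illustrate_pushdown_algebra():
    import math
    party_one = [1.0, 2.0, 3.0]
    party_two = [4.0, 5.0]
    # Each party computes small local partials in plaintext.
    partials = [(sum(r), sum(x * x for x in r), len(r))
                for r in (party_one, party_two)]
    # Only these partials need to be combined under MPC.
    s = sum(p[0] for p in partials)
    ss = sum(p[1] for p in partials)
    n = sum(p[2] for p in partials)
    variance = ss / n - (s / n) ** 2
    std_dev = math.sqrt(variance)
    assert abs(variance - 2.0) < 1e-9
    assert abs(std_dev - math.sqrt(2.0)) < 1e-9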
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1, 2}, {1}, {1}, {1, 2}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [{2}, set(), set(), {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateVariance, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
)
])
def test_variance(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
variance = aggregate(cc, "variance", [party_data[0]["col_names"][0]], party_data[0]["col_names"][1], "variance")
collect(variance, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}],
"col_names": ["a", "b"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}],
"col_names": ["b", "a", "__SQUARES__", "__COUNT__"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}],
"col_names": ["d", "c", "__SQUARES__", "__COUNT__"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()],
"col_names": ["b", "a", "__SQUARES__", "__COUNT__"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "a"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [Create, Create, Concat, AggregateVariance, Collect],
"requires_mpc": [False, False, False, False, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["a", "b"]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}],
"col_names": ["c", "d"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}],
"col_names": ["a", "b"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [{2}, {2}],
"col_names": ["b", "a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "a"]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateVariance, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["c", "d"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["a", "b"]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()],
"col_names": ["b", "a"]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}],
"col_names": ["b", "a"]
}
]
}
)
])
def test_variance_alt_key_col(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
variance = aggregate(cc, "variance", [party_data[0]["col_names"][1]], party_data[0]["col_names"][0], "variance")
collect(variance, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected)
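    # Verify, node by node in topological order, that the rewritten DAG's
    # output relations carry the expected column names.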
zip_col_names = zip(d.top_sort(), [e["col_names"] for e in expected["ownership_data"]])
col_name_checks = [[c.name for c in z[0].out_rel.columns] == z[1] for z in zip_col_names]
assert all(col_name_checks)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False],
"ownership_data": [
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}],
"trust_with_sets": [{1, 2}, {1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set()],
"trust_with_sets": [{2}, {2}, set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set()],
"trust_with_sets": [{2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [Create, Create, Concat, AggregateVariance, Collect],
"requires_mpc": [True, True, True, True, False],
"ownership_data": [
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set()],
"trust_with_sets": [set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}],
"trust_with_sets": [{1, 2}]
}
]
}
)
])
def test_variance_no_key_cols(party_data, expected):
cols_in_one = create_cols(party_data[0])
cols_in_two = create_cols(party_data[1])
rel_one = create("in1", cols_in_one, party_data[0]["stored_with"])
rel_two = create("in2", cols_in_two, party_data[1]["stored_with"])
cc = concat([rel_one, rel_two], "concat", party_data[0]["col_names"])
variance = aggregate(cc, "variance", [], party_data[0]["col_names"][0], "variance")
collect(variance, {1, 2})
d = Dag({rel_one, rel_two})
pd = PushDown()
pd.rewrite(d)
compare_to_expected(d, expected) | 35.271732 | 119 | 0.325063 | 8,663 | 109,554 | 3.816807 | 0.013275 | 0.12914 | 0.156873 | 0.072584 | 0.983215 | 0.98137 | 0.9801 | 0.979162 | 0.979162 | 0.97765 | 0 | 0.036986 | 0.501725 | 109,554 | 3,106 | 120 | 35.271732 | 0.568733 | 0 | 0 | 0.633657 | 0 | 0 | 0.19522 | 0 | 0 | 0 | 0 | 0 | 0.001673 | 1 | 0.004684 | false | 0 | 0.002007 | 0 | 0.006691 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
372f2a6d58a1437c0ea0a071fcd7ff9143640281 | 13,637 | py | Python | estate/terraform/migrations/0001_initial.py | asloan7/estate | 07d9802198947bfcd25d019b5c79610a9a81e25e | [
"MIT"
] | 95 | 2017-11-02T12:47:24.000Z | 2021-08-14T22:44:53.000Z | estate/terraform/migrations/0001_initial.py | asloan7/estate | 07d9802198947bfcd25d019b5c79610a9a81e25e | [
"MIT"
] | 6 | 2018-10-28T22:30:41.000Z | 2018-10-29T01:08:47.000Z | estate/terraform/migrations/0001_initial.py | asloan7/estate | 07d9802198947bfcd25d019b5c79610a9a81e25e | [
"MIT"
] | 63 | 2017-10-31T13:36:35.000Z | 2022-02-04T01:56:59.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-06-26 14:59
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import semantic_version.base
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='File',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(blank=True, null=True, verbose_name='description')),
('slug', django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from=b'title', verbose_name='slug')),
('content', models.TextField(blank=True, verbose_name='content')),
],
options={
'ordering': ('-modified', '-created'),
'abstract': False,
'get_latest_by': 'modified',
},
),
migrations.CreateModel(
name='HistoricalFile',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(blank=True, null=True, verbose_name='description')),
('slug', django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from=b'title', verbose_name='slug')),
('content', models.TextField(blank=True, verbose_name='content')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical file',
},
),
migrations.CreateModel(
name='HistoricalNamespace',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(blank=True, null=True, verbose_name='description')),
('slug', django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from=b'title', verbose_name='slug')),
('owner', models.CharField(max_length=80, verbose_name='owner')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical namespace',
},
),
migrations.CreateModel(
name='HistoricalTemplate',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(blank=True, null=True, verbose_name='description')),
('slug', django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from=b'title', verbose_name='slug')),
('version', models.CharField(default=b'0.0.0', max_length=128, validators=[semantic_version.base.validate], verbose_name='version')),
('json_schema', models.TextField(blank=True, verbose_name='JSONSchema')),
('ui_schema', models.TextField(blank=True, verbose_name='UISchema')),
('body', models.TextField(blank=True, verbose_name='body')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical template',
},
),
migrations.CreateModel(
name='HistoricalTemplateInstance',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(blank=True, null=True, verbose_name='description')),
('slug', django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from=b'title', verbose_name='slug')),
('disable', models.BooleanField(default=False, verbose_name='disable')),
('inputs', django.contrib.postgres.fields.jsonb.JSONField(verbose_name='inputs')),
('overrides', models.TextField(blank=True, verbose_name='overrides')),
('historical_template', models.IntegerField()),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical template instance',
},
),
migrations.CreateModel(
name='Namespace',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(blank=True, null=True, verbose_name='description')),
('slug', django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from=b'title', verbose_name='slug')),
('owner', models.CharField(max_length=80, verbose_name='owner')),
],
options={
'ordering': ('-modified', '-created'),
'abstract': False,
'get_latest_by': 'modified',
},
),
migrations.CreateModel(
name='Template',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(blank=True, null=True, verbose_name='description')),
('slug', django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from=b'title', verbose_name='slug')),
('version', models.CharField(default=b'0.0.0', max_length=128, validators=[semantic_version.base.validate], verbose_name='version')),
('json_schema', models.TextField(blank=True, verbose_name='JSONSchema')),
('ui_schema', models.TextField(blank=True, verbose_name='UISchema')),
('body', models.TextField(blank=True, verbose_name='body')),
],
options={
'ordering': ('-modified', '-created'),
'abstract': False,
'get_latest_by': 'modified',
},
),
migrations.CreateModel(
name='TemplateDependency',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('condition', models.IntegerField(choices=[(1, 'Any'), (2, 'All'), (3, 'Not Any'), (4, 'Not All')], default=1)),
('dependencies', models.ManyToManyField(related_name='asDependent', to='terraform.Template')),
('template', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dependencies', to='terraform.Template')),
],
options={
'verbose_name': 'TemplateDependencies',
},
),
migrations.CreateModel(
name='TemplateInstance',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(blank=True, null=True, verbose_name='description')),
('slug', django_extensions.db.fields.AutoSlugField(blank=True, editable=False, populate_from=b'title', verbose_name='slug')),
('disable', models.BooleanField(default=False, verbose_name='disable')),
('inputs', django.contrib.postgres.fields.jsonb.JSONField(verbose_name='inputs')),
('overrides', models.TextField(blank=True, verbose_name='overrides')),
('historical_template', models.IntegerField()),
('namespace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='templates', to='terraform.Namespace')),
],
options={
'ordering': ('-modified', '-created'),
'abstract': False,
'get_latest_by': 'modified',
},
),
migrations.AddField(
model_name='historicaltemplateinstance',
name='namespace',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='terraform.Namespace'),
),
migrations.AddField(
model_name='historicalfile',
name='namespace',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='terraform.Namespace'),
),
migrations.AddField(
model_name='file',
name='namespace',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='terraform.Namespace'),
),
]
| 63.427907 | 172 | 0.610765 | 1,334 | 13,637 | 6.044228 | 0.107196 | 0.098226 | 0.070693 | 0.074414 | 0.877837 | 0.871636 | 0.871636 | 0.871636 | 0.871636 | 0.871636 | 0 | 0.00635 | 0.237809 | 13,637 | 214 | 173 | 63.724299 | 0.769386 | 0.004986 | 0 | 0.781553 | 1 | 0 | 0.165266 | 0.003833 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.033981 | 0 | 0.053398 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2ec95f69916c267e9ee3a7473eafe11744a9f593 | 11,513 | py | Python | qa327_test/frontend/test_register.py | awebsters/SeetGeak-Quality-Assurance-Example | 65272068d8fe81266efb0b8528bac339fb063891 | [
"MIT"
] | null | null | null | qa327_test/frontend/test_register.py | awebsters/SeetGeak-Quality-Assurance-Example | 65272068d8fe81266efb0b8528bac339fb063891 | [
"MIT"
] | null | null | null | qa327_test/frontend/test_register.py | awebsters/SeetGeak-Quality-Assurance-Example | 65272068d8fe81266efb0b8528bac339fb063891 | [
"MIT"
] | null | null | null | from seleniumbase import BaseCase
from qa327_test.conftest import base_url
from unittest.mock import patch
from qa327.models import User
from werkzeug.security import generate_password_hash
# Mock a sample user
test_user = User(
email='test_frontend@test.com',
name='test_frontend',
password=generate_password_hash('Test1234!', method='sha256'),
balance=5000
)
class RegistrationPageTest(BaseCase):
@patch('qa327.backend.get_user', return_value=test_user)
def test_login_redirect(self, *_):
"""
R2.1.1 - Tests that /register redirects to / when a valid session exists
"""
#Invalidate any logged in sessions
self.open(base_url + '/logout')
#Open login page
self.open(base_url + '/login')
#Fill in form
self.type("#email", "test_frontend@test.com")
self.type("#password", "Test1234!")
# Submit
self.click('input[type="submit"]')
# Open register page
self.open(base_url + '/register')
self.assert_equal(self.get_current_url(), base_url + '/')
def test_login_not_redirect(self, *_):
"""
R2.2.1 - Tests that the user stays on /register when there is no session
"""
#Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
self.assert_equal(self.get_current_url(), base_url + '/register')
def test_registration_form_present(self, *_):
"""
R2.3.1 - Tests that the /register page has input fields for email, username, password and password2
"""
#Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
#Check if elements exist
self.assert_element("#email")
self.assert_element("#name")
self.assert_element("#password")
self.assert_element("#password2")
@patch('qa327.backend.get_user', return_value=None)
@patch('qa327.backend.register_user', return_value=True)
def test_registration_valid_input(self, *_):
"""
R2.5.1- Check success with all valid inputs
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
#Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "test")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!")
self.click('input[type="submit"]')
self.assert_text("Please login", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_invalid_email_input(self, *_):
"""
R2.5.2- Check failure with only email as invalid
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test")
self.type("#name", "test")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!")
self.click('input[type="submit"]')
self.assert_text("Email format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_invalid_password_length_input(self, *_):
"""
R2.5.3- Check failure with password length < 6
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "test")
self.type("#password", "T")
self.type("#password2", "T")
self.click('input[type="submit"]')
self.assert_text("Password format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_invalid_password_uppercare_input(self, *_):
"""
R2.5.4- Check failure with password with no uppercase letters
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "test")
self.type("#password", "test1234!")
self.type("#password2", "test1234!")
self.click('input[type="submit"]')
self.assert_text("Password format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_invalid_password_lowercase_input(self, *_):
"""
R2.5.5- Check failure with password with no lowercase letters
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "test")
self.type("#password", "TEST1234!")
self.type("#password2", "TEST1234!")
self.click('input[type="submit"]')
self.assert_text("Password format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_invalid_password_NoSpecialCharacters_input(self, *_):
"""
R2.5.6- Check failure with password with no special characters
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "test")
self.type("#password", "Test1234")
self.type("#password2", "Test1234")
self.click('input[type="submit"]')
self.assert_text("Password format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_passwords_dont_match(self, *_):
"""
R2.6- Check registration failure if password2 != password
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "test")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!!")
self.click('input[type="submit"]')
self.assert_text("The passwords do not match", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_empty_name(self, *_):
"""
R2.7.1- Check failure with empty username
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!")
self.click('input[type="submit"]')
self.assert_equal(self.get_current_url(), base_url + '/register')
@patch('qa327.backend.get_user', return_value=None)
def test_registration_username_non_alphanumeric(self, *_):
"""
R2.7.2- check failure with a non-alphanumeric character
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "Test!")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!")
self.click('input[type="submit"]')
self.wait_for_element("#message")
self.assert_text("Name format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_username_invalid_space_start(self, *_):
"""
R2.7.3- Check failure with a space at the start
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", " Test")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!")
self.click('input[type="submit"]')
self.assert_text("Name format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_username_invalid_space_end(self, *_):
"""
R2.7.4- Check failure with a space at the end
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "Test ")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!")
self.click('input[type="submit"]')
self.assert_text("Name format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_username_invalid_length2(self, *_):
"""
R2.8.1- Check failure with username of length 2
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "Te")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!")
self.click('input[type="submit"]')
self.assert_text("Name format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=None)
def test_registration_username_invalid_length20(self, *_):
"""
R2.8.2 - Check failure with a username longer than 20 characters
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "Teeeeeeeeeeeeeeeeeessst")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!")
self.click('input[type="submit"]')
self.assert_text("Name format is incorrect.", "#message")
@patch('qa327.backend.get_user', return_value=test_user)
def test_registration_email_exists(self, *_):
"""
Check registration failure when the email has already been used
"""
# Invalidate any logged in sessions
self.open(base_url + '/logout')
# Open register page
self.open(base_url + '/register')
# Fill out form
self.type("#email", "test_frontend@test.com")
self.type("#name", "Test")
self.type("#password", "Test1234!")
self.type("#password2", "Test1234!")
self.click('input[type="submit"]')
self.assert_text("This email has been ALREADY used", "#message")
| 31.116216 | 103 | 0.60106 | 1,353 | 11,513 | 4.961567 | 0.104952 | 0.06912 | 0.062565 | 0.078206 | 0.796514 | 0.780873 | 0.762699 | 0.754655 | 0.748846 | 0.748846 | 0 | 0.029102 | 0.25684 | 11,513 | 369 | 104 | 31.200542 | 0.755493 | 0.184053 | 0 | 0.683908 | 1 | 0 | 0.284596 | 0.079365 | 0 | 0 | 0 | 0 | 0.114943 | 1 | 0.097701 | false | 0.247126 | 0.028736 | 0 | 0.132184 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
2ed7e76182c822550e745c419c20a6ed2f7fcdbe | 3,311 | py | Python | cloudmesh/storage/provider/storage.py | manjunathsivan/cm | fedc33b503e7fd59176678e775c024fbb5e952c0 | [
"Apache-2.0"
] | null | null | null | cloudmesh/storage/provider/storage.py | manjunathsivan/cm | fedc33b503e7fd59176678e775c024fbb5e952c0 | [
"Apache-2.0"
] | 1 | 2020-10-21T18:15:46.000Z | 2020-10-21T18:15:46.000Z | cloudmesh/storage/provider/storage.py | robludwig/cm | 5b18012def16d4d845141798652bb11d5564e9db | [
"Apache-2.0"
] | null | null | null | from cloudmesh.abstractclass.StorageABC import StorageABC
# from cloudmesh.storage.google.Provider import Provider as GoogleStorageProvider
# from cloudmesh.storage.box.Provider import Provider as BoxProvider
class Provider(StorageABC):
def __init__(self, cloud, config):
'''
if cloud == 'google':
self.p = GoogleStorageProvider
'''
raise NotImplementedError
def create_dir(self, service=None, directory=None):
"""
creates a directory
:param service: the name of the service in the yaml file
:param directory: the name of the directory
:return: dict
"""
raise NotImplementedError
# @DatabaseUpdate
def list(self, service=None, source=None, recursive=False):
"""
lists the information as dict
:param service: the name of the service in the yaml file
:param source: the source, which can either be a directory or a file
:param recursive: in the case of a directory, recursive refers to all
subdirectories of the specified source
:return: dict
"""
raise NotImplementedError
def put(self, service=None, source=None, destination=None, recursive=False):
"""
puts the source on the service
:param service: the name of the service in the yaml file
:param source: the source, which can either be a directory or a file
:param destination: the destination, which can either be a directory or a file
:param recursive: in the case of a directory, recursive refers to all
subdirectories of the specified source
:return: dict
"""
raise NotImplementedError
def get(self, service=None, source=None, destination=None, recursive=False):
"""
gets the source from the service and copies it to the destination
:param service: the name of the service in the yaml file
:param source: the source, which can either be a directory or a file
:param destination: the destination, which can either be a directory or a file
:param recursive: in the case of a directory, recursive refers to all
subdirectories of the specified source
:return: dict
"""
raise NotImplementedError
def delete(self, service=None, source=None, recursive=False):
"""
deletes the source
:param service: the name of the service in the yaml file
:param source: the source, which can either be a directory or a file
:param recursive: in the case of a directory, recursive refers to all
subdirectories of the specified source
:return: dict
"""
raise NotImplementedError
def search(self, service=None, directory=None, filename=None,
recursive=False):
"""
searches for the filename within the given directory on the service
:param service: the name of the service in the yaml file
:param directory: the directory to search in
:param filename: the name of the file to search for
:param recursive: in the case of a directory, recursive refers to all
subdirectories of the specified directory
:return: dict
"""
raise NotImplementedError
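# A hypothetical sketch of a concrete subclass of this interface for the
# local filesystem. LocalProvider and its behaviour are illustrative
# assumptions, not part of cloudmesh itself.
import os
class LocalProvider(StorageABC):
    def __init__(self, cloud, config):
        self.cloud = cloud
        self.config = config
    def list(self, service=None, source=None, recursive=False):
        # Walk the whole tree when recursive, otherwise list a single level.
        if recursive:
            entries = [os.path.join(root, name)
                       for root, _, files in os.walk(source)
                       for name in files]
        else:
            entries = os.listdir(source)
        return {"service": service, "source": source, "files": entries}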
| 35.98913 | 83 | 0.642404 | 394 | 3,311 | 5.385787 | 0.180203 | 0.055137 | 0.029689 | 0.039585 | 0.754477 | 0.704524 | 0.704524 | 0.704524 | 0.704524 | 0.662111 | 0 | 0 | 0.304138 | 3,311 | 91 | 84 | 36.384615 | 0.921007 | 0.605859 | 0 | 0.411765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.411765 | false | 0 | 0.058824 | 0 | 0.529412 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
2edce443d04a33ac7512cda6997b9a34ecdf61aa | 1,363 | py | Python | Jupyter/rdkrpcChem.py | MooersLab/jupyterlabpymolpysnipsplus | b886750d63372434df53d4d6d7cdad6cb02ae4e7 | [
"MIT"
] | null | null | null | Jupyter/rdkrpcChem.py | MooersLab/jupyterlabpymolpysnipsplus | b886750d63372434df53d4d6d7cdad6cb02ae4e7 | [
"MIT"
] | null | null | null | Jupyter/rdkrpcChem.py | MooersLab/jupyterlabpymolpysnipsplus | b886750d63372434df53d4d6d7cdad6cb02ae4e7 | [
"MIT"
] | null | null | null | # Description: Demo of the use of the RPC server with a drug compound via the rdkit python module.
# Source: placeHolder
"""
cmd.do('import os;')
cmd.do('import rdkit;')
cmd.do('from rdkit import Chem;')
cmd.do('from rdkit.Chem import AllChem;')
cmd.do('from rdkit.Chem import PyMol;')
cmd.do('')
cmd.do('s = PyMol.MolViewer();')
cmd.do('mol = Chem.MolFromSmiles("${1:CCOCCn1c(C2CC[NH+](CCc3ccc(C(C)(C)C(=O)[O-])cc3)CC2)nc2ccccc21}");')
cmd.do('mol = AllChem.AddHs(mol);')
cmd.do('AllChem.EmbedMolecule(mol);')
cmd.do('AllChem.MMFFOptimizeMolecule(mol);')
cmd.do('s.ShowMol(mol, name = "${2:bilastine}", showOnly = False);')
cmd.do('s.Zoom("${2:bilastine}");')
cmd.do('s.SetDisplayStyle("${2:bilastine}", "sticks");')
cmd.do('s.GetPNG(preDelay=5);')
"""
cmd.do('import os;')
cmd.do('import rdkit;')
cmd.do('from rdkit import Chem;')
cmd.do('from rdkit.Chem import AllChem;')
cmd.do('from rdkit.Chem import PyMol;')
cmd.do('')
cmd.do('s = PyMol.MolViewer();')
cmd.do('mol = Chem.MolFromSmiles("CCOCCn1c(C2CC[NH+](CCc3ccc(C(C)(C)C(=O)[O-])cc3)CC2)nc2ccccc21");')
cmd.do('mol = AllChem.AddHs(mol);')
cmd.do('AllChem.EmbedMolecule(mol);')
cmd.do('AllChem.MMFFOptimizeMolecule(mol);')
cmd.do('s.ShowMol(mol, name = "bilastine", showOnly = False);')
cmd.do('s.Zoom("bilastine");')
cmd.do('s.SetDisplayStyle("bilastine", "sticks");')
cmd.do('s.GetPNG(preDelay=5);')
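# Note (an assumption based on standard RDKit/PyMOL usage, not stated above):
# PyMol.MolViewer talks to PyMOL over XML-RPC, so PyMOL must already be
# running with its RPC server enabled (e.g., launched as `pymol -R`, which
# listens on port 9123 by default) before the ShowMol/Zoom/GetPNG calls work.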
| 36.837838 | 106 | 0.673514 | 214 | 1,363 | 4.28972 | 0.252336 | 0.163399 | 0.065359 | 0.091503 | 0.893246 | 0.827887 | 0.827887 | 0.75817 | 0.679739 | 0.679739 | 0 | 0.017502 | 0.07777 | 1,363 | 36 | 107 | 37.861111 | 0.712808 | 0.545855 | 0 | 0 | 0 | 0.066667 | 0.721311 | 0.322951 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 8 |
2596fd1496f4c0dd2bf55d9a454a8f220e34faa3 | 79 | py | Python | tests/__init__.py | nwithan8/jellyfin-python | 360eb1932d4deac021b9a78f7e39ba3a499dfd43 | [
"BSD-3-Clause"
] | null | null | null | tests/__init__.py | nwithan8/jellyfin-python | 360eb1932d4deac021b9a78f7e39ba3a499dfd43 | [
"BSD-3-Clause"
] | null | null | null | tests/__init__.py | nwithan8/jellyfin-python | 360eb1932d4deac021b9a78f7e39ba3a499dfd43 | [
"BSD-3-Clause"
] | 3 | 2020-02-16T02:01:53.000Z | 2020-07-23T19:30:57.000Z | from . import test_user
from . import test_server
from . import test_userhelper | 26.333333 | 29 | 0.822785 | 12 | 79 | 5.166667 | 0.5 | 0.483871 | 0.677419 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.139241 | 79 | 3 | 29 | 26.333333 | 0.911765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
25e97e587adcbeb500c56e992182400d0dad9c6f | 11,149 | py | Python | webfront/tests/test_pagination.py | ProteinsWebTeam/project-skeleton | 7aeb971ba2d9bfe272e0590bd4484afb61336b96 | [
"Apache-2.0"
] | 6 | 2020-05-25T17:35:52.000Z | 2022-03-26T00:45:55.000Z | webfront/tests/test_pagination.py | ProteinsWebTeam/project-skeleton | 7aeb971ba2d9bfe272e0590bd4484afb61336b96 | [
"Apache-2.0"
] | 76 | 2016-07-29T09:22:34.000Z | 2022-03-15T07:57:17.000Z | webfront/tests/test_pagination.py | ProteinsWebTeam/project-skeleton | 7aeb971ba2d9bfe272e0590bd4484afb61336b96 | [
"Apache-2.0"
] | 1 | 2017-04-09T20:08:30.000Z | 2017-04-09T20:08:30.000Z | from webfront.tests.InterproRESTTestCase import InterproRESTTestCase
from rest_framework import status
def get_next_value_from_response(response):
try:
return "/api" + response.data["next"].split("/api")[1]
except (AttributeError, IndexError, KeyError):
return None
def get_previous_value_from_response(response):
try:
return "/api" + response.data["previous"].split("/api")[1]
except (AttributeError, IndexError, KeyError):
return None
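# Example of what these helpers do, assuming a typical DRF paginated payload:
# if response.data["next"] is "http://host/api/entry/all?page_size=5&cursor=x",
# get_next_value_from_response returns "/api/entry/all?page_size=5&cursor=x",
# and both helpers return None when the link is absent or null (first or last
# page).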
class PaginationOverSingleEndpointTest(InterproRESTTestCase):
def test_pagesize(self):
for size in range(1, 5):
response = self.client.get("/api/entry/all?page_size={}".format(size))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data["results"]), size)
def test_pagesize_larger_than_total(self):
size = 20
response = self.client.get("/api/entry/all?page_size={}".format(size))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], len(response.data["results"]))
self.assertLess(response.data["count"], size)
def test_next_and_back_should_be_the_same(self):
for size in range(1, 5):
response = self.client.get("/api/entry/all?page_size={}".format(size))
self.assertEqual(response.status_code, status.HTTP_200_OK)
next_url = get_next_value_from_response(response)
self.assertIsNotNone(next_url)
next_response = self.client.get(next_url)
self.assertEqual(next_response.status_code, status.HTTP_200_OK)
previous_url = get_previous_value_from_response(next_response)
self.assertIsNotNone(previous_url)
previous_response = self.client.get(previous_url)
self.assertEqual(previous_response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, previous_response.data)
def test_walking_through_all_pages_and_back(self):
full_response = self.client.get("/api/entry/all?page_size=20")
self.assertEqual(full_response.status_code, status.HTTP_200_OK)
for size in range(1, 10):
response = self.client.get("/api/entry/all?page_size={}".format(size))
self.assertEqual(
response.data["results"], full_response.data["results"][:size]
)
next_url = get_next_value_from_response(response)
start = size
previous_url = None
while next_url is not None:
response = self.client.get(next_url)
self.assertEqual(
response.data["results"],
full_response.data["results"][start : start + size],
)
next_url = get_next_value_from_response(response)
previous_url = get_previous_value_from_response(response)
start = start + size
start = start - 2 * size
while previous_url is not None:
response = self.client.get(previous_url)
self.assertEqual(
response.data["results"],
full_response.data["results"][start : start + size],
)
previous_url = get_previous_value_from_response(response)
start = start - size
class PaginationOverMultipleEndpointTest(InterproRESTTestCase):
def test_pagesize(self):
for size in range(1, 5):
response = self.client.get(
"/api/entry/all/protein?page_size={}".format(size)
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data["results"]), size)
def test_pagesize_larger_than_total(self):
size = 20
response = self.client.get("/api/entry/all/protein?page_size={}".format(size))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], len(response.data["results"]))
self.assertLess(response.data["count"], size)
def test_next_and_back_should_be_the_same(self):
for size in range(1, 5):
response = self.client.get(
"/api/entry/all/protein?page_size={}".format(size)
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
next_url = get_next_value_from_response(response)
self.assertIsNotNone(next_url)
next_response = self.client.get(next_url)
self.assertEqual(next_response.status_code, status.HTTP_200_OK)
previous_url = get_previous_value_from_response(next_response)
self.assertIsNotNone(previous_url)
previous_response = self.client.get(previous_url)
self.assertEqual(previous_response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], previous_response.data["count"])
self.assertEqual(
response.data["results"], previous_response.data["results"]
)
self.assertEqual(response.data["next"], previous_response.data["next"])
# self.assertEqual(response.data["previous"], previous_response.data["previous"])
def test_next_until_the_end(self):
full_response = self.client.get("/api/entry/all/protein?page_size=20")
self.assertEqual(full_response.status_code, status.HTTP_200_OK)
for size in range(1, 10):
response = self.client.get(
"/api/entry/all/protein?page_size={}".format(size)
)
count = len(response.data["results"])
next_url = get_next_value_from_response(response)
start = size
while next_url is not None:
response = self.client.get(next_url)
count += len(response.data["results"])
next_url = get_next_value_from_response(response)
start = start + size
self.assertEqual(len(full_response.data["results"]), count)
def test_goto_the_end_and_prev_back(self):
full_response = self.client.get("/api/entry/all/protein?page_size=20")
self.assertEqual(full_response.status_code, status.HTTP_200_OK)
for size in range(1, 10):
response = self.client.get(
"/api/entry/all/protein?page_size={}".format(size)
)
count = len(response.data["results"])
next_url = get_next_value_from_response(response)
start = size
previous_url = None
while next_url is not None:
response = self.client.get(next_url)
count += len(response.data["results"])
next_url = get_next_value_from_response(response)
previous_url = get_previous_value_from_response(response)
start = start + size
self.assertEqual(len(full_response.data["results"]), count)
count -= len(response.data["results"])
start = start - 2 * size
while previous_url is not None:
response = self.client.get(previous_url)
count -= len(response.data["results"])
previous_url = get_previous_value_from_response(response)
start = start - size
self.assertEqual(count, 0)
class PaginationOverIDASearch(InterproRESTTestCase):
def test_pagesize(self):
for size in range(1, 2):
response = self.client.get(
"/api/entry?ida_search=IPR003165&page_size={}".format(size)
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data["results"]), size)
def test_pagesize_larger_than_total(self):
size = 20
response = self.client.get(
"/api/entry?ida_search=IPR003165&page_size={}".format(size)
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], len(response.data["results"]))
self.assertLess(response.data["count"], size)
def test_next_and_back_should_be_the_same(self):
response = self.client.get("/api/entry?ida_search=IPR003165&page_size=1")
self.assertEqual(response.status_code, status.HTTP_200_OK)
next_url = get_next_value_from_response(response)
self.assertIsNotNone(next_url)
next_response = self.client.get(next_url)
self.assertEqual(next_response.status_code, status.HTTP_200_OK)
previous_url = get_previous_value_from_response(next_response)
self.assertIsNotNone(previous_url)
previous_response = self.client.get(previous_url)
self.assertEqual(previous_response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], previous_response.data["count"])
self.assertEqual(response.data["results"], previous_response.data["results"])
self.assertEqual(response.data["next"], previous_response.data["next"])
def test_search_by_a_single_accession_paginated(self):
response = self.client.get("/api/entry?ida_search=IPR003165&page_size=1")
self.assertEqual(response.status_code, status.HTTP_200_OK)
class PaginationOverEntryIDA(InterproRESTTestCase):
def test_pagesize(self):
for size in range(1, 2):
response = self.client.get(
"/api/entry/interpro/IPR003165?ida&page_size={}".format(size)
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data["results"]), size)
def test_pagesize_larger_than_total(self):
size = 20
response = self.client.get(
"/api/entry/interpro/IPR003165?ida&page_size={}".format(size)
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], len(response.data["results"]))
self.assertLess(response.data["count"], size)
def test_next_and_back_should_be_the_same(self):
response = self.client.get("/api/entry/interpro/IPR003165?ida&page_size=1")
self.assertEqual(response.status_code, status.HTTP_200_OK)
next_url = get_next_value_from_response(response)
self.assertIsNotNone(next_url)
next_response = self.client.get(next_url)
self.assertEqual(next_response.status_code, status.HTTP_200_OK)
previous_url = get_previous_value_from_response(next_response)
self.assertIsNotNone(previous_url)
previous_response = self.client.get(previous_url)
self.assertEqual(previous_response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], previous_response.data["count"])
self.assertEqual(response.data["results"], previous_response.data["results"])
self.assertEqual(response.data["next"], previous_response.data["next"])
def test_search_by_a_single_accession_paginated(self):
response = self.client.get("/api/entry/interpro/IPR003165?ida&page_size=1")
self.assertEqual(response.status_code, status.HTTP_200_OK)
| 47.645299 | 93 | 0.652435 | 1,325 | 11,149 | 5.230189 | 0.064906 | 0.093506 | 0.085714 | 0.1 | 0.947619 | 0.937518 | 0.92381 | 0.92381 | 0.92381 | 0.904185 | 0 | 0.01967 | 0.238497 | 11,149 | 233 | 94 | 47.849785 | 0.796584 | 0.007086 | 0 | 0.850242 | 0 | 0 | 0.095229 | 0.066498 | 0 | 0 | 0 | 0 | 0.294686 | 1 | 0.091787 | false | 0 | 0.009662 | 0 | 0.140097 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d32cb95291bb6e9e57a6eab370c99b31a392e9b5 | 164 | py | Python | API/Python/reactrobotics/__init__.py | eborghi10/DogBotSoftware | 40a06e01bb63cc7c1bfc5458850092db0b44ddc8 | [
"MIT"
] | null | null | null | API/Python/reactrobotics/__init__.py | eborghi10/DogBotSoftware | 40a06e01bb63cc7c1bfc5458850092db0b44ddc8 | [
"MIT"
] | null | null | null | API/Python/reactrobotics/__init__.py | eborghi10/DogBotSoftware | 40a06e01bb63cc7c1bfc5458850092db0b44ddc8 | [
"MIT"
] | null | null | null | # Tools for interacting with DogBot and other React Robotics systems
#
# July 2018, Nic Greenway, React Robotics
import pydogbotapi
def version():
return "1.0"
| 18.222222 | 68 | 0.756098 | 23 | 164 | 5.391304 | 0.913043 | 0.209677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.044444 | 0.176829 | 164 | 8 | 69 | 20.5 | 0.874074 | 0.646341 | 0 | 0 | 0 | 0 | 0.055556 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
d36826e1b3c63fc1c5bd26a28e8df97ee4639229 | 11,013 | py | Python | tests/st/ops/gpu/test_squared_difference_op.py | GuoSuiming/mindspore | 48afc4cfa53d970c0b20eedfb46e039db2a133d5 | [
"Apache-2.0"
] | 3,200 | 2020-02-17T12:45:41.000Z | 2022-03-31T20:21:16.000Z | tests/st/ops/gpu/test_squared_difference_op.py | zimo-geek/mindspore | 665ec683d4af85c71b2a1f0d6829356f2bc0e1ff | [
"Apache-2.0"
] | 176 | 2020-02-12T02:52:11.000Z | 2022-03-28T22:15:55.000Z | tests/st/ops/gpu/test_squared_difference_op.py | zimo-geek/mindspore | 665ec683d4af85c71b2a1f0d6829356f2bc0e1ff | [
"Apache-2.0"
] | 621 | 2020-03-09T01:31:41.000Z | 2022-03-30T03:43:19.000Z | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
class SquaredDifference(nn.Cell):
def __init__(self):
super(SquaredDifference, self).__init__()
self.squaredDiff = P.SquaredDifference()
def construct(self, x, y):
return self.squaredDiff(x, y)
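# Editor's sketch (not part of the original suite): every test below compares
# the GPU kernel against this elementwise reference, which follows NumPy
# broadcasting rules. The helper name is hypothetical and unused by the tests.
def reference_squared_difference(x, y):
    """Reference semantics the kernel is checked against: (x - y) ** 2."""
    diff = x - y
    return diff * diff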
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_nobroadcast_f16():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.uniform(10, 20, (3, 4, 5, 2)).astype(np.float16)
input_y = np.random.uniform(40, 50, (3, 4, 5, 2)).astype(np.float16)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
assert np.all(output == expect)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_nobroadcast_f32():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(3, 4, 5, 2).astype(np.float32)
input_y = np.random.rand(3, 4, 5, 2).astype(np.float32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
assert np.all(output == expect)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_nobroadcast_int32():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(3, 4, 5, 2).astype(np.int32)
input_y = np.random.rand(3, 4, 5, 2).astype(np.int32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
assert np.all(output == expect)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_int32():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(1, 4, 1, 2).astype(np.int32)
input_y = np.random.rand(3, 1, 5, 1).astype(np.int32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
assert np.all(output == expect)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_f32():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(1, 4, 1, 2).astype(np.float32)
input_y = np.random.rand(3, 1, 5, 1).astype(np.float32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
assert np.all(output == expect)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_f16():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(1, 4, 1, 2).astype(np.float16)
input_y = np.random.rand(3, 1, 5, 1).astype(np.float16)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
assert np.all(output == expect)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_bool():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
    input_x = np.random.rand(1, 4, 1, 2).astype(np.bool_)  # np.bool_ replaces the removed np.bool alias
input_y = np.random.uniform(10, 20, (3, 1, 5, 1)).astype(np.float32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
error = np.ones(shape=np.array(output.shape, dtype=int))*1.0e-6
double_check = np.abs(output-expect)/expect
assert np.all(double_check < error)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_nobroadcast_bool():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
    input_x = np.random.rand(3, 4, 5, 2).astype(np.bool_)  # np.bool_ replaces the removed np.bool alias
input_y = np.random.rand(3, 4, 5, 2).astype(np.float32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
error = np.ones(shape=np.array(output.shape, dtype=int))*1.0e-6
double_check = np.abs(output-expect)/expect
assert np.all(double_check < error)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_int32_f16():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(1, 4, 1, 2).astype(np.int32)
input_y = np.random.uniform(10, 20, (3, 1, 5, 1)).astype(np.float16)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
error = np.ones(shape=np.array(output.shape, dtype=int))*1.0e-3
double_check = np.abs(output-expect)/expect
assert np.all(double_check < error)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_int32_f32():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(1, 4, 1, 2).astype(np.int32)
input_y = np.random.uniform(10, 20, (3, 1, 5, 1)).astype(np.float32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
error = np.ones(shape=np.array(output.shape, dtype=int))*1.0e-6
double_check = np.abs(output-expect)/expect
assert np.all(double_check < error)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_nobroadcast_int32_f16():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(2, 4, 3, 2).astype(np.int32)
input_y = np.random.uniform(10, 20, (2, 4, 3, 2)).astype(np.float16)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
error = np.ones(shape=np.array(output.shape, dtype=int))*1.0e-3
double_check = np.abs(output-expect)/expect
assert np.all(double_check < error)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_nobroadcast_int32_f32():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(2, 4, 3, 2).astype(np.int32)
input_y = np.random.uniform(10, 20, (2, 4, 3, 2)).astype(np.float32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
error = np.ones(shape=np.array(output.shape, dtype=int))*1.0e-6
double_check = np.abs(output-expect)/expect
assert np.all(double_check < error)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_f32_scalar_tensor():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(2).astype(np.float32)
input_y = np.random.rand(3, 1, 5, 1).astype(np.float32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
assert np.all(output == expect)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_f32_tensor_tensor():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(1, 2).astype(np.float32)
input_y = np.random.rand(3, 1, 5, 1).astype(np.float32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
assert np.all(output == expect)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_f32_tensor_tensor_dim_over_7():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(1, 2).astype(np.float32)
input_y = np.random.rand(3, 1, 5, 1, 3, 4, 2, 1).astype(np.float32)
    with pytest.raises(RuntimeError):
        net(Tensor(input_x), Tensor(input_y))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_f32_tensor_tensor_cannot_broadcast():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.rand(5, 3).astype(np.float32)
input_y = np.random.rand(3, 1, 5, 1, 3, 4, 2).astype(np.float32)
    with pytest.raises(ValueError):
        net(Tensor(input_x), Tensor(input_y))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_int_f32_precision():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
input_x = np.random.randint(20, 30, (1, 2)).astype(np.int32)
input_y = np.random.rand(3, 1, 5, 1).astype(np.float32)
output = net(Tensor(input_x), Tensor(input_y)).asnumpy()
diff = input_x-input_y
expect = diff*diff
error = np.ones(shape=np.array(output.shape, dtype=int))*1.0e-3
double_thousand = np.abs(output-expect)/expect
assert np.all(double_thousand < error)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_type_error():
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
np.random.seed(42)
net = SquaredDifference()
    input_x = np.random.randint(20, 30, (1, 2)).astype(np.bool_)  # np.bool_ replaces the removed np.bool alias
    input_y = np.random.rand(3, 1, 5, 1).astype(np.bool_)
    with pytest.raises(TypeError):
        net(Tensor(input_x), Tensor(input_y))
| 34.961905 | 78 | 0.707891 | 1,715 | 11,013 | 4.367347 | 0.093294 | 0.072096 | 0.043258 | 0.05287 | 0.886115 | 0.88518 | 0.883712 | 0.882911 | 0.877971 | 0.86729 | 0 | 0.042272 | 0.149369 | 11,013 | 314 | 79 | 35.073248 | 0.757259 | 0.057932 | 0 | 0.784615 | 0 | 0 | 0.005212 | 0 | 0 | 0 | 0 | 0 | 0.069231 | 1 | 0.076923 | false | 0 | 0.023077 | 0.003846 | 0.107692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6c9e2be200f7fe54da5ac870cce50c0cf20766de | 896 | py | Python | psana/psana/detector/opal.py | slac-lcls/lcls2 | c89f61634005339ce409b06acedfa27708327b0f | [
"BSD-3-Clause-LBNL"
] | 16 | 2017-11-09T17:10:56.000Z | 2022-03-09T23:03:10.000Z | psana/psana/detector/opal.py | slac-lcls/lcls2 | c89f61634005339ce409b06acedfa27708327b0f | [
"BSD-3-Clause-LBNL"
] | 6 | 2017-12-12T19:30:05.000Z | 2020-07-09T00:28:33.000Z | psana/psana/detector/opal.py | slac-lcls/lcls2 | c89f61634005339ce409b06acedfa27708327b0f | [
"BSD-3-Clause-LBNL"
] | 25 | 2017-09-18T20:02:43.000Z | 2022-03-27T22:27:42.000Z |
import numpy as np
from psana.detector.opal_base import opal_base, logging
logger = logging.getLogger(__name__)
class opal_raw_2_0_0(opal_base):
def __init__(self, *args, **kwa):
opal_base.__init__(self, *args, **kwa)
#self._add_fields() < overrides det.raw.image(...)
class opal_ttfex_2_0_0(opal_base):
def __init__(self, *args, **kwa):
opal_base.__init__(self, *args, **kwa)
self._add_fields()
class opal_simfex_2_0_0(opal_base):
def __init__(self, *args, **kwa):
opal_base.__init__(self, *args, **kwa)
self._add_fields()
class opal_ref_2_0_0(opal_base):
def __init__(self, *args, **kwa):
opal_base.__init__(self, *args, **kwa)
self._add_fields()
# EOF
| 23.578947 | 58 | 0.657366 | 132 | 896 | 3.810606 | 0.234848 | 0.190855 | 0.238569 | 0.298211 | 0.723658 | 0.723658 | 0.723658 | 0.723658 | 0.723658 | 0.723658 | 0 | 0.02095 | 0.200893 | 896 | 37 | 59 | 24.216216 | 0.681564 | 0.059152 | 0 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.227273 | false | 0 | 0.090909 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 10 |
6caa69a4c1ea81dc77151712c57f74bdc7469485 | 122 | py | Python | src/test/python/test_modifyInputFile.py | JuliGonzalez/TDP | 7e32ad40ca593d2eb1fea3d56948729e0b236314 | [
"MIT"
] | null | null | null | src/test/python/test_modifyInputFile.py | JuliGonzalez/TDP | 7e32ad40ca593d2eb1fea3d56948729e0b236314 | [
"MIT"
] | null | null | null | src/test/python/test_modifyInputFile.py | JuliGonzalez/TDP | 7e32ad40ca593d2eb1fea3d56948729e0b236314 | [
"MIT"
] | null | null | null | from hypothesis import given
from hypothesis import strategies
from hypothesis.extra.pandas import column, data_frames
| 17.428571 | 55 | 0.844262 | 16 | 122 | 6.375 | 0.625 | 0.411765 | 0.392157 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.131148 | 122 | 6 | 56 | 20.333333 | 0.962264 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9f36b6402f1b03515618c324c1bae004e5a460d7 | 2,600 | py | Python | NN_layers/DarkNet19.py | Mxbonn/zigzag_fork | 250ee5e22904ba846dfb106983d46b83bd9ee230 | [
"BSD-3-Clause"
] | 34 | 2020-08-11T14:38:29.000Z | 2022-03-30T10:32:34.000Z | NN_layers/DarkNet19.py | Mxbonn/zigzag_fork | 250ee5e22904ba846dfb106983d46b83bd9ee230 | [
"BSD-3-Clause"
] | 9 | 2020-11-16T19:19:48.000Z | 2022-03-31T18:29:24.000Z | NN_layers/DarkNet19.py | Mxbonn/zigzag_fork | 250ee5e22904ba846dfb106983d46b83bd9ee230 | [
"BSD-3-Clause"
] | 18 | 2020-08-24T07:26:51.000Z | 2022-01-06T00:59:11.000Z | layer_info = \
{1: {'B': 1, 'K': 32, 'C': 3, 'OX': 224, 'OY': 224, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
2: {'B': 1, 'K': 64, 'C': 32, 'OX': 112, 'OY': 112, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
3: {'B': 1, 'K': 128, 'C': 64, 'OX': 56, 'OY': 56, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
4: {'B': 1, 'K': 64, 'C': 128, 'OX': 56, 'OY': 56, 'FX': 1, 'FY': 1, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
5: {'B': 1, 'K': 128, 'C': 64, 'OX': 56, 'OY': 56, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
6: {'B': 1, 'K': 256, 'C': 128, 'OX': 28, 'OY': 28, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
7: {'B': 1, 'K': 128, 'C': 256, 'OX': 28, 'OY': 28, 'FX': 1, 'FY': 1, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
8: {'B': 1, 'K': 256, 'C': 128, 'OX': 28, 'OY': 28, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
9: {'B': 1, 'K': 512, 'C': 256, 'OX': 14, 'OY': 14, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
10: {'B': 1, 'K': 256, 'C': 512, 'OX': 14, 'OY': 14, 'FX': 1, 'FY': 1, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
11: {'B': 1, 'K': 512, 'C': 256, 'OX': 14, 'OY': 14, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
12: {'B': 1, 'K': 256, 'C': 512, 'OX': 14, 'OY': 14, 'FX': 1, 'FY': 1, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
13: {'B': 1, 'K': 512, 'C': 256, 'OX': 14, 'OY': 14, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
14: {'B': 1, 'K': 1024, 'C': 512, 'OX': 7, 'OY': 7, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
15: {'B': 1, 'K': 512, 'C': 1024, 'OX': 7, 'OY': 7, 'FX': 1, 'FY': 1, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
16: {'B': 1, 'K': 1024, 'C': 512, 'OX': 7, 'OY': 7, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
17: {'B': 1, 'K': 512, 'C': 1024, 'OX': 7, 'OY': 7, 'FX': 1, 'FY': 1, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
18: {'B': 1, 'K': 1024, 'C': 512, 'OX': 7, 'OY': 7, 'FX': 3, 'FY': 3, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1},
19: {'B': 1, 'K': 1000, 'C': 1024, 'OX': 7, 'OY': 7, 'FX': 1, 'FY': 1, 'SX': 1, 'SY': 1, 'SFX': 1, 'SFY': 1, 'PY': 0, 'PX': 0, 'G': 1}}
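# Editor's sketch (not in the original file): one way to consume the table
# above. The field meanings assumed here -- B batch, K output channels, C
# input channels, OX/OY output width/height, FX/FY filter size, G groups --
# follow common CNN-benchmark conventions and are an assumption, not taken
# from this repository.
if __name__ == "__main__":
    total_macs = 0
    for idx, l in sorted(layer_info.items()):
        macs = l['B'] * l['K'] * l['C'] * l['OX'] * l['OY'] * l['FX'] * l['FY'] // l['G']
        total_macs += macs
        print("layer {:2d}: {:10.3f} MMAC".format(idx, macs / 1e6))
    print("total: {:.3f} GMAC".format(total_macs / 1e9))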
| 123.809524 | 136 | 0.347692 | 553 | 2,600 | 1.632911 | 0.084991 | 0.042082 | 0.063123 | 0.126246 | 0.911406 | 0.872647 | 0.872647 | 0.872647 | 0.872647 | 0.872647 | 0 | 0.197239 | 0.22 | 2,600 | 20 | 137 | 130 | 0.248028 | 0 | 0 | 0 | 0 | 0 | 0.19 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
9f6c9ff9b71d8b16f2aa3484548a6719c5195a81 | 4,121 | py | Python | 53_丑数.py | wode1/-offer-python- | 22cb55b7a89f48c7d3686f0a7a822c16ff3976f1 | [
"Apache-2.0"
] | null | null | null | 53_丑数.py | wode1/-offer-python- | 22cb55b7a89f48c7d3686f0a7a822c16ff3976f1 | [
"Apache-2.0"
] | null | null | null | 53_丑数.py | wode1/-offer-python- | 22cb55b7a89f48c7d3686f0a7a822c16ff3976f1 | [
"Apache-2.0"
] | null | null | null | '''
把只包含因子2、3和5的数称作丑数(Ugly Number)。例如6、8都是丑数,但14不是,因为它包含因子7。
习惯上我们把1当做是第一个丑数。求按从小到大的顺序的第N个丑数。
'''
class Solution(object):
    # First approach from the book: test every integer for ugliness; slow for large N
def isUgry(self, number):
while number%2==0:
number//=2
while number%3==0:
number//=3
while number%5==0:
number//=5
return number==1
def getUgryNumber(self, index):
if index<=0:
return index
if index==1:
return index
number=0
UgryNumver=0
while UgryNumver<index:
number+=1
if self.isUgry(number):
UgryNumver+=1
return number
# ******************************************************************
    # Second approach: trade space for time (dynamic programming)
def GetUalyNumber_solution(self, index):
        if index is None or index <= 0:
return 0
uglyNumbers=[1]*index
nextIndex=1
index2=0
index3=0
index5=0
while nextIndex<index:
minVal=min(uglyNumbers[index2]*2, uglyNumbers[index3]*3,
uglyNumbers[index5]*5)
uglyNumbers[nextIndex]=minVal
while uglyNumbers[index2]*2 <=uglyNumbers[nextIndex]:
index2 += 1
while uglyNumbers[index3]*3<=uglyNumbers[nextIndex]:
index3+=1
while uglyNumbers[index5]*5<=uglyNumbers[nextIndex]:
index5+=1
nextIndex+=1
return uglyNumbers[-1]
def GetUrlyNumber_solution1(self, index):
        if index is None or index <= 0:
return 0
uglyNumbers=[1]*index
nextIndex=1
index2=0
index3=0
index5=0
while nextIndex<index:
minVal=min(uglyNumbers[index2]*2, uglyNumbers[index3]*3,
uglyNumbers[index5]*5)
uglyNumbers[nextIndex]=minVal
while uglyNumbers[index2]*2<=uglyNumbers[nextIndex]:
index2+=1
while uglyNumbers[index3]*3<=uglyNumbers[nextIndex]:
index3+=1
while uglyNumbers[index5]*5<=uglyNumbers[nextIndex]:
index5+=1
nextIndex+=1
return uglyNumbers[-1]
def GetUrlyNumber_solution2(self, index):
if index==None or index<=0:
return 0
uglyNumbers=[1]*index
nextIndex=1
index2=0
index3=0
index5=0
while nextIndex<index:
minVal=min(uglyNumbers[index2]*2, uglyNumbers[index3]*3,
uglyNumbers[index5]*5)
uglyNumbers[nextIndex]=minVal
while uglyNumbers[index2]*2<=uglyNumbers[nextIndex]:
index2+=1
while uglyNumbers[index3]*3<=uglyNumbers[nextIndex]:
index3+=1
while uglyNumbers[index5]*5<=uglyNumbers[nextIndex]:
index5+=1
nextIndex+=1
return uglyNumbers[-1]
def GetUrlyNumber_solution3(self, index):
if index==None or index<=0:
return 0
uglyNumbers=[1]*index
nextIndex=1
index2=0
index3=0
index5=0
while nextIndex<index:
minVal=min(uglyNumbers[index2]*2, uglyNumbers[index3]*3,
uglyNumbers[index5]*5)
uglyNumbers[nextIndex]=minVal
            while uglyNumbers[index2]*2<=uglyNumbers[nextIndex]:
                index2+=1
            while uglyNumbers[index3]*3<=uglyNumbers[nextIndex]:
                index3+=1
            while uglyNumbers[index5]*5<=uglyNumbers[nextIndex]:
                index5+=1
nextIndex+=1
return uglyNumbers[-1]
def GetUrlyNumber_solution4(self, index):
if index==None or index<=0:
return 0
uglyNumbers=[1]*index
nextIndex=1
index2=0
index3=0
index5=0
while nextIndex<index:
minVal=min(uglyNumbers[index2]*2, uglyNumbers[index3]*3,
uglyNumbers[index5]*5)
uglyNumbers[nextIndex]=minVal
while uglyNumbers[index2]*2<=uglyNumbers[nextIndex]:
index2+=1
while uglyNumbers[index3]*3<=uglyNumbers[nextIndex]:
index3+=1
while uglyNumbers[index5]*5<=uglyNumbers[nextIndex]:
index5+=1
nextIndex+=1
return uglyNumbers[-1]
def GetUrlyNumber_solution5(self, index):
if index==None or index<=0:
return 0
uglyNumbers=[1]*index
nextIndex=1
index2=0
index3=0
index5=0
while nextIndex<index:
            minVal=min(uglyNumbers[index2]*2, uglyNumbers[index3]*3,
uglyNumbers[index5]*5)
uglyNumbers[nextIndex]=minVal
while uglyNumbers[index2]*2<=uglyNumbers[nextIndex]:
index2+=1
while uglyNumbers[index3]*3<=uglyNumbers[nextIndex]:
index3+=1
while uglyNumbers[index5]*5<=uglyNumbers[nextIndex]:
index5+=1
nextIndex+=1
return uglyNumbers[-1]
s=Solution()
# print(s.isUgry(11))
# print(s.getUgryNumber(11))
print(s.GetUalyNumber_solution(11))
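# Editor's cross-check (sketch, not in the original file): the slow
# divisibility test and the dynamic-programming version should agree on the
# first few ugly numbers (1, 2, 3, 4, 5, 6, 8, 9, 10, 12, ...).
for n in range(1, 11):
    assert s.getUgryNumber(n) == s.GetUrlyNumber_solution2(n), n
print([s.GetUrlyNumber_solution2(n) for n in range(1, 11)])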
| 17.028926 | 69 | 0.682844 | 507 | 4,121 | 5.536489 | 0.102564 | 0.171001 | 0.072675 | 0.113644 | 0.817243 | 0.810118 | 0.80228 | 0.767367 | 0.767367 | 0.767367 | 0 | 0.062647 | 0.174958 | 4,121 | 241 | 70 | 17.099585 | 0.762941 | 0.05654 | 0 | 0.769231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055944 | false | 0 | 0 | 0 | 0.174825 | 0.006993 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9f840d87cab535f58a3973031c778b20af0d6dba | 19,490 | py | Python | torchmps/prob_mps.py | kaywenith/TorchMPS | abd2dca54f3eb35b931cfb8c43ba09a6fa94ae49 | [
"MIT"
] | null | null | null | torchmps/prob_mps.py | kaywenith/TorchMPS | abd2dca54f3eb35b931cfb8c43ba09a6fa94ae49 | [
"MIT"
] | null | null | null | torchmps/prob_mps.py | kaywenith/TorchMPS | abd2dca54f3eb35b931cfb8c43ba09a6fa94ae49 | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2021 Jacob Miller
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Uniform and non-uniform probabilistic MPS classes"""
from math import sqrt
from typing import Optional, Callable
import torch
from torch import Tensor, nn
from torchmps.mps_base import (
contract_matseq,
near_eye_init,
normal_init,
get_mat_slices,
get_log_norm,
slim_eval_fun,
)
from torchmps.utils2 import phaseify
from torchmps.embeddings import DataDomain, FixedEmbedding, TrainableEmbedding
class ProbMPS(nn.Module):
r"""
Fixed-length MPS model using L2 probabilities for generative modeling
Probabilities of fixed-length inputs are obtained via the Born rule of
quantum mechanics, making ProbMPS a "Born machine" model. For a model
acting on length-n inputs, the probability assigned to the sequence
:math:`x = x_1 x_2 \dots x_n` is :math:`P(x) = |h_n^T \omega|^2 / Z`,
where :math:`Z` is a normalization constant and the hidden state
vectors :math:`h_t` are updated according to:
.. math::
h_t = (A_t[x_t] + B) h_{t-1},
with :math:`h_0 := \alpha` (for :math:`\alpha, \omega` trainable
parameter vectors), :math:`A_t[i]` the i'th matrix slice of a
third-order core tensor for the t'th input, and :math:`B` an optional
bias matrix.
Note that calling a :class:`ProbMPS` instance with given input will
return the **logarithm** of the input probabilities, to avoid underflow
in the case of longer sequences. To get the negative log likelihood
loss for a batch of inputs, use the :attr:`loss` function of the
:class:`ProbMPS`.
Args:
seq_len: Length of fixed-length discrete sequence inputs. Inputs
can be either batches of discrete sequences, with a shape of
            `(batch, seq_len)`, or batches of vector sequences, with a
            shape of `(batch, seq_len, input_dim)`.
input_dim: Dimension of the inputs to each core. For vector
sequence inputs this is the dimension of the input vectors,
while for discrete sequence inputs this is the size of the
discrete alphabet.
bond_dim: Dimension of the bond spaces linking adjacent MPS cores,
which are assumed to be equal everywhere.
complex_params: Whether model parameters are complex or real. The
former allows more expressivity, but is less common in Pytorch.
Default: ``False``
use_bias: Whether to use a trainable bias matrix in evaluation.
Default: ``False``
init_method: String specifying how to initialize the MPS core tensors.
Giving "near_eye" initializes all core slices to near the identity,
while "normal" has all core elements be normally distributed.
Default: ``"near_eye"``
        embed_fun: Function which embeds discrete or continuous scalar values
into vectors of dimension `input_dim`. Must be able to take in a
tensor of any order `n` and output a tensor of order `n+1`, where
the scalar values of the input are represented as vectors in the
*last* axis of the output.
Default: ``None`` (no embedding function)
domain: Instance of the `DataDomain` class, which specifies whether
the input data domain is continuous vs. discrete, and what range
of values the domain takes.
Default: ``None``
"""
def __init__(
self,
seq_len: int,
input_dim: int,
bond_dim: int,
complex_params: bool = False,
use_bias: bool = False,
init_method: str = "near_eye",
embed_fun: Optional[Callable] = None,
domain: Optional[DataDomain] = None,
) -> None:
super().__init__()
assert min(seq_len, input_dim, bond_dim) > 0
# Initialize core tensor and edge vectors
assert init_method in ("near_eye", "normal")
init_fun = near_eye_init if init_method == "near_eye" else normal_init
core_tensors = init_fun(
(seq_len, input_dim, bond_dim, bond_dim), is_complex=complex_params
)
# Left and right vectors initialized to be identical, since it avoids
# issues with exponentially small overlap
rand_vec = torch.randn(bond_dim) / sqrt(bond_dim)
edge_vecs = torch.stack((rand_vec,) * 2)
if complex_params:
edge_vecs = phaseify(edge_vecs)
self.core_tensors = nn.Parameter(core_tensors)
self.edge_vecs = nn.Parameter(edge_vecs)
# Initialize (optional) bias matrices at zero
if use_bias:
bias_mat = torch.zeros(bond_dim, bond_dim)
if complex_params:
bias_mat = phaseify(bias_mat)
self.bias_mat = nn.Parameter(bias_mat)
# Set other MPS attributes
self.complex_params = complex_params
self.embedding = None
# Set up embedding object if desired
if isinstance(embed_fun, (FixedEmbedding, TrainableEmbedding)):
self.embedding = embed_fun
if hasattr(embed_fun, "emb_dim"):
assert self.embedding.emb_dim == input_dim
elif embed_fun is not None:
assert domain is not None
self.embedding = FixedEmbedding(embed_fun, domain)
assert self.embedding.emb_dim == input_dim
def forward(
self, input_data: Tensor, slim_eval: bool = False, parallel_eval: bool = False
) -> Tensor:
"""
Get the log probabilities of batch of input data
Args:
input_data: Sequential with shape `(batch, seq_len)`, for
discrete inputs, or shape `(batch, seq_len, input_dim)`,
for vector inputs.
slim_eval: Whether to use a less memory intensive MPS
evaluation function, useful for larger inputs.
Default: ``False``
parallel_eval: Whether to use a more memory intensive parallel
MPS evaluation function, useful for smaller models.
Overrides `slim_eval` when both are requested.
Default: ``False``
Returns:
log_probs: Vector with shape `(batch,)` giving the natural
logarithm of the probability of each input sequence.
"""
# Apply embedding function if it is defined
if self.embedding is not None:
input_data = self.embedding(input_data)
if slim_eval:
if self.use_bias:
raise ValueError("Bias matrices not supported for slim_eval")
psi_vals, log_scales = slim_eval_fun(
input_data, self.core_tensors, self.edge_vecs
)
else:
# Contract inputs with core tensors and add bias matrices
mat_slices = get_mat_slices(input_data, self.core_tensors)
if self.use_bias:
mat_slices = mat_slices + self.bias_mat[None, None]
# Contract all bond dims to get (unnormalized) prob amplitudes
psi_vals, log_scales = contract_matseq(
mat_slices,
self.edge_vecs[0],
self.edge_vecs[1],
parallel_eval,
log_format=True,
)
# Get log normalization and check for infinities
log_norm = self.log_norm()
assert log_norm.isfinite()
assert torch.all(psi_vals.isfinite())
# Compute unnormalized log probabilities
log_uprobs = torch.log(torch.abs(psi_vals)) + log_scales
# Return normalized probabilities
return 2 * log_uprobs - log_norm
def loss(
self, input_data: Tensor, slim_eval: bool = False, parallel_eval: bool = False
) -> Tensor:
"""
Get the negative log likelihood loss for batch of input data
Args:
            input_data: Sequential with shape `(batch, seq_len)`, for
                discrete inputs, or shape `(batch, seq_len, input_dim)`,
for vector inputs.
slim_eval: Whether to use a less memory intensive MPS
evaluation function, useful for larger inputs.
Default: ``False``
parallel_eval: Whether to use a more memory intensive parallel
MPS evaluation function, useful for smaller models.
Overrides `slim_eval` when both are requested.
Default: ``False``
Returns:
loss_val: Scalar value giving average of the negative log
likelihood loss of all sequences in input batch.
"""
return -torch.mean(
self.forward(input_data, slim_eval=slim_eval, parallel_eval=parallel_eval)
)
def log_norm(self) -> Tensor:
r"""
Compute the log normalization of the MPS for its fixed-size input
Uses iterated tensor contraction to compute :math:`\log(|\psi|^2)`,
where :math:`\psi` is the n'th order tensor described by the
contraction of MPS parameter cores. In the Born machine paradigm,
this is also :math:`\log(Z)`, for :math:`Z` the normalization
constant for the probability.
Returns:
l_norm: Scalar value giving the log squared L2 norm of the
n'th order prob. amp. tensor described by the MPS.
"""
# Account for bias matrices before calling log norm implementation
if self.use_bias:
core_tensors = self.core_tensors + self.bias_mat[None, None]
else:
core_tensors = self.core_tensors
# Account for non-trivial lambda function in the embedding
lamb_mat = None if self.embedding is None else self.embedding.lamb_mat
return get_log_norm(core_tensors, self.edge_vecs, lamb_mat=lamb_mat)
@property
def seq_len(self):
return self.core_tensors.shape[0]
@property
def input_dim(self):
return self.core_tensors.shape[1]
@property
def bond_dim(self):
return self.core_tensors.shape[2]
@property
def use_bias(self):
return hasattr(self, "bias_mat")
class ProbUnifMPS(ProbMPS):
r"""
Uniform MPS model using L2 probabilities for generative modeling
Probabilities of sequential inputs are obtained via the Born rule of
quantum mechanics, making ProbUnifMPS a "Born machine" model. Given an
input sequence of length n, the probability assigned to the sequence
:math:`x = x_1 x_2 \dots x_n` is :math:`P(x) = |h_n^T \omega|^2 / Z`,
where :math:`Z` is a normalization constant and the hidden state
vectors :math:`h_t` are updated according to:
.. math::
h_t = (A[x_t] + B) h_{t-1},
with :math:`h_0 := \alpha` (for :math:`\alpha, \omega` trainable
parameter vectors), :math:`A[i]` the i'th matrix slice of the
third-order MPS core tensor, and :math:`B` an optional bias matrix.
Note that calling a :class:`ProbUnifMPS` instance with given input will
return the **logarithm** of the input probabilities, to avoid underflow
in the case of longer sequences. To get the negative log likelihood
loss for a batch of inputs, use the :attr:`loss` function of the
:class:`ProbUnifMPS`.
Args:
input_dim: Dimension of the inputs to the uMPS core. For vector
sequence inputs this is the dimension of the input vectors,
while for discrete sequence inputs this is the size of the
discrete alphabet.
bond_dim: Dimension of the bond spaces linking copies of uMPS core.
complex_params: Whether model parameters are complex or real. The
former allows more expressivity, but is less common in Pytorch.
Default: ``False``
use_bias: Whether to use a trainable bias matrix in evaluation.
Default: ``False``
init_method: String specifying how to initialize the MPS core tensors.
Giving "near_eye" initializes all core slices to near the identity,
while "normal" has all core elements be normally distributed.
Default: ``"near_eye"``
        embed_fun: Function which embeds discrete or continuous scalar values
into vectors of dimension `input_dim`. Must be able to take in a
tensor of any order `n` and output a tensor of order `n+1`, where
the scalar values of the input are represented as vectors in the
*last* axis of the output.
Default: ``None`` (no embedding function)
domain: Instance of the `DataDomain` class, which specifies whether
the input data domain is continuous vs. discrete, and what range
of values the domain takes.
Default: ``None``
"""
def __init__(
self,
input_dim: int,
bond_dim: int,
complex_params: bool = False,
use_bias: bool = False,
init_method: str = "near_eye",
embed_fun: Optional[Callable] = None,
domain: Optional[DataDomain] = None,
) -> None:
super(ProbMPS, self).__init__()
assert min(input_dim, bond_dim) > 0
# Initialize core tensor and edge vectors
assert init_method in ("near_eye", "normal")
init_fun = near_eye_init if init_method == "near_eye" else normal_init
core_tensors = init_fun(
(input_dim, bond_dim, bond_dim), is_complex=complex_params
)
# Left and right vectors initialized to be identical, since it avoids
# issues with exponentially small overlap
rand_vec = torch.randn(bond_dim) / sqrt(bond_dim)
edge_vecs = torch.stack((rand_vec, rand_vec.conj()))
if complex_params:
edge_vecs = phaseify(edge_vecs)
self.core_tensors = nn.Parameter(core_tensors)
self.edge_vecs = nn.Parameter(edge_vecs)
# Initialize (optional) bias matrices at zero
if use_bias:
bias_mat = torch.zeros(bond_dim, bond_dim)
if complex_params:
bias_mat = phaseify(bias_mat)
self.bias_mat = nn.Parameter(bias_mat)
# Set other MPS attributes
self.complex_params = complex_params
self.embedding = None
# Set up embedding object if desired
if isinstance(embed_fun, (FixedEmbedding, TrainableEmbedding)):
self.embedding = embed_fun
if hasattr(embed_fun, "emb_dim"):
assert self.embedding.emb_dim == input_dim
elif embed_fun is not None:
assert domain is not None
self.embedding = FixedEmbedding(embed_fun, domain)
assert self.embedding.emb_dim == input_dim
def forward(
self, input_data: Tensor, slim_eval: bool = False, parallel_eval: bool = False
) -> Tensor:
"""
Get the log probabilities of batch of input data
Args:
input_data: Sequential with shape `(batch, seq_len)`, for
discrete inputs, or shape `(batch, seq_len, input_dim)`,
for vector inputs.
slim_eval: Whether to use a less memory intensive MPS
evaluation function, useful for larger inputs.
Default: ``False``
parallel_eval: Whether to use a more memory intensive parallel
MPS evaluation function, useful for smaller models.
Overrides `slim_eval` when both are requested.
Default: ``False``
Returns:
log_probs: Vector with shape `(batch,)` giving the natural
logarithm of the probability of each input sequence.
"""
batch, seq_len = input_data.shape[:2]
# Apply embedding function if it is defined
if self.embedding is not None:
input_data = self.embedding(input_data)
if slim_eval:
if self.use_bias:
raise ValueError("Bias matrices not supported for slim_eval")
psi_vals, log_scales = slim_eval_fun(
input_data, self.core_tensors, self.edge_vecs
)
else:
# Contract inputs with core tensors and add bias matrices
mat_slices = get_mat_slices(input_data, self.core_tensors)
if self.use_bias:
mat_slices = mat_slices + self.bias_mat[None]
# Contract all bond dims to get (unnormalized) prob amplitudes
psi_vals, log_scales = contract_matseq(
mat_slices,
self.edge_vecs[0],
self.edge_vecs[1],
parallel_eval,
log_format=True,
)
# Get log normalization and check for infinities
log_norm = self.log_norm(seq_len)
assert log_norm.isfinite()
assert torch.all(psi_vals.isfinite())
# Compute unnormalized log probabilities
log_uprobs = torch.log(torch.abs(psi_vals)) + log_scales
assert log_uprobs.shape == (batch,)
# Return normalized probabilities
return 2 * log_uprobs - log_norm
def log_norm(self, data_len) -> Tensor:
r"""
Compute the log normalization of the MPS for its fixed-size input
Uses iterated tensor contraction to compute :math:`\log(|\psi|^2)`,
where :math:`\psi` is the n'th order tensor described by the
contraction of MPS parameter cores. In the Born machine paradigm,
this is also :math:`\log(Z)`, for :math:`Z` the normalization
constant for the probability.
Returns:
l_norm: Scalar value giving the log squared L2 norm of the
n'th order prob. amp. tensor described by the MPS.
"""
# Account for bias matrices before calling log norm implementation
if self.use_bias:
core_tensors = self.core_tensors + self.bias_mat[None]
else:
core_tensors = self.core_tensors
# Account for non-trivial lambda function in the embedding
lamb_mat = None if self.embedding is None else self.embedding.lamb_mat
return get_log_norm(
core_tensors, self.edge_vecs, lamb_mat=lamb_mat, length=data_len
)
@property
def input_dim(self):
return self.core_tensors.shape[0]
@property
def bond_dim(self):
return self.core_tensors.shape[1]
@property
def use_bias(self):
return hasattr(self, "bias_mat")
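# ---------------------------------------------------------------------------
# Minimal usage sketch (editor's addition, not part of the original module).
# Shapes follow the forward() docstrings above: discrete inputs are integer
# tensors of shape (batch, seq_len) with values in [0, input_dim). The exact
# numbers below are illustrative only.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    torch.manual_seed(0)
    batch, seq_len, input_dim, bond_dim = 8, 16, 4, 10
    model = ProbMPS(seq_len, input_dim, bond_dim)
    data = torch.randint(0, input_dim, (batch, seq_len))
    log_probs = model(data)  # shape (batch,), natural-log probabilities
    nll = model.loss(data)   # scalar negative log likelihood
    print(log_probs.shape, float(nll))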
| 41.205074 | 86 | 0.638173 | 2,564 | 19,490 | 4.713339 | 0.150936 | 0.026396 | 0.018618 | 0.008606 | 0.834009 | 0.829706 | 0.828713 | 0.818122 | 0.812495 | 0.809847 | 0 | 0.002758 | 0.293073 | 19,490 | 472 | 87 | 41.292373 | 0.874365 | 0.544125 | 0 | 0.719388 | 0 | 0 | 0.021852 | 0 | 0 | 0 | 0 | 0 | 0.076531 | 1 | 0.071429 | false | 0 | 0.035714 | 0.035714 | 0.178571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9fafe1675befcefcde76efd5be8354ce592f16ac | 22,857 | py | Python | KIDs/psd_fitting.py | Wheeler1711/submm_python_routines | 4145a37ca96867d7425e6b88a1b8bce4c6bcf67f | [
"Apache-2.0"
] | 1 | 2021-01-13T22:38:51.000Z | 2021-01-13T22:38:51.000Z | KIDs/psd_fitting.py | Wheeler1711/submm_python_routines | 4145a37ca96867d7425e6b88a1b8bce4c6bcf67f | [
"Apache-2.0"
] | 1 | 2021-01-29T03:12:41.000Z | 2021-01-29T16:59:18.000Z | KIDs/psd_fitting.py | Wheeler1711/submm_python_routines | 4145a37ca96867d7425e6b88a1b8bce4c6bcf67f | [
"Apache-2.0"
] | null | null | null | import numpy as np
from scipy.stats import binned_statistic
import scipy.optimize as optimization
import matplotlib.pyplot as plt
# Set of modules for fitting the PSD of kinetic inductance detectors
# Written by Jordan 1/5/2017
# To do:
# add verbose = True keyword
# Change log:
# 1/9/2017 Added sigma_increase_cutoff and sigma_increase_factor to the fit_psd modules
# noise profiles
def noise_profile(y,a,b,c):
return a+b*y**-c
def noise_slope(y,b,c):
return b*y**-c
def noise_white(y,a):
return np.ones(y.shape[0])*a
def noise_profile_lor(y,a,b,c,d):
return (a+b*y**-c)/(1+(2*np.pi*y*d)**2.)
def noise_profile_lor_vec(y,a,b,c,d):
y = np.reshape(y,(y.shape[0],1,1,1,1))
return (a+b*y**-c)/(1+(2*np.pi*y*d)**2.)
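# Editor's note on the Lorentzian profile above:
#     S(f) = (a + b * f**-c) / (1 + (2*pi*f*d)**2)
# where a is the white-noise level, b the 1/f amplitude, c the 1/f exponent,
# and d the time constant tau setting the roll-off (names taken from
# fit_values_names in the fitters below). For example, with d = 1e-3 s the
# profile rolls off above f ~ 1/(2*pi*d) ~ 160 Hz:
#     noise_profile_lor(np.array([1., 10., 1000.]), 1e-16, 1e-17, 1., 1e-3)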
def std_of_mean(x):
if np.size(x) == 1:
return x
else:
return np.std(x)/np.sqrt(np.size(x))
def fit_psd_lor(x,y,**keywords):
    '''
    # keywords are
    # use_range --- an n-length list of frequency ranges to use while fitting
    # Example: [[1,57],[63,117],[123,177]] fits 1-57 Hz, 63-117 Hz and 123-177 Hz, avoiding 60 Hz and its harmonics
    # x0 --- initial guess for the fit; this can be very important because the least-squares space over all the parameters is complex
    # There are two ways the function can fit. One is without binning: an error for the fit is estimated
    # from the standard deviation of the surrounding ~100 points, and the error of the data below 10 Hz is
    # artificially lowered so that the fitter doesn't ignore it when comparing it to the many more points
    # at higher frequencies. The other way is to use the log keyword, which log-bins the data and
    # calculates the error for each bin, so there are about as many points at low frequency as at high frequency.
    # Since there are fewer points at low frequencies than at high frequencies, the accuracy of the
    # low-frequency points below sigma_increase_cutoff is artificially increased by scaling the error
    # the fitter uses by sigma_increase_factor.
    '''
if ('sigma_increase_cutoff' in keywords):
sigma_increase_cutoff = keywords['sigma_increase_cutoff']
else:
#define default bounds
sigma_increase_cutoff = 2. #(Hz)
    if ('sigma_increase_factor' in keywords):
sigma_increase_factor = keywords['sigma_increase_factor']
else:
#define default bounds
sigma_increase_factor = 5.
    # bounds: without these some parameters might converge to non-physical values
if ('bounds' in keywords):
bounds = keywords['bounds']
else:
#define default bounds
print("default bounds used")
bounds = ([10**-20,0.0,0.0,9e-7],[10**-8,10**-8,3,0.001])
if ('use_range' in keywords):
use_range = keywords['use_range']
# create an index of the values you want to fit
index = np.where((x>use_range[0][0]) & (x<use_range[0][1]) )[0]
for i in range(1,len(use_range)):
index2 = np.where((x>use_range[i][0]) & (x<use_range[i][1]) )
index = np.hstack((index,index2[0]))
else:
index = range(0,x.shape[0])
# initial conditions
if ('x0' in keywords):
x0 = keywords['x0']
else:
        #define default initial guess
print("default initial guess used")
x0 = np.array([1.*10**(-15.75), 1.*10**(-17),1,0.00001]) # default values that work OK for superspec
# log bin the data first or no
if ('log' in keywords):
print("psd will be log binned before fitting")
log = 1
bins = np.logspace(np.log10(x[0]),np.log10(x[x.shape[0]-1]),100) #100 logspaced bins
else:
log = 0
if log == 1:
binnedfreq_temp = binned_statistic(x[index], x[index], bins=bins)[0]
binnedvals_temp = binned_statistic(x[index], y[index], bins=bins)[0]
binnedvals_std = binned_statistic(x[index], y[index], bins=bins, statistic = std_of_mean)[0]
binnedfreq = binnedfreq_temp[~np.isnan(binnedfreq_temp)]
binnedvals = binnedvals_temp[~np.isnan(binnedfreq_temp)]
binnedstd = binnedvals_std[~np.isnan(binnedfreq_temp)]
freqs = x[index]
vals = y[index]
    if log == 0: # when fitting without binning there are so many points at high frequencies compared to low frequencies that the fit would almost ignore the low-frequency end
        # estimate the noise by taking the standard deviation of each run of 100 consecutive points (there will be some error for the last 100 points)
        std_pts = 100 # if this number is too low it seems to bias the fits to the lower side
low_freq_index = np.where(freqs<sigma_increase_cutoff)
temp = np.zeros((vals.shape[0],std_pts))
        # here I estimate the error by looking at the 100 surrounding points and calculating the std
for i in range(0,std_pts):
temp[:,i] = np.roll(vals,-i)
sigma = np.std(temp,axis = 1)
        sigma[low_freq_index] = sigma[low_freq_index]/sigma_increase_factor # artificially pretend the noise at low frequencies is sigma_increase_factor times lower than everywhere else
fit = optimization.curve_fit(noise_profile_lor, freqs, vals, x0 , sigma,bounds = bounds)
else:
sigma = binnedstd
fit = optimization.curve_fit(noise_profile_lor, binnedfreq, binnedvals, x0 ,sigma,bounds = bounds)
return fit
def fit_psd_lor_brute(x, y, n_grid_points=20, error=None, **keywords):
"""
brute force fitting (the only way to fit with 4 or less variables)
x is the psd frequency
y is the psd magnitude
n_grid_points is the deminsion of the 4 dimensional cube over which function will be evaluated
keywords are
ranges is the ranges for each parameter i.e. np.asarray(([a_low,b_low,c_low,d_low],[a_high,b_high,c_high,d_high]))
freq_range = (f_low,f_high) bounds over which the psd should be fit
white_freq = 50 (i.e) Hz if using the automated range it is very useful to specify where the noise psd is white (frequency independant)
due to the vector nature of the calculations used by this brute for fitter
n_grid_points will be limited by your computers ram and it grows fast
for example 300points 50^4*(2bytes per float*(2arrays) = 3.5GB of ram
just watch your resources when you fit if you exceed your ram you will write to disk and the fit will never finish
To Do add in marginilaztions for error bars like in the brute force fitter in resonance fitting
also add in the corner plot for marginalized values
would be good to add in a nested version of this where it fits again over a smaller paramter space
"""
if ('freq_range' in keywords):
freq_range = keywords['freq_range']
else:
freq_range = (x[0], x[-1])
index_for_fitting = np.where(((x >= freq_range[0]) & (x <= freq_range[1])))
if ('ranges' in keywords):
ranges = keywords['ranges']
else:
if ('white_freq' in keywords):
white_freq = keywords['white_freq']
white_index = np.argmin(np.abs(x[index_for_fitting] - white_freq))
else:
white_index = len(y[index_for_fitting]) // 2 # noise is white in the middle of psd
white_guess = y[index_for_fitting][white_index]
x0_guess = np.array([white_guess,
(y[index_for_fitting][0] - white_guess) / x[index_for_fitting][0] ** (-1.),
# assume 1/f dominates at lowest frequency but still subtract off white noise
1, # guess 1/f is index is 1
1. / 2 / np.pi / x[index_for_fitting][np.argmin(np.abs(white_guess / 2. - y[
index_for_fitting]))]]) # look for 3dB decrease from white noise guess
print("guess values are")
print(x0_guess)
ranges = np.asarray(([x0_guess[0] / 2, x0_guess[1] / 20, 0.5, x0_guess[3] / 2],
[x0_guess[0] * 2, x0_guess[1] * 10, 2, x0_guess[3] * 2]))
if error is None:
error = np.ones(len(x[index_for_fitting]))
a_values = np.linspace(ranges[0][0], ranges[1][0], n_grid_points)
b_values = np.linspace(ranges[0][1], ranges[1][1], n_grid_points)
c_values = np.linspace(ranges[0][2], ranges[1][2], n_grid_points)
d_values = np.linspace(ranges[0][3], ranges[1][3], n_grid_points)
evaluated_ranges = np.vstack((a_values, b_values, c_values, d_values))
a, b, c, d = np.meshgrid(a_values, b_values, c_values, d_values, indexing="ij") # always index ij
evaluated = noise_profile_lor_vec(x[index_for_fitting], a, b, c, d)
data_values = np.reshape(y[index_for_fitting], (y[index_for_fitting].shape[0], 1, 1, 1, 1))
error = np.reshape(error, (y[index_for_fitting].shape[0], 1, 1, 1, 1))
# print(evaluated.shape)
# print(data_values.shape)
# print(error.shape)
sum_dev = np.sum(((evaluated - data_values) ** 2 / error ** 2),
axis=0) # comparing in magnitude space rather than magnitude squared
# print(sum_dev.shape)
min_index = np.where(sum_dev == np.min(sum_dev))
print("grid values at minimum are")
print(min_index)
index1 = min_index[0][0]
index2 = min_index[1][0]
index3 = min_index[2][0]
index4 = min_index[3][0]
fit_values = np.asarray((a_values[index1], b_values[index2], c_values[index3], d_values[index4]))
fit_values_names = ('a (white)', 'b (1/f)', 'c (1/f exponent)', 'd (tau)')
fit_result = noise_profile_lor(x, a_values[index1], b_values[index2], c_values[index3], d_values[index4])
x0_guess_result = noise_profile_lor(x, x0_guess[0], x0_guess[1], x0_guess[2], x0_guess[3])
noise_slope_result = noise_slope(x, fit_values[1], fit_values[2])
fine_freqs = np.logspace(np.log10(freq_range[0]), np.log10(freq_range[1]), 10000)
print(fine_freqs)
knee = fine_freqs[np.argmin(np.abs(fit_values[1] * fine_freqs ** -fit_values[2] - fit_values[0]))]
fit_dict = {'fit_values': fit_values, 'fit_values_names': fit_values_names, 'sum_dev': sum_dev,
'fit_result': fit_result, 'x0_guess_result': x0_guess_result, 'evaluated_ranges': evaluated_ranges,
'knee': knee,
'noise_slope_result': noise_slope_result} # , 'x0':x0, 'z':z},'marginalized_2d':marginalized_2d,'marginalized_1d':marginalized_1d,
return fit_dict
def fit_psd(x,y,plot = False,**keywords):
"""
# keywards are
# use_range ---is an n length tuple of frequencies to use while fitting
# Example: [[1,57],[63,117],[123,177]] here we fit from 1 to 57Hz and 63 to 117 Hz and 123 to 177Hz avoid 60 Hz and harmonics
# x0 --- intial guess for the fit this can be very important becuase because least square space over all the parameter is comple
# there are two way the function if fit one is it is fit without binning. In this case a error for the fit is calculated
# by calculating the standard deviation of the surronding 50 or so points. Then the data below 10Hz has the error artificially
# lowered so that the fitter doesn't ignore in when comparing it to the many more points at higher frequencies. The other way is to use
# the log keyword which then log bins the data and calculates the error for each bin. This way there are around the same number of points
# at low frequency as compared to high frequency
# since there are less points at low frequencies than high frequencies I artificially increase the accuracy of the low frequency points
# below sigma_increase_cutoff by scaling the error that the fitter uses by sigma_increase_factor
"""
if ('sigma_increase_cutoff' in keywords):
sigma_increase_cutoff = keywords['sigma_increase_cutoff']
else:
#define default bounds
sigma_increase_cutoff = 2. #(Hz)
    if ('sigma_increase_factor' in keywords):
sigma_increase_factor = keywords['sigma_increase_factor']
else:
#define default bounds
sigma_increase_factor = 5.
if ('use_range' in keywords):
use_range = keywords['use_range']
# create an index of the values you want to fit
index = np.where((x>use_range[0][0]) & (x<use_range[0][1]) )[0]
for i in range(1,len(use_range)):
index2 = np.where((x>use_range[i][0]) & (x<use_range[i][1]) )
index = np.hstack((index,index2[0]))
else:
index = range(0,x.shape[0])
if ('uniform_weight' in keywords):
uniform_weight = keywords['uniform_weight']
else:
uniform_weight = False
# initial conditions
if ('x0' in keywords):
x0 = keywords['x0']
else:
#define default initial guess # guess for b: b*freqs**-c = value => b = value/freqs**-c
print("default initial guess used")
x0 = np.array([y[index][-1], y[index][0]/x[index][0]**(-1.) ,1]) # default values that work OK for superspec
# bounds: without these, some parameters might converge to non-physical values
if ('bounds' in keywords):
bounds = keywords['bounds']
else:
#define default bounds
print("default bounds used")
#bounds = ([10**-20,10**-20,0],[10**-10,10**-10,3])
bounds = ([x0[0]/10.,x0[1]/10.,0],[x0[0]*10,x0[1]*10.,3])
# log bin the data first or no
if ('log' in keywords):
print("psd will be log binned before fitting")
log = 1
bins = np.logspace(np.log10(x[0]),np.log10(x[x.shape[0]-1]),100) #100 logspaced bins
else:
log = 0
bins = x.shape[0] # doesn't bin at all
binnedfreq_temp = binned_statistic(x[index], x[index], bins=bins)[0]
binnedvals_temp = binned_statistic(x[index], y[index], bins=bins)[0]
binnedvals_std = binned_statistic(x[index], y[index], bins=bins, statistic = std_of_mean)[0]
binnedfreq = binnedfreq_temp[~np.isnan(binnedfreq_temp)]
binnedvals = binnedvals_temp[~np.isnan(binnedfreq_temp)]
binnedstd = binnedvals_std[~np.isnan(binnedfreq_temp)]
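# NOTE: log binning (100 log-spaced bins) equalizes the weight of each
# frequency decade in the fit; std_of_mean (assumed to be defined elsewhere
# in this module) supplies per-bin uncertainties, and the NaN mask drops
# empty bins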
freqs = x[index]
vals = y[index]
if log ==0: # when fitting, there are so many points at high frequencies compared to low frequencies that the fit would almost ignore the low-frequency end
# get an estimate of the noise by taking the standard deviation of each std_pts consecutive points (there will be some error for the last std_pts points)
std_pts = 100 # if this number is too low it seems to bias the fits to the lower side
low_freq_index = np.where(freqs<sigma_increase_cutoff)
temp = np.zeros((vals.shape[0],std_pts))
for i in range(0,std_pts):
temp[:,i] = np.roll(vals,-i)
sigma = np.std(temp,axis = 1)
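# the np.roll loop above builds an (N, std_pts) matrix whose rows are
# sliding windows, so sigma[i] is the std of the std_pts samples starting
# at index i; because np.roll wraps around, the last std_pts entries mix
# values from both ends of the array (the error noted above)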
sigma[low_freq_index] = sigma[low_freq_index]/sigma_increase_factor # artificially pretend the noise at low frequencies is sigma_increase_factor times lower than everywhere else, so those points get more weight in the fit
fit = optimization.curve_fit(noise_profile, freqs, vals, x0 , sigma,bounds = bounds)
print("hello")
else:
if uniform_weight == True:
sigma = np.ones(len(binnedstd))*np.mean(binnedstd)
else:
sigma = binnedstd
fit = optimization.curve_fit(noise_profile, binnedfreq, binnedvals, x0 ,sigma,bounds = bounds)
if plot == True:
plt.loglog(binnedfreq,binnedvals,label = "Data",linewidth = 2)
#plt.errorbar(binnedfreq,binned_psd_log,binned_std_log, fmt='o')
plt.loglog(binnedfreq,noise_profile(binnedfreq,x0[0],x0[1],x0[2]),linewidth = 2,label = "Initial Guess")
plt.loglog(binnedfreq,noise_profile(binnedfreq,fit[0][0],fit[0][1],fit[0][2]),linewidth = 2,label = "Fit")
plt.loglog(binnedfreq,noise_slope(binnedfreq,fit[0][1],fit[0][2]),linewidth = 2, label = "1/f^" + str(fit[0][2])[0:4])
plt.loglog(binnedfreq,noise_white(binnedfreq,fit[0][0]),linewidth = 2,label = "White"+ " " + str(fit[0][0]*10**16)[0:4] +" x10^-16")
plt.legend()
plt.xlabel("Frequency (Hz)")
plt.ylabel("Sxx (1/Hz)")
plt.ylim(np.min(y),np.max(y))
plt.show(block = False)
return fit
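# NOTE: this branch returns the raw scipy.optimize.curve_fit output
# (popt, pcov), while the grid-search branch earlier returns a fit_dict,
# so callers must handle both return shapes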
| 50.793333 | 332 | 0.660017 | 3,648 | 22,857 | 3.965735 | 0.103893 | 0.021774 | 0.040437 | 0.023225 | 0.846478 | 0.843713 | 0.825465 | 0.820212 | 0.815096 | 0.804313 | 0 | 0.039628 | 0.213939 | 22,857 | 449 | 333 | 50.906459 | 0.76557 | 0.294264 | 0 | 0.8 | 0 | 0 | 0.072697 | 0.010583 | 0.003333 | 0 | 0 | 0 | 0 | 1 | 0.03 | false | 0 | 0.013333 | 0.013333 | 0.09 | 0.073333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4cddf6d1d344a17f6fe93174ca51d66ce7c4b42e | 8,088 | py | Python | synth/utils/segment.py | MTG/content_choral_separation | f3710fb9a15a88651d13ea2c07c6d0368f1cfb8f | [
"Apache-2.0"
] | 5 | 2020-09-23T09:44:35.000Z | 2022-03-20T11:21:58.000Z | synth/utils/segment.py | MTG/content_choral_separation | f3710fb9a15a88651d13ea2c07c6d0368f1cfb8f | [
"Apache-2.0"
] | 9 | 2020-09-25T22:42:25.000Z | 2022-02-10T02:10:29.000Z | synth/utils/segment.py | MTG/content_choral_separation | f3710fb9a15a88651d13ea2c07c6d0368f1cfb8f | [
"Apache-2.0"
] | 2 | 2021-06-03T14:52:20.000Z | 2021-12-11T18:10:16.000Z | #! /usr/bin/env python
# encoding: utf-8
# Copied from https://github.com/mauriciovander/silence-removal/blob/master/segment.py
import numpy
import scipy.io.wavfile as wf
import sys
from synth.config import config
class VoiceActivityDetectionYAM:
def __init__(self, sr, ms, channel):
self.__sr = sr
self.__channel = channel
self.__step = int(sr/50)
self.__buffer_size = int(sr/50)
self.__buffer_back = numpy.array([],dtype=numpy.int16)
self.__buffer = numpy.array([],dtype=numpy.int16)
self.__out_buffer_back = numpy.array([],dtype=numpy.int16)
self.__out_buffer = numpy.array([],dtype=numpy.int16)
self.__n = 0
self.__VADthd = 0.
self.__VADn = 0.
self.__silence_counter = 0
self.__segment_count = 0
self.__voice_detected = False
self.__silence_thd_ms = ms
self.out_segments = []
self.out_segments_back = []
# Voice Activity Detection
# Adaptive threshold
def vad(self, _frame):
frame = numpy.array(_frame) ** 2.
result = True
threshold = 0.1
thd = numpy.min(frame) + numpy.ptp(frame) * threshold
self.__VADthd = (self.__VADn * self.__VADthd + thd) / float(self.__VADn + 1.)
self.__VADn += 1.
if numpy.mean(frame) <= self.__VADthd:
self.__silence_counter += 1
else:
self.__silence_counter = 0
if self.__silence_counter > self.__silence_thd_ms*self.__sr/(1000*self.__buffer_size):
result = False
return result
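# NOTE: vad() keeps a running average of per-frame thresholds
# (thd = min + 0.1 * peak-to-peak of the squared frame) and reports silence
# once the mean frame power stays below that average for longer than
# silence_thd_ms, converted to frames via ms * sr / (1000 * buffer_size)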
# Push new audio samples into the buffer.
def add_samples(self, data, back):
self.__buffer = numpy.append(self.__buffer, data)
self.__buffer_back = numpy.append(self.__buffer_back, back)
result = len(self.__buffer) >= self.__buffer_size
# print('__buffer size %i'%self.__buffer.size)
return result
# Pull a portion of the buffer to process
# (pulled samples are deleted after being
# processed)
def get_frame(self):
window = self.__buffer[:self.__buffer_size]
window_back = self.__buffer_back[:self.__buffer_size]
self.__buffer = self.__buffer[self.__step:]
self.__buffer_back = self.__buffer_back[self.__step:]
# print('__buffer size %i'%self.__buffer.size)
return window, window_back
# Adds new audio samples to the internal
# buffer and process them
def process(self, data, back):
if self.add_samples(data, back):
while len(self.__buffer) >= self.__buffer_size:
# Framing
window, window_back = self.get_frame()
# print('window size %i'%window.size)
if self.vad(window): # speech frame
# print('voiced')
self.__out_buffer = numpy.append(self.__out_buffer, window)
self.__out_buffer_back = numpy.append(self.__out_buffer_back, window_back)
self.__voice_detected = True
elif self.__voice_detected:
self.__out_buffer = numpy.append(self.__out_buffer, window)
self.__out_buffer_back = numpy.append(self.__out_buffer_back, window_back)
self.__voice_detected = False
self.__segment_count = self.__segment_count + 1
assert len(self.__out_buffer) == len(self.__out_buffer_back)
self.out_segments.append(self.__out_buffer)
self.out_segments_back.append(self.__out_buffer_back)
# wf.write('%s.%i.%i.wav'%(sys.argv[2],self.__channel,self.__segment_count),sr,self.__out_buffer)
self.__out_buffer = numpy.array([],dtype=numpy.int16)
self.__out_buffer_back = numpy.array([],dtype=numpy.int16)
# print(self.__segment_count)
return self.out_segments, self.out_segments_back
# print('__out_buffer size %i'%self.__out_buffer.size)
def get_voice_samples(self):
return self.__out_buffer
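# NOTE: VoiceActivityDetection below is a near-verbatim copy of
# VoiceActivityDetectionYAM above; the only functional difference is that
# its process() takes a single argument and synthesizes the parallel 'back'
# array itself as per-sample timestamps via
# numpy.arange(0, len(data)/config.fs, 1/config.fs)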
class VoiceActivityDetection:
def __init__(self, sr, ms, channel):
self.__sr = sr
self.__channel = channel
self.__step = int(sr/50)
self.__buffer_size = int(sr/50)
self.__buffer_back = numpy.array([],dtype=numpy.int16)
self.__buffer = numpy.array([],dtype=numpy.int16)
self.__out_buffer_back = numpy.array([],dtype=numpy.int16)
self.__out_buffer = numpy.array([],dtype=numpy.int16)
self.__n = 0
self.__VADthd = 0.
self.__VADn = 0.
self.__silence_counter = 0
self.__segment_count = 0
self.__voice_detected = False
self.__silence_thd_ms = ms
self.out_segments = []
self.out_segments_back = []
# Voice Activity Detection
# Adaptive threshold
def vad(self, _frame):
frame = numpy.array(_frame) ** 2.
result = True
threshold = 0.1
thd = numpy.min(frame) + numpy.ptp(frame) * threshold
self.__VADthd = (self.__VADn * self.__VADthd + thd) / float(self.__VADn + 1.)
self.__VADn += 1.
if numpy.mean(frame) <= self.__VADthd:
self.__silence_counter += 1
else:
self.__silence_counter = 0
if self.__silence_counter > self.__silence_thd_ms*self.__sr/(1000*self.__buffer_size):
result = False
return result
# Push new audio samples into the buffer.
def add_samples(self, data, back):
self.__buffer = numpy.append(self.__buffer, data)
self.__buffer_back = numpy.append(self.__buffer_back, back)
result = len(self.__buffer) >= self.__buffer_size
# print('__buffer size %i'%self.__buffer.size)
return result
# Pull a portion of the buffer to process
# (pulled samples are deleted after being
# processed)
def get_frame(self):
window = self.__buffer[:self.__buffer_size]
window_back = self.__buffer_back[:self.__buffer_size]
self.__buffer = self.__buffer[self.__step:]
self.__buffer_back = self.__buffer_back[self.__step:]
# print('__buffer size %i'%self.__buffer.size)
return window, window_back
# Adds new audio samples to the internal
# buffer and process them
def process(self, data):
back = numpy.arange(0, len(data)/config.fs, 1/config.fs)
if self.add_samples(data, back):
while len(self.__buffer) >= self.__buffer_size:
# Framing
window, window_back = self.get_frame()
# print('window size %i'%window.size)
if self.vad(window): # speech frame
# print('voiced')
self.__out_buffer = numpy.append(self.__out_buffer, window)
self.__out_buffer_back = numpy.append(self.__out_buffer_back, window_back)
self.__voice_detected = True
elif self.__voice_detected:
self.__out_buffer = numpy.append(self.__out_buffer, window)
self.__out_buffer_back = numpy.append(self.__out_buffer_back, window_back)
self.__voice_detected = False
self.__segment_count = self.__segment_count + 1
assert len(self.__out_buffer) == len(self.__out_buffer_back)
self.out_segments.append(self.__out_buffer)
self.out_segments_back.append(self.__out_buffer_back)
# wf.write('%s.%i.%i.wav'%(sys.argv[2],self.__channel,self.__segment_count),sr,self.__out_buffer)
self.__out_buffer = numpy.array([],dtype=numpy.int16)
self.__out_buffer_back = numpy.array([],dtype=numpy.int16)
# print(self.__segment_count)
return self.out_segments, self.out_segments_back
# print('__out_buffer size %i'%self.__out_buffer.size)
def get_voice_samples(self):
return self.__out_buffer | 40.238806 | 117 | 0.613254 | 982 | 8,088 | 4.576375 | 0.121181 | 0.077882 | 0.109924 | 0.060525 | 0.9417 | 0.9417 | 0.9417 | 0.9417 | 0.9417 | 0.9417 | 0 | 0.012267 | 0.284372 | 8,088 | 201 | 118 | 40.238806 | 0.764167 | 0.157765 | 0 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014815 | 1 | 0.088889 | false | 0 | 0.02963 | 0.014815 | 0.207407 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e2669ef89e64b852688838f97cfaebd66e7dd238 | 1,695 | py | Python | source/device_manager/tests/test_imports.py | ElsevierSoftwareX/SOFTX-D-21-00051 | 7ae5ad60971129286e2ee452a797c26810a9cb0f | [
"MIT"
] | null | null | null | source/device_manager/tests/test_imports.py | ElsevierSoftwareX/SOFTX-D-21-00051 | 7ae5ad60971129286e2ee452a797c26810a9cb0f | [
"MIT"
] | null | null | null | source/device_manager/tests/test_imports.py | ElsevierSoftwareX/SOFTX-D-21-00051 | 7ae5ad60971129286e2ee452a797c26810a9cb0f | [
"MIT"
] | null | null | null | import unittest
import os
class TestImports(unittest.TestCase):
def test_imports(self):
self.assertTrue(os.path.exists("source/device_manager/command-line-interface/parser.py"))
self.assertTrue(os.path.exists("source/device_manager/device_layer/data_handler.py"))
self.assertTrue(os.path.exists("source/device_manager/device_layer/device_feature.py"))
self.assertTrue(os.path.exists("source/device_manager/device_layer/device_info.py"))
self.assertTrue(os.path.exists("source/device_manager/device_layer/device_interface.py"))
self.assertTrue(os.path.exists("source/device_manager/device_layer/dynamic_client.py"))
self.assertTrue(os.path.exists("source/device_manager/device_layer/sila_device.py"))
self.assertTrue(os.path.exists("source/device_manager/device_layer/sila_feature.py"))
self.assertTrue(os.path.exists("source/device_manager/hosts/hosts.py"))
self.assertTrue(os.path.exists("source/device_manager/sila_auto_discovery/sila_auto_discovery.py"))
self.assertTrue(os.path.exists("source/device_manager/data_directories.py"))
self.assertTrue(os.path.exists("source/device_manager/database.py"))
self.assertTrue(os.path.exists("source/device_manager/device_log.py"))
self.assertTrue(os.path.exists("source/device_manager/device_manager.py"))
self.assertTrue(os.path.exists("source/device_manager/sila_server.py"))
self.assertTrue(os.path.exists("source/device_manager/thread_local_storage.py"))
self.assertTrue(os.path.exists("source/device_manager/user.py"))
self.assertTrue(os.path.exists("source/backend/device_manager_service.py"))
| 65.192308 | 107 | 0.756932 | 229 | 1,695 | 5.41048 | 0.183406 | 0.199354 | 0.232446 | 0.290557 | 0.781275 | 0.781275 | 0.781275 | 0.753834 | 0.717514 | 0.565779 | 0 | 0 | 0.104425 | 1,695 | 25 | 108 | 67.8 | 0.816206 | 0 | 0 | 0 | 0 | 0 | 0.476696 | 0.476696 | 0 | 0 | 0 | 0 | 0.818182 | 1 | 0.045455 | false | 0 | 0.181818 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1a46df242117ff51572a224941f3113d11c8642c | 104 | py | Python | hippy/module/spl/__init__.py | jweinraub/hippyvm | 09c7643aaa1c4ade566e8681abd2543f12bf874c | [
"MIT"
] | 289 | 2015-01-01T15:36:55.000Z | 2022-03-27T00:22:27.000Z | hippy/module/spl/__init__.py | jweinraub/hippyvm | 09c7643aaa1c4ade566e8681abd2543f12bf874c | [
"MIT"
] | 26 | 2015-01-21T16:34:41.000Z | 2020-08-26T15:12:54.000Z | hippy/module/spl/__init__.py | jweinraub/hippyvm | 09c7643aaa1c4ade566e8681abd2543f12bf874c | [
"MIT"
] | 35 | 2015-01-05T12:09:41.000Z | 2022-03-16T09:30:16.000Z | from hippy.module.spl.funcs import *
import hippy.module.spl.arrayiter
import hippy.module.spl.iterator
| 26 | 36 | 0.826923 | 16 | 104 | 5.375 | 0.5 | 0.383721 | 0.488372 | 0.465116 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 104 | 3 | 37 | 34.666667 | 0.895833 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
46c673470f59b11543a9ac7ffa1fb94f7519a462 | 8,929 | py | Python | quantarhei/models/chlorophylls.py | slamavl/quantarhei | d822bc2db86152c418e330a9152e7866869776f7 | [
"MIT"
] | 14 | 2016-10-16T13:26:05.000Z | 2021-11-09T11:40:52.000Z | quantarhei/models/chlorophylls.py | slamavl/quantarhei | d822bc2db86152c418e330a9152e7866869776f7 | [
"MIT"
] | 61 | 2016-09-19T10:45:56.000Z | 2021-11-10T13:53:06.000Z | quantarhei/models/chlorophylls.py | slamavl/quantarhei | d822bc2db86152c418e330a9152e7866869776f7 | [
"MIT"
] | 21 | 2016-08-30T09:09:28.000Z | 2022-03-30T03:16:35.000Z | # -*- coding: utf-8 -*-
"""
Created on Thu Sep 14 13:50:05 2017
@author: Johan
"""
# -*- coding: utf-8 -*-
from ..core.units import cm2int
from ..core.managers import EnergyUnitsManaged
from .molecularmodel import MolecularModel
from ..builders import pdb
from ..utils.vectors import normalize2
class ChlorophyllA(MolecularModel, EnergyUnitsManaged):
def __init__(self, model_type=None, dp_length=4.582):
super().__init__(model_type=model_type)
self.pdbname = "CLA"
self.default_energies[1] = 15200.0*cm2int
#These values are taken from Muh, Lindorfer, et al. Physical Chemistry Chemical Physics, 2014. Chla: 4.58, Chlb 3.83:
self.default_dipole_lengths[0,1] = dp_length
self.default_dipole_lengths[1,0] = dp_length
def set_default_energies(self, elenergies):
k = 0
for en in elenergies:
self.default_energies[k] = self.convert_2_internal_u(en)
k += 1
def set_default_dipole_length(self,transition, val):
self.default_dipole_lengths[transition[0],transition[1]] = val
self.default_dipole_lengths[transition[1],transition[0]] = val
def transition_dipole(self, transition=(0,1), data_type=None, data=None):
""" Returns transition dipole moment vector
"""
data_type = self._check_data_type(data_type)
if data_type == "PDB":
k1 = 0
k2 = 0
for line in data:
if pdb.line_matches(line, by_atmName="ND"):
xyz1 = pdb.line_xyz(line)
k1 += 1
if pdb.line_matches(line, by_atmName="NB"):
xyz2 = pdb.line_xyz(line)
k2 += 1
# FIXME: what to do with alternate locations???
if (k1 >= 1) and (k2 >= 1):
d = xyz1 - xyz2
d = normalize2(d, norm=self.default_dipole_lengths[0,1])
else:
#print(k1,k2)
raise Exception("No unique direction of"
+" a molecule's dipole found")
else:
raise Exception("Unknown data type")
return d
def position_of_center(self, data_type=None, data=None):
""" Returns the position of the molecular center
"""
data_type = self._check_data_type(data_type)
if data_type == "PDB":
k1 = 0
k2 = 0
k3 = 0
k4 = 0
for line in data:
if pdb.line_matches(line, by_atmName="NA"):
xyz1 = pdb.line_xyz(line)
k1 += 1
if pdb.line_matches(line, by_atmName="NB"):
xyz2 = pdb.line_xyz(line)
k2 += 1
if pdb.line_matches(line, by_atmName="NC"):
xyz3 = pdb.line_xyz(line)
k3 += 1
if pdb.line_matches(line, by_atmName="ND"):
xyz4 = pdb.line_xyz(line)
k4 += 1
if (k1 >= 1) and (k2 >= 1) and (k3 >= 1) and (k4 >= 1):
pos = (xyz1 + xyz2 + xyz3 + xyz4)/4.0
else:
#print(k1, k2, k3, k4)
raise Exception("No unique possition of a molecule found")
else:
raise Exception("Unknown data type")
return pos
def pi_conjugated_system(self, data_type=None, data=None):
"""Returns the atoms and atom types in the pi-conjugated system
Calculates and returns positions of all atoms in the pi-conjugated
system of the molecule and the types of the atoms.
Parameters
----------
data_type : string
Type of the data; can be e.g. PDB
data :
Data corresponding to the data type
"""
data_type = self._check_data_type(data_type)
if data_type == "PDB":
pass
else:
raise Exception("Unknown data type")
def _check_data_type(self, data_type):
"""If non data_type is specified, the default is taken (if known)
"""
if data_type is None:
if self.model_type is None:
raise Exception()
else:
return self.model_type
else:
return data_type
class ChlorophyllB(MolecularModel, EnergyUnitsManaged):
def __init__(self, model_type=None, dp_length=3.834):
super().__init__(model_type=model_type)
self.pdbname = "CHL"
self.default_energies[1] = 15700.0*cm2int
#These values are taken from Muh, Lindorfer, et al. Physical Chemistry Chemical Physics, 2014
self.default_dipole_lengths[0,1] = dp_length
self.default_dipole_lengths[1,0] = dp_length
def set_default_energies(self, elenergies):
k = 0
for en in elenergies:
self.default_energies[k] = self.convert_2_internal_u(en)
k += 1
def set_default_dipole_length(self,transition, val):
self.default_dipole_lengths[transition[0],transition[1]] = val
self.default_dipole_lengths[transition[1],transition[0]] = val
def transition_dipole(self, transition=(0,1), data_type=None, data=None):
""" Returns transition dipole moment vector
"""
data_type = self._check_data_type(data_type)
if data_type == "PDB":
k1 = 0
k2 = 0
for line in data:
if pdb.line_matches(line, by_atmName="ND"):
xyz1 = pdb.line_xyz(line)
k1 += 1
if pdb.line_matches(line, by_atmName="NB"):
xyz2 = pdb.line_xyz(line)
k2 += 1
# FIXME: what to do with alternate locations???
if (k1 >= 1) and (k2 >= 1):
d = xyz1 - xyz2
d = normalize2(d, norm=self.default_dipole_lengths[0,1])
else:
#print(k1,k2)
raise Exception("No unique direction of"
+" a molecule's dipole found")
else:
raise Exception("Unknown data type")
return d
def position_of_center(self, data_type=None, data=None):
""" Returns the position of the molecular center
"""
data_type = self._check_data_type(data_type)
if data_type == "PDB":
k1 = 0
k2 = 0
k3 = 0
k4 = 0
for line in data:
if pdb.line_matches(line, by_atmName="NA"):
xyz1 = pdb.line_xyz(line)
k1 += 1
if pdb.line_matches(line, by_atmName="NB"):
xyz2 = pdb.line_xyz(line)
k2 += 1
if pdb.line_matches(line, by_atmName="NC"):
xyz3 = pdb.line_xyz(line)
k3 += 1
if pdb.line_matches(line, by_atmName="ND"):
xyz4 = pdb.line_xyz(line)
k4 += 1
if (k1 >= 1) and (k2 >= 1) and (k3 >= 1) and (k4 >= 1):
pos = (xyz1 + xyz2 + xyz3 + xyz4)/4.0
else:
#print(k1, k2, k3, k4)
raise Exception("No unique possition of a molecule found")
else:
raise Exception("Unknown data type")
return pos
def pi_conjugated_system(self, data_type=None, data=None):
"""Returns the atoms and atom types in the pi-conjugated system
Calculates and returns positions of all atoms in the pi-conjugated
system of the molecule and the types of the atoms.
Parameters
----------
data_type : string
Type of the data; can be e.g. PDB
data :
Data corresponding to the data type
"""
data_type = self._check_data_type(data_type)
if data_type == "PDB":
pass
else:
raise Exception("Unknown data type")
def _check_data_type(self, data_type):
"""If non data_type is specified, the default is taken (if known)
"""
if data_type is None:
if self.model_type is None:
raise Exception()
else:
return self.model_type
else:
return data_type
| 32.234657 | 126 | 0.501288 | 1,028 | 8,929 | 4.182879 | 0.152724 | 0.093023 | 0.025116 | 0.044651 | 0.924186 | 0.924186 | 0.924186 | 0.924186 | 0.906512 | 0.906512 | 0 | 0.0376 | 0.410236 | 8,929 | 276 | 127 | 32.351449 | 0.778959 | 0.16788 | 0 | 0.918239 | 0 | 0 | 0.045372 | 0 | 0 | 0 | 0 | 0.007246 | 0 | 1 | 0.08805 | false | 0.012579 | 0.031447 | 0 | 0.18239 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
204d38be881acd59f7f7c7a820e4bd19a53aecd3 | 3,676 | py | Python | Drop Inventory (old).py | rmolin/improved-parakeet | d4f1210b9f80bc6f938712aa5dfaf2ac3a1b026c | [
"MIT"
] | null | null | null | Drop Inventory (old).py | rmolin/improved-parakeet | d4f1210b9f80bc6f938712aa5dfaf2ac3a1b026c | [
"MIT"
] | null | null | null | Drop Inventory (old).py | rmolin/improved-parakeet | d4f1210b9f80bc6f938712aa5dfaf2ac3a1b026c | [
"MIT"
] | null | null | null | import pyautogui as py
import time
import random
py.FAILSAFE = True
#indefinite loop: MAKE SURE FAILSAFE IS ON
#end of script is "x += 1"
x = 1
while True:
#drop inventory
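#illustrative sketch (not part of the original script): the repetitive
#block below shift-clicks a 4-column by 7-row grid of inventory slots at
#fixed screen coordinates; using the same coordinates it is equivalent to:
# cols = [1643, 1705, 1770, 1830]
# rows = [615, 667, 720, 777, 827, 882, 940]
# for y in rows:
#     for col in cols:
#         py.moveTo(col, y, duration=.4)
#         py.keyDown('shift')
#         py.click(button='left')
#         py.keyUp('shift')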
#row 1
py.moveTo(1643, 615, duration=.44)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1705, 615, duration=.44)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1770, 615, duration=.44)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1830, 615, duration=.44)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
#row 2
py.moveTo(1643, 667, duration=.44)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1705, 667, duration=.44)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1770, 667, duration=.44)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1830, 667, duration=.44)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
#row 3
py.moveTo(1643, 720, duration=.43)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1705, 720, duration=.43)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1770, 720, duration=.43)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1830, 720, duration=.43)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
#row 4
py.moveTo(1643, 777, duration=.41)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1705, 777, duration=.41)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1770, 777, duration=.41)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1830, 777, duration=.41)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
#row 5
py.moveTo(1643, 827, duration=.40)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1705, 827, duration=.40)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1770, 827, duration=.40)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1830, 827, duration=.40)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
#row 6
py.moveTo(1643, 882, duration=.29)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1705, 882, duration=.29)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1770, 882, duration=.29)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1830, 882, duration=.29)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
#row 7
py.moveTo(1643, 940, duration=.25)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1705, 940, duration=.25)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1770, 940, duration=.25)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
py.moveTo(1830, 940, duration=.21)
py.PAUSE = .2
py.keyDown('shift')
py.click(button='left')
py.keyUp('shift')
x += 1
| 21.623529 | 44 | 0.623776 | 580 | 3,676 | 3.953448 | 0.096552 | 0.149586 | 0.097689 | 0.122111 | 0.894461 | 0.894461 | 0.894461 | 0.894461 | 0.894461 | 0.894461 | 0 | 0.095301 | 0.172198 | 3,676 | 169 | 45 | 21.751479 | 0.658232 | 0.031556 | 0 | 0.756757 | 0 | 0 | 0.115942 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.02027 | 0 | 0.02027 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
64876473c84bfa5e350fa91df7e78f86b5e5e43f | 1,040 | py | Python | tests/test.py | notkar1m/randfacts | 84f45650f440f1118544e91f617f36637a2e71ec | [
"MIT"
] | null | null | null | tests/test.py | notkar1m/randfacts | 84f45650f440f1118544e91f617f36637a2e71ec | [
"MIT"
] | null | null | null | tests/test.py | notkar1m/randfacts | 84f45650f440f1118544e91f617f36637a2e71ec | [
"MIT"
] | null | null | null | import randfacts
print("")
print("")
print(u"\u001b[33m===================================================================\u001b[0m")
print(u"\u001b[36mInitializing test! (1/1)")
try:
print(u"\u001b[33m===================================================================\u001b[0m")
print(randfacts.getFact())
print(u"\u001b[33m===================================================================\u001b[0m")
print("")
except AttributeError:
print(u"\u001b[31m===================================================================\u001b[0m")
print(u"\u001b[31mWhoops! Executing a getFact() call got an AttributeError!\u001b[0m")
print(u"\u001b[31m===================================================================\u001b[0m")
exit(2)
print(u"\u001b[33m===================================================================\u001b[0m")
print(u"\u001b[32mSuccessful Test! (1/1 COMPLETE)\u001b[0m")
print(u"\u001b[35mTerminating!\u001b[0m")
print(u"\u001b[33m===================================================================\u001b[0m")
exit()
| 47.272727 | 97 | 0.390385 | 92 | 1,040 | 4.413043 | 0.282609 | 0.162562 | 0.29803 | 0.192118 | 0.546798 | 0.440887 | 0.285714 | 0.157635 | 0.157635 | 0 | 0 | 0.100301 | 0.041346 | 1,040 | 21 | 98 | 49.52381 | 0.306921 | 0 | 0 | 0.5 | 0 | 0 | 0.7625 | 0.652885 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.05 | 0 | 0.05 | 0.75 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
649be1d96aceaa72304319c1c3fc4666aaa5e7ae | 55,719 | py | Python | tests/test_forms.py | moshthepitt/small-small-hr | cecfe4f072c913f2f3685eef2c45141fdd68d6c3 | [
"MIT"
] | 27 | 2018-05-29T11:02:19.000Z | 2022-03-23T16:43:37.000Z | tests/test_forms.py | moshthepitt/small-small-hr | cecfe4f072c913f2f3685eef2c45141fdd68d6c3 | [
"MIT"
] | 21 | 2018-06-17T21:27:43.000Z | 2022-02-10T07:59:00.000Z | tests/test_forms.py | moshthepitt/small-small-hr | cecfe4f072c913f2f3685eef2c45141fdd68d6c3 | [
"MIT"
] | 13 | 2019-06-25T06:10:24.000Z | 2021-09-12T19:30:29.000Z | """Module to test small_small_hr models."""
# pylint: disable=too-many-lines,hard-coded-auth-user
import os
from datetime import date, datetime, timedelta
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.contrib.contenttypes.models import ContentType
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import RequestFactory, TestCase, override_settings
import pytz
from model_mommy import mommy
from model_mommy.recipe import Recipe
from model_reviews.models import ModelReview
from small_small_hr.forms import (
AnnualLeaveForm,
ApplyLeaveForm,
ApplyOverTimeForm,
FreeDayForm,
LeaveForm,
OverTimeForm,
RoleForm,
StaffDocumentForm,
StaffProfileAdminCreateForm,
StaffProfileAdminForm,
StaffProfileUserForm,
UserStaffDocumentForm,
)
from small_small_hr.models import Leave, OverTime, StaffProfile, get_taken_leave_days
from small_small_hr.serializers import StaffProfileSerializer
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
class TestForms(TestCase): # pylint: disable=too-many-public-methods
"""Test class for forms."""
def setUp(self):
"""Set up test class."""
self.factory = RequestFactory()
StaffProfile.objects.rebuild()
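# NOTE (assumption from the API used): StaffProfile appears to be an MPTT
# tree model; rebuild() recomputes the tree columns that model_mommy cannot
# populate, which is also why the recipes below pass lft=None and rght=None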
self.manager = mommy.make(
"auth.User", first_name="Jane", last_name="Ndoe", email="jane@example.com"
)
self.user = mommy.make(
"auth.User", first_name="Bob", last_name="Ndoe", email="bob@example.com"
)
manager_mommy = Recipe(StaffProfile, lft=None, rght=None, user=self.manager)
staff_mommy = Recipe(StaffProfile, lft=None, rght=None, user=self.user)
self.manager_profile = manager_mommy.make()
self.staffprofile = staff_mommy.make()
def test_annual_leave_form(self):
"""Test AnnualLeaveForm."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
data = {
"staff": staffprofile.id,
"year": 2018,
"leave_type": Leave.REGULAR,
"allowed_days": 21,
"carried_over_days": 10,
}
form = AnnualLeaveForm(data=data)
self.assertTrue(form.is_valid())
annual_leave = form.save()
self.assertEqual(staffprofile, annual_leave.staff)
self.assertEqual(2018, annual_leave.year)
self.assertEqual(21, annual_leave.allowed_days)
self.assertEqual(10, annual_leave.carried_over_days)
self.assertEqual(Leave.REGULAR, annual_leave.leave_type)
data2 = {
"staff": staffprofile.id,
"year": 2017,
"leave_type": Leave.REGULAR,
"allowed_days": 21,
"carried_over_days": 5,
}
form = AnnualLeaveForm(data=data2, instance=annual_leave)
self.assertTrue(form.is_valid())
form.save()
annual_leave.refresh_from_db()
self.assertEqual(staffprofile, annual_leave.staff)
self.assertEqual(2017, annual_leave.year)
self.assertEqual(21, annual_leave.allowed_days)
self.assertEqual(5, annual_leave.carried_over_days)
self.assertEqual(Leave.REGULAR, annual_leave.leave_type)
def test_role_form(self):
"""Test RoleForm."""
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
data = {"name": "Accountant", "description": "Keep accounts"}
form = RoleForm(data=data)
self.assertTrue(form.is_valid())
role = form.save()
self.assertEqual("Accountant", role.name)
self.assertEqual("Keep accounts", role.description)
def test_freeday_form(self):
"""Test FreeDayForm."""
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
data = {"name": "Mosh Day", "date": "1/1/2017"}
form = FreeDayForm(data=data)
self.assertTrue(form.is_valid())
free_day = form.save()
self.assertEqual("Mosh Day", free_day.name)
self.assertEqual(date(2017, 1, 1), free_day.date)
# has to be unique
form2 = FreeDayForm(data=data)
self.assertFalse(form2.is_valid())
self.assertEqual(1, len(form2.errors.keys()))
self.assertEqual(
"Free Day with this Date already exists.", form2.errors["date"][0]
)
def test_overtime_form_apply(self):
"""Test OverTimeForm."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 hours of overtime
start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 5, 6, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
data = {
"staff": staffprofile.id,
"date": start.date(),
"start": start.time(),
"end": end.time(),
"review_reason": "Extra work",
}
form = ApplyOverTimeForm(data=data)
self.assertTrue(form.is_valid())
overtime = form.save()
self.assertEqual(staffprofile, overtime.staff)
self.assertEqual(start.date(), overtime.date)
self.assertEqual(start.time(), overtime.start)
self.assertEqual(end.time(), overtime.end)
self.assertEqual(
timedelta(seconds=3600 * 6).seconds, overtime.get_duration().seconds
)
self.assertEqual("Extra work", overtime.review_reason)
self.assertEqual(OverTime.PENDING, overtime.review_status)
def test_overtime_form_apply_no_overlap(self):
"""Test no overlaps on OverTime."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 hours of overtime
start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 5, 6, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.OverTime",
start=start.time(),
end=end.time(),
review_status=OverTime.APPROVED,
date=start.date,
staff=staffprofile,
)
data = {
"staff": staffprofile.id,
"date": start.date(),
"start": start.time(),
"end": end.time(),
"review_reason": "Extra work",
}
form = ApplyOverTimeForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(3, len(form.errors.keys()))
self.assertEqual(
"you cannot have overlapping overtime hours on the same day",
form.errors["start"][0],
)
self.assertEqual(
"you cannot have overlapping overtime hours on the same day",
form.errors["date"][0],
)
self.assertEqual(
"you cannot have overlapping overtime hours on the same day",
form.errors["end"][0],
)
def test_overtime_form_process(self):
"""Test OverTimeForm."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 hours of overtime
start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 5, 6, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
data = {
"staff": staffprofile.id,
"date": start.date(),
"start": start.time(),
"end": end.time(),
"review_reason": "Extra work",
"review_status": OverTime.APPROVED,
}
form = OverTimeForm(data=data)
self.assertTrue(form.is_valid())
overtime = form.save()
self.assertEqual(staffprofile, overtime.staff)
self.assertEqual(start.date(), overtime.date)
self.assertEqual(start.time(), overtime.start)
self.assertEqual(end.time(), overtime.end)
self.assertEqual(
timedelta(seconds=3600 * 6).seconds, overtime.get_duration().seconds
)
self.assertEqual("Extra work", overtime.review_reason)
self.assertEqual(OverTime.APPROVED, overtime.review_status)
def test_overtime_form_process_with_overlap(self):
"""Test OverTimeForm with overlap for existing objects."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 1 hour of overtime
start = datetime(2017, 6, 5, 18, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 5, 19, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
# make sure object already exists
mommy.make(
"small_small_hr.OverTime",
start=start.time(),
end=end.time(),
review_status=OverTime.APPROVED,
date=start.date,
staff=staffprofile,
)
data = {
"staff": staffprofile.id,
"date": start.date(),
"start": start.time(),
"end": end.time(),
"review_reason": "Extra work",
"review_status": OverTime.REJECTED,
}
form = OverTimeForm(data=data)
self.assertTrue(form.is_valid())
overtime = form.save()
self.assertEqual(staffprofile, overtime.staff)
self.assertEqual(start.date(), overtime.date)
self.assertEqual(start.time(), overtime.start)
self.assertEqual(end.time(), overtime.end)
self.assertEqual(
timedelta(seconds=3600).seconds, overtime.get_duration().seconds
)
self.assertEqual("Extra work", overtime.review_reason)
self.assertEqual(OverTime.REJECTED, overtime.review_status)
def test_overtime_form_start_end(self):
"""Test OverTimeForm start end fields."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
start = datetime(2017, 6, 5, 6, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 5, 5, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
data = {
"staff": staffprofile.id,
"date": start.date(),
"start": start.time(),
"end": end.time(),
"review_reason": "Extra work",
}
form = OverTimeForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(1, len(form.errors.keys()))
self.assertEqual("end must be greater than start", form.errors["end"][0])
@override_settings(SSHR_DEFAULT_TIME=7)
def test_leaveform_apply(self):
"""Test LeaveForm apply for leave."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=12,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = ApplyLeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.REGULAR, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.PENDING, leave.review_status)
@override_settings(
SSHR_DEFAULT_TIME=7,
SSHR_ALLOW_OVERSUBSCRIBE=True,
SSHR_DAY_LEAVE_VALUES={
1: 1, # Monday
2: 1, # Tuesday
3: 1, # Wednesday
4: 1, # Thursday
5: 1, # Friday
6: 1, # Saturday
7: 1, # Sunday
},
)
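# with every ISO weekday (Mon=1 .. Sun=7) valued at 1 above, weekends count
# as full leave days, so 2017-06-01 through 2017-07-10 consumes 40 days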
def test_leave_oversubscribe(self):
"""Test leave oversubscribe works as expected."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 40 days of leave
start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=0,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Mini retirement",
}
form = ApplyLeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
# make it approved
obj_type = ContentType.objects.get_for_model(leave)
review = ModelReview.objects.get(content_type=obj_type, object_id=leave.id)
review.review_status = ModelReview.APPROVED
review.save()
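# NOTE (inferred from the assertions below): saving the approved ModelReview
# is expected to propagate APPROVED to the underlying Leave via
# django-model-reviews side effects, hence the refresh_from_db() call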
leave.refresh_from_db()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.REGULAR, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
self.assertEqual(timedelta(days=39).days, (leave.end - leave.start).days)
self.assertEqual("Mini retirement", leave.review_reason)
self.assertEqual(Leave.APPROVED, leave.review_status)
self.assertEqual(
40,
get_taken_leave_days(
staffprofile, Leave.APPROVED, Leave.REGULAR, 2017, 2017
),
)
self.assertEqual(-19, staffprofile.get_available_leave_days(year=2017))
@override_settings(
SSHR_DEFAULT_TIME=7,
SSHR_ALLOW_OVERSUBSCRIBE=False,
SSHR_DAY_LEAVE_VALUES={
1: 1, # Monday
2: 1, # Tuesday
3: 1, # Wednesday
4: 1, # Thursday
5: 1, # Friday
6: 1, # Saturday
7: 1, # Sunday
},
)
def test_leave_oversubscribe_off(self):
"""Test leave oversubscribe when SSHR_ALLOW_OVERSUBSCRIBE is False."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 40 days of leave
start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=0,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Mini retirement",
}
form = ApplyLeaveForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(2, len(form.errors.keys()))
self.assertEqual(
"Not enough leave days. Available leave days are 21.00",
form.errors["start"][0],
)
self.assertEqual(
"Not enough leave days. Available leave days are 21.00",
form.errors["end"][0],
)
@override_settings(SSHR_DEFAULT_TIME=7)
def test_one_day_leave(self):
"""Test application for one day leave."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 1 day of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=12,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = ApplyLeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.REGULAR, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
self.assertEqual(timedelta(days=0).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.PENDING, leave.review_status)
self.assertEqual(
1,
get_taken_leave_days(
staffprofile, Leave.PENDING, Leave.REGULAR, 2017, 2017
),
)
@override_settings(SSHR_DEFAULT_TIME=7)
def test_leaveform_no_overlap(self):
"""Test LeaveForm no overlap."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=12,
)
mommy.make(
"small_small_hr.Leave",
leave_type=Leave.REGULAR,
start=start,
end=end,
review_status=Leave.APPROVED,
staff=staffprofile,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = ApplyLeaveForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(2, len(form.errors.keys()))
self.assertEqual(
"you cannot have overlapping leave days", form.errors["start"][0]
)
self.assertEqual(
"you cannot have overlapping leave days", form.errors["end"][0]
)
@override_settings(SSHR_DEFAULT_TIME=7)
def test_leaveform_admin(self):
"""Test LeaveForm apply for leave."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=12,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
"review_status": Leave.APPROVED,
}
form = LeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.REGULAR, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.APPROVED, leave.review_status)
@override_settings(SSHR_DEFAULT_TIME=7)
def test_leaveform_process(self):
"""Test LeaveForm process."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=4,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
"review_status": Leave.REJECTED,
}
form = LeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.REGULAR, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.REJECTED, leave.review_status)
@override_settings(SSHR_DEFAULT_TIME=7)
def test_leaveform_process_with_overlap(self):
"""Test LeaveForm process works even if leave object exists."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
# make sure leave obj already exists for said dates
mommy.make(
"small_small_hr.Leave",
staff=staffprofile,
start=start,
end=end,
leave_type=Leave.REGULAR,
review_status=Leave.APPROVED,
)
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=4,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
"review_status": Leave.REJECTED,
}
form = LeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.REGULAR, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.REJECTED, leave.review_status)
@override_settings(SSHR_DEFAULT_TIME=7)
def test_sickleave_apply(self):
"""Test LeaveForm apply for sick leave."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.SICK,
carried_over_days=4,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.SICK,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = ApplyLeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.SICK, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.PENDING, leave.review_status)
@override_settings(SSHR_DEFAULT_TIME=7)
def test_sickleave_process(self):
"""Test LeaveForm process sick leave."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.SICK,
carried_over_days=4,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.SICK,
"start": start,
"end": end,
"review_reason": "Need a break",
"review_status": Leave.REJECTED,
}
form = LeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.SICK, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.REJECTED, leave.review_status)
def test_leaveform_start_end(self):
"""Test start and end."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# invalid range: end before start
start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.SICK,
carried_over_days=4,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.SICK,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = LeaveForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(1, len(form.errors.keys()))
self.assertEqual("end must be greater than start", form.errors["end"][0])
# end year and start year must be the same
end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
data2 = {
"staff": staffprofile.id,
"leave_type": Leave.SICK,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form2 = LeaveForm(data=data2)
self.assertFalse(form2.is_valid())
self.assertEqual(2, len(form2.errors.keys()))
self.assertEqual(
"start and end must be from the same year", form2.errors["start"][0]
)
self.assertEqual(
"start and end must be from the same year", form2.errors["end"][0]
)
@override_settings(SSHR_ALLOW_OVERSUBSCRIBE=False)
def test_leaveform_max_days(self):
"""Test leave days sufficient."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# more than a month of leave (exceeds the 21 allowed days)
start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 7, 10, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
allowed_days=21,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = LeaveForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(2, len(form.errors.keys()))
self.assertEqual(
"Not enough leave days. Available leave days are 21.00",
form.errors["start"][0],
)
self.assertEqual(
"Not enough leave days. Available leave days are 21.00",
form.errors["end"][0],
)
@override_settings(SSHR_ALLOW_OVERSUBSCRIBE=False)
def test_leaveform_max_sick_days(self):
"""Test sick days sufficient."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 16 days of leave (exceeds the 10 sick days)
start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 20, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.SICK,
carried_over_days=0,
allowed_days=10,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.SICK,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = LeaveForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(2, len(form.errors.keys()))
self.assertEqual(
"Not enough sick days. Available sick days are 10.00",
form.errors["start"][0],
)
self.assertEqual(
"Not enough sick days. Available sick days are 10.00", form.errors["end"][0]
)
@override_settings(PRIVATE_STORAGE_ROOT="/tmp/")
def test_staffdocumentform(self):
"""Test StaffDocumentForm."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
path = os.path.join(BASE_DIR, "tests", "fixtures", "contract.pdf")
with open(path, "r+b") as contract_file:
data = {
"staff": staffprofile.id,
"name": "Employment Contract",
"description": "This is the employment contract!",
"file": contract_file,
"public": True,
}
file_dict = {
"file": SimpleUploadedFile(
name=contract_file.name,
content=contract_file.read(),
content_type="application/pdf",
)
}
form = StaffDocumentForm(data, file_dict)
self.assertTrue(form.is_valid())
doc = form.save()
self.assertEqual(staffprofile, doc.staff)
self.assertEqual("Employment Contract", doc.name)
self.assertEqual(True, doc.public)
self.assertEqual("This is the employment contract!", doc.description)
with open(path, "r+b") as contract_file:
self.assertTrue(contract_file.read(), doc.file.read())
# on updating it, check that file is not required
data2 = {
"staff": staffprofile.id,
"name": "Employment Contract",
"description": "This is the employment contract!",
}
form2 = StaffDocumentForm(data=data2, instance=doc, request=request)
self.assertTrue(form2.is_valid())
@override_settings(PRIVATE_STORAGE_ROOT="/tmp/")
def test_userstaffdocumentform(self):
"""Test UserStaffDocumentForm."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = user
path = os.path.join(BASE_DIR, "tests", "fixtures", "contract.pdf")
with open(path, "r+b") as contract_file:
data = {
"staff": staffprofile.id,
"name": "Employment Contract",
"description": "This is the employment contract!",
"file": contract_file,
}
file_dict = {
"file": SimpleUploadedFile(
name=contract_file.name,
content=contract_file.read(),
content_type="application/pdf",
)
}
form = UserStaffDocumentForm(data=data, files=file_dict, request=request)
self.assertTrue(form.is_valid())
doc = form.save()
self.assertEqual(staffprofile, doc.staff)
self.assertEqual("Employment Contract", doc.name)
self.assertEqual(False, doc.public)
self.assertEqual("This is the employment contract!", doc.description)
with open(path, "r+b") as contract_file:
self.assertEqual(contract_file.read(), doc.file.read())
# on updating it, check that file is not required
data2 = {
"staff": staffprofile.id,
"name": "Employment Contract",
"description": "This is the employment contract!",
}
form2 = UserStaffDocumentForm(data=data2, instance=doc, request=request)
self.assertTrue(form2.is_valid())
def test_staff_profile_user_form(self):
"""Test StaffProfileUserForm."""
user = mommy.make("auth.User")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
path = os.path.join(BASE_DIR, "tests", "fixtures", "profile.png")
with open(path, "r+b") as image_file:
data = {
"first_name": "Bob",
"last_name": "Mbugua",
"id_number": "123456789",
"sex": StaffProfile.MALE,
"nhif": "111111",
"nssf": "222222",
"pin_number": "A0000000Y",
"emergency_contact_name": "Bob Father",
"emergency_contact_relationship": "Father",
"emergency_contact_number": "+254722111111",
"phone": "+254722111111",
"address": "This is the address.",
"birthday": "1996-01-27",
"image": image_file,
}
file_dict = {
"image": SimpleUploadedFile(
name=image_file.name,
content=image_file.read(),
content_type="image/png",
)
}
form = StaffProfileUserForm(
data=data, instance=staffprofile, request=request, files=file_dict
)
self.assertTrue(form.is_valid())
form.save()
user.refresh_from_db()
self.assertEqual("Bob Mbugua", user.staffprofile.get_name())
self.assertEqual(StaffProfile.MALE, staffprofile.sex)
self.assertEqual("+254722111111", staffprofile.phone.as_e164)
self.assertEqual("This is the address.", staffprofile.address)
self.assertEqual("1996-01-27", str(staffprofile.birthday))
self.assertEqual("123456789", staffprofile.data["id_number"])
self.assertEqual("111111", staffprofile.data["nhif"])
self.assertEqual("222222", staffprofile.data["nssf"])
self.assertEqual("A0000000Y", staffprofile.data["pin_number"])
self.assertEqual("Bob Father", staffprofile.data["emergency_contact_name"])
self.assertEqual(
"Father", staffprofile.data["emergency_contact_relationship"]
)
self.assertEqual(
"+254722111111", staffprofile.data["emergency_contact_number"]
)
with open(path, "r+b") as image_file:
self.assertEqual(image_file.read(), staffprofile.image.read())
def test_staffprofile_user_form_no_image(self):
"""Test StaffProfileUserForm image not required on update."""
user = mommy.make("auth.User")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
path = os.path.join(BASE_DIR, "tests", "fixtures", "profile.png")
with open(path, "r+b") as image_file:
data = {
"first_name": "Bob",
"last_name": "Mbugua",
"id_number": "123456789",
"sex": StaffProfile.MALE,
"nhif": "111111",
"nssf": "222222",
"pin_number": "A0000000Y",
"emergency_contact_name": "Bob Father",
"emergency_contact_relationship": "Father",
"emergency_contact_number": "+254722111111",
"phone": "+254722111111",
"address": "This is the address.",
"birthday": "1996-01-27",
"image": image_file,
}
file_dict = {
"image": SimpleUploadedFile(
name=image_file.name,
content=image_file.read(),
content_type="image/png",
)
}
form = StaffProfileUserForm(
data=data, instance=staffprofile, request=request, files=file_dict
)
self.assertTrue(form.is_valid())
form.save()
staffprofile.refresh_from_db()
data2 = {
"first_name": "Bobbie",
"last_name": "B",
"id_number": 6666,
}
form2 = StaffProfileUserForm(data=data2, instance=staffprofile, request=request)
self.assertTrue(form2.is_valid())
form2.save()
staffprofile.refresh_from_db()
self.assertEqual("Bobbie B", user.staffprofile.get_name())
def test_staff_profile_admin_create_form(self):
"""Test StaffProfileAdminCreateForm."""
user = mommy.make("auth.User")
manager = mommy.make("auth.User", username="manager")
managerprofile = mommy.make("small_small_hr.StaffProfile", user=manager)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
path = os.path.join(BASE_DIR, "tests", "fixtures", "profile.png")
with open(path, "r+b") as image_file:
data = {
"user": user.id,
"first_name": "Bob",
"last_name": "Mbugua",
"id_number": "123456789",
"sex": StaffProfile.MALE,
"nhif": "111111",
"nssf": "222222",
"pin_number": "A0000000Y",
"emergency_contact_name": "Bob Father",
"emergency_contact_number": "+254722111111",
"phone": "+254722111111",
"address": "This is the address.",
"birthday": "1996-01-27",
"leave_days": 21,
"sick_days": 9,
"overtime_allowed": True,
"start_date": "2017-09-25",
"end_date": "2018-12-31",
"image": image_file,
"supervisor": managerprofile.pk,
}
file_dict = {
"image": SimpleUploadedFile(
name=image_file.name,
content=image_file.read(),
content_type="image/png",
)
}
form = StaffProfileAdminCreateForm(
data=data, files=file_dict, request=request
)
self.assertTrue(form.is_valid())
staffprofile = form.save()
user.refresh_from_db()
self.assertEqual("Bob Mbugua", user.staffprofile.get_name())
self.assertEqual(StaffProfile.MALE, staffprofile.sex)
self.assertEqual("+254722111111", staffprofile.phone.as_e164)
self.assertEqual(21, staffprofile.leave_days)
self.assertEqual(9, staffprofile.sick_days)
self.assertEqual(True, staffprofile.overtime_allowed)
self.assertEqual(managerprofile, staffprofile.supervisor)
self.assertEqual("This is the address.", staffprofile.address)
self.assertEqual("1996-01-27", str(staffprofile.birthday))
self.assertEqual("2017-09-25", str(staffprofile.start_date))
self.assertEqual("2018-12-31", str(staffprofile.end_date))
self.assertEqual("123456789", staffprofile.data["id_number"])
self.assertEqual("111111", staffprofile.data["nhif"])
self.assertEqual("222222", staffprofile.data["nssf"])
self.assertEqual("A0000000Y", staffprofile.data["pin_number"])
self.assertEqual("Bob Father", staffprofile.data["emergency_contact_name"])
self.assertEqual(
"+254722111111", staffprofile.data["emergency_contact_number"]
)
with open(path, "r+b") as image_file:
self.assertEqual(image_file.read(), staffprofile.image.read())
def test_staff_profile_admin_form(self):
"""Test StaffProfileAdminForm."""
managerprofile = self.manager_profile
user = self.user
staffprofile = self.staffprofile
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
path = os.path.join(BASE_DIR, "tests", "fixtures", "profile.png")
with open(path, "r+b") as image_file:
data = {
"user": user.id,
"first_name": "Bob",
"last_name": "Mbugua",
"id_number": "123456789",
"sex": StaffProfile.MALE,
"nhif": "111111",
"nssf": "222222",
"pin_number": "A0000000Y",
"emergency_contact_name": "Bob Father",
"emergency_contact_number": "+254722111111",
"phone": "+254722111111",
"address": "This is the address.",
"birthday": "1996-01-27",
"leave_days": 21,
"sick_days": 9,
"overtime_allowed": True,
"start_date": "2017-09-25",
"end_date": "2018-12-31",
"image": image_file,
"supervisor": managerprofile.pk,
}
file_dict = {
"image": SimpleUploadedFile(
name=image_file.name,
content=image_file.read(),
content_type="image/png",
)
}
form = StaffProfileAdminForm(
data=data, instance=staffprofile, request=request, files=file_dict
)
self.assertTrue(form.is_valid())
form.save()
user.refresh_from_db()
self.assertEqual("Bob Mbugua", user.staffprofile.get_name())
self.assertEqual(StaffProfile.MALE, staffprofile.sex)
self.assertEqual("+254722111111", staffprofile.phone.as_e164)
self.assertEqual(21, staffprofile.leave_days)
self.assertEqual(9, staffprofile.sick_days)
self.assertEqual(True, staffprofile.overtime_allowed)
self.assertEqual(managerprofile, staffprofile.supervisor)
self.assertEqual("This is the address.", staffprofile.address)
self.assertEqual("1996-01-27", str(staffprofile.birthday))
self.assertEqual("2017-09-25", str(staffprofile.start_date))
self.assertEqual("2018-12-31", str(staffprofile.end_date))
self.assertEqual("123456789", staffprofile.data["id_number"])
self.assertEqual("111111", staffprofile.data["nhif"])
self.assertEqual("222222", staffprofile.data["nssf"])
self.assertEqual("A0000000Y", staffprofile.data["pin_number"])
self.assertEqual("Bob Father", staffprofile.data["emergency_contact_name"])
self.assertEqual(
"+254722111111", staffprofile.data["emergency_contact_number"]
)
with open(path, "r+b") as image_file:
self.assertEqual(image_file.read(), staffprofile.image.read())
def test_staffprofile_admin_form_no_image(self):
"""Test StaffProfileAdminForm image not required when editting."""
user = mommy.make("auth.User")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
path = os.path.join(BASE_DIR, "tests", "fixtures", "profile.png")
with open(path, "r+b") as image_file:
data = {
"user": user.id,
"first_name": "Bob",
"last_name": "Mbugua",
"id_number": "123456789",
"sex": StaffProfile.MALE,
"nhif": "111111",
"nssf": "222222",
"pin_number": "A0000000Y",
"emergency_contact_name": "Bob Father",
"emergency_contact_number": "+254722111111",
"phone": "+254722111111",
"address": "This is the address.",
"birthday": "1996-01-27",
"leave_days": 21,
"sick_days": 9,
"overtime_allowed": True,
"start_date": "2017-09-25",
"end_date": "2018-12-31",
"image": image_file,
}
file_dict = {
"image": SimpleUploadedFile(
name=image_file.name,
content=image_file.read(),
content_type="image/png",
)
}
form = StaffProfileAdminForm(
data=data, instance=staffprofile, request=request, files=file_dict
)
self.assertTrue(form.is_valid())
form.save()
staffprofile.refresh_from_db()
data2 = {
"user": user.id,
"first_name": "Bobbie",
"last_name": "B",
"id_number": 6666,
}
form2 = StaffProfileAdminForm(
data=data2, instance=staffprofile, request=request
)
self.assertTrue(form2.is_valid())
form2.save()
staffprofile.refresh_from_db()
self.assertEqual("Bobbie B", user.staffprofile.get_name())
def test_staffprofile_unique_pin_number(self):
"""Test unique pin_number."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.data["id_number"] = "123456789"
staffprofile.data["pin_number"] = "123456789"
staffprofile.save()
user2 = mommy.make("auth.User", first_name="Kyle", last_name="Ndoe")
staffprofile2 = mommy.make("small_small_hr.StaffProfile", user=user2)
staffprofile2.data["id_number"] = "9999999"
staffprofile2.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
data = StaffProfileSerializer(staffprofile2).data
data["pin_number"] = "123456789"
form = StaffProfileAdminForm(data=data, instance=staffprofile2, request=request)
self.assertFalse(form.is_valid())
self.assertEqual(1, len(form.errors.keys()))
self.assertEqual(
"This PIN number is already in use.", form.errors["pin_number"][0]
)
def test_staffprofile_unique_id_number(self):
"""Test unique id_number."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.data["id_number"] = "123456789"
staffprofile.save()
user2 = mommy.make("auth.User", first_name="Kyle", last_name="Ndoe")
staffprofile2 = mommy.make("small_small_hr.StaffProfile", user=user2)
staffprofile2.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
data = StaffProfileSerializer(staffprofile2).data
data["id_number"] = "123456789"
form = StaffProfileAdminForm(data=data, instance=staffprofile2, request=request)
self.assertFalse(form.is_valid())
self.assertEqual(1, len(form.errors.keys()))
self.assertEqual(
"This id number is already in use.", form.errors["id_number"][0]
)
def test_staffprofile_unique_nssf(self):
"""Test unique NSSF."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.data["id_number"] = "123456789"
staffprofile.data["nssf"] = "123456789"
staffprofile.save()
user2 = mommy.make("auth.User", first_name="Kyle", last_name="Ndoe")
staffprofile2 = mommy.make("small_small_hr.StaffProfile", user=user2)
staffprofile2.data["id_number"] = "9999999"
staffprofile2.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
data = StaffProfileSerializer(staffprofile2).data
data["nssf"] = "123456789"
form = StaffProfileAdminForm(data=data, instance=staffprofile2, request=request)
self.assertFalse(form.is_valid())
self.assertEqual(1, len(form.errors.keys()))
self.assertEqual("This NSSF number is already in use.", form.errors["nssf"][0])
def test_staffprofile_unique_nhif(self):
"""Test unique NHIF."""
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.data["id_number"] = "123456789"
staffprofile.data["nhif"] = "123456789"
staffprofile.save()
user2 = mommy.make("auth.User", first_name="Kyle", last_name="Ndoe")
staffprofile2 = mommy.make("small_small_hr.StaffProfile", user=user2)
staffprofile2.data["id_number"] = "9999999"
staffprofile2.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
data = StaffProfileSerializer(staffprofile2).data
data["nhif"] = "123456789"
form = StaffProfileAdminForm(data=data, instance=staffprofile2, request=request)
self.assertFalse(form.is_valid())
self.assertEqual(1, len(form.errors.keys()))
self.assertEqual("This NHIF number is already in use.", form.errors["nhif"][0])

64d0091141c9a2d04c4e02395c0fd4752f5787a3 | 6,882 | py | Python | players/agents.py | Abdelrhman-Yasser/Risk-Game | db76437a01bbda984ce712629121dbf17dceb848 | ["MIT"] | 1 | 2020-12-22T07:16:00.000Z | 2020-12-22T07:16:00.000Z | players/agents.py | Abdelrhman-Yasser/Risk-Game | db76437a01bbda984ce712629121dbf17dceb848 | ["MIT"] | null | null | null | players/agents.py | Abdelrhman-Yasser/Risk-Game | db76437a01bbda984ce712629121dbf17dceb848 | ["MIT"] | 1 | 2019-02-21T13:21:45.000Z | 2019-02-21T13:21:45.000Z |

from players.player import Player
from environment.GameEnums import AgentType,MoveType
from players.state import EnvState
import sys
import copy
class AggressiveAgent(Player):
def __init__(self, player_id):
Player.__init__(self, player_id)
self.__player_type = AgentType.AGGRESSIVE
@property
def player_type(self):
return self.__player_type
def expand(self, state):
state = self.deploy_reserve_troops(state)
state = state.expand_same(MoveType.MARCH, self.player_id)
return self.invade(state)
def deploy_reserve_troops(self, state):
try:
env_c = copy.deepcopy(state.env)
max_troops_country_id = -1
max_troops_count = 0
for country in env_c.country_list:
if country.owner_id == self.player_id and country.troops_count > max_troops_count:
max_troops_count = country.troops_count
max_troops_country_id = country.id
env_c.deploy_reserve_troops(self.player_id, max_troops_country_id)
return EnvState(env_c, state, MoveType.DEPLOY, self.player_id)
except Exception as e:
return state.expand_same(MoveType.DEPLOY, self.player_id)
def march_troops(self, state):
return state.expand_same(MoveType.MARCH, self.player_id)
def __get_most_damage(self, env):
max_enemy = 0
max_enemy_id = -1
own_id = -1
for border in env.border_list:
country1 = env.country_list[border.country1 - 1]
country2 = env.country_list[border.country2 - 1]
if country1.owner_id == self.player_id and country2.owner_id == self.player_id:
continue
if country1.owner_id == self.player_id and \
country1.troops_count - country2.troops_count > 1 and \
country2.troops_count > max_enemy:
max_enemy = country2.troops_count
max_enemy_id = country2.id
own_id = country1.id
if country2.owner_id == self.player_id and \
country2.troops_count - country1.troops_count > 1 and \
country1.troops_count > max_enemy:
max_enemy = country1.troops_count
max_enemy_id = country1.id
own_id = country2.id
return own_id, max_enemy_id
def invade(self, state):
try:
env_c = copy.deepcopy(state.env)
from_country, to_country = self.__get_most_damage(env_c)
env_c.invade(self.player_id, from_country, to_country, 0.9)
return EnvState(env_c, state, MoveType.INVADE, self.player_id)
except Exception as e:
return state.expand_same(MoveType.INVADE, self.player_id)
class PacifistAgent(Player):
def __init__(self, player_id):
Player.__init__(self, player_id)
self.__player_type = AgentType.PACIFIST
@property
def player_type(self):
return self.__player_type
def expand(self, state):
state = self.deploy_reserve_troops(state)
state = self.march_troops(state)
return self.invade(state)
def deploy_reserve_troops(self, state):
try:
env_c = copy.deepcopy(state.env)
min_troops_country_id = -1
min_troops_count = sys.maxsize
for country in env_c.country_list:
if country.owner_id == self.player_id and country.troops_count < min_troops_count:
min_troops_count = country.troops_count
min_troops_country_id = country.id
env_c.deploy_reserve_troops(self.player_id, min_troops_country_id)
return EnvState(env_c, state, MoveType.DEPLOY, self.player_id)
except Exception as e:
return state.expand_same(MoveType.DEPLOY, self.player_id)
def march_troops(self, state):
return state.expand_same(MoveType.MARCH, self.player_id)
def __get_least_damage(self, env):
min_enemy = sys.maxsize
min_enemy_id = -1
own_id = -1
for border in env.border_list:
country1 = env.country_list[border.country1 - 1]
country2 = env.country_list[border.country2 - 1]
if country1.owner_id == self.player_id and country2.owner_id == self.player_id:
continue
if country1.owner_id == self.player_id and \
country1.troops_count - country2.troops_count > 1 and \
country2.troops_count < min_enemy:
min_enemy = country2.troops_count
min_enemy_id = country2.id
own_id = country1.id
if country2.owner_id == self.player_id and \
country2.troops_count - country1.troops_count > 1 and\
country1.troops_count < min_enemy:
min_enemy = country1.troops_count
min_enemy_id = country1.id
own_id = country2.id
return own_id, min_enemy_id
def invade(self, state):
try:
env_c = copy.deepcopy(state.env)
from_country, to_country = self.__get_least_damage(env_c)
env_c.invade(self.player_id, from_country, to_country, 0.9)
return EnvState(env_c, state, MoveType.INVADE, self.player_id)
except Exception as e:
return state.expand_same(MoveType.INVADE, self.player_id)
class PassiveAgent(Player):
def __init__(self, player_id):
Player.__init__(self, player_id)
self.__player_type = AgentType.PASSIVE
@property
def player_type(self):
return self.__player_type
def expand(self, state):
state = self.deploy_reserve_troops(state)
state = self.march_troops(state)
return self.invade(state)
def deploy_reserve_troops(self, state):
try:
env_c = copy.deepcopy(state.env)
min_troops_country_id = -1
min_troops_count = sys.maxsize
for country in env_c.country_list:
if country.owner_id == self.player_id and country.troops_count < min_troops_count:
min_troops_count = country.troops_count
min_troops_country_id = country.id
if min_troops_country_id == -1:
return state.expand_same(MoveType.DEPLOY, self.player_id)
env_c.deploy_reserve_troops(self.player_id, min_troops_country_id)
return EnvState(env_c, state, MoveType.DEPLOY, self.player_id)
except Exception as e:
return state.expand_same(MoveType.DEPLOY, self.player_id)
def march_troops(self, state):
return state.expand_same(MoveType.MARCH, self.player_id)
def invade(self, state):
return state.expand_same(MoveType.INVADE, self.player_id)
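# ---------------------------------------------------------------------------
# Minimal sketch (added for illustration) of the border-scan pattern used by
# __get_most_damage / __get_least_damage above, with plain stand-ins instead
# of the real environment classes; Country and Border here are hypothetical.
from collections import namedtuple

Country = namedtuple("Country", "id owner_id troops_count")
Border = namedtuple("Border", "country1 country2")


def pick_target(countries, borders, player_id, prefer_max=True):
    """Return (own_id, enemy_id) for the best attackable neighbour, or (-1, -1)."""
    best = None  # (enemy_troops, own_id, enemy_id)
    for b in borders:
        c1, c2 = countries[b.country1 - 1], countries[b.country2 - 1]
        for own, enemy in ((c1, c2), (c2, c1)):
            if own.owner_id != player_id or enemy.owner_id == player_id:
                continue
            if own.troops_count - enemy.troops_count <= 1:
                continue  # mirror the "> 1" safety margin used above
            key = enemy.troops_count
            if best is None:
                best = (key, own.id, enemy.id)
            elif (key > best[0]) if prefer_max else (key < best[0]):
                best = (key, own.id, enemy.id)
    return (best[1], best[2]) if best else (-1, -1)


# An aggressive agent attacks the strongest beatable neighbour:
_countries = [Country(1, 7, 10), Country(2, 9, 3), Country(3, 9, 6)]
_borders = [Border(1, 2), Border(1, 3)]
assert pick_target(_countries, _borders, player_id=7) == (1, 3)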

b39a7d185847b06b257bfd909f00430aacbe4b57 | 339 | py | Python | arithmetic.py | GarvTambi/OpenCV-Beginners-to-Expert | 1b07bc264546d91f8c1644c9ba31a5ae745b4064 | ["MIT"] | 2 | 2019-11-04T18:10:19.000Z | 2019-12-11T12:41:39.000Z | arithmetic.py | GarvTambi/OpenCV-Beginners-to-Expert | 1b07bc264546d91f8c1644c9ba31a5ae745b4064 | ["MIT"] | null | null | null | arithmetic.py | GarvTambi/OpenCV-Beginners-to-Expert | 1b07bc264546d91f8c1644c9ba31a5ae745b4064 | ["MIT"] | null | null | null |

import numpy as np
import cv2
print("max of 255 by cv2: {}".format(cv2.add(np.uint8([200]), np.uint8([100]))))
print("max of 255 by cv2: {}".format(cv2.subtract(np.uint8([50]), np.uint8([100]))))
print("max of 255 by np: {}".format(np.uint8([200])+ np.uint8([100])))
print("max of 255 by np: {}".format(np.uint8([50])- np.uint8([100])))

b3c40bbcc4a8ef44c80eae0d2e22a52fde6bff26 | 2,925 | py | Python | metarecord/migrations/0044_user_relation_texts.py | kerkkoheiskanen/helerm | bdaf801a940d42325a1076b42bb0edef831fbac9 | ["MIT"] | 2 | 2017-04-21T15:36:23.000Z | 2020-12-04T09:32:39.000Z | metarecord/migrations/0044_user_relation_texts.py | kerkkoheiskanen/helerm | bdaf801a940d42325a1076b42bb0edef831fbac9 | ["MIT"] | 168 | 2016-10-05T12:58:41.000Z | 2021-08-31T14:29:56.000Z | metarecord/migrations/0044_user_relation_texts.py | kerkkoheiskanen/helerm | bdaf801a940d42325a1076b42bb0edef831fbac9 | ["MIT"] | 7 | 2016-10-13T12:51:36.000Z | 2021-01-21T13:05:04.000Z |

# Generated by Django 2.2.12 on 2020-04-28 10:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('metarecord', '0043_add_name_and_help_text_to_attribute_value'),
]
operations = [
migrations.AddField(
model_name='action',
name='_created_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='created by (text)'),
),
migrations.AddField(
model_name='action',
name='_modified_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='modified by (text)'),
),
migrations.AddField(
model_name='bulkupdate',
name='_approved_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='approved by (text)'),
),
migrations.AddField(
model_name='bulkupdate',
name='_created_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='created by (text)'),
),
migrations.AddField(
model_name='bulkupdate',
name='_modified_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='modified by (text)'),
),
migrations.AddField(
model_name='function',
name='_created_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='created by (text)'),
),
migrations.AddField(
model_name='function',
name='_modified_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='modified by (text)'),
),
migrations.AddField(
model_name='metadataversion',
name='_modified_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='modified by (text)'),
),
migrations.AddField(
model_name='phase',
name='_created_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='created by (text)'),
),
migrations.AddField(
model_name='phase',
name='_modified_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='modified by (text)'),
),
migrations.AddField(
model_name='record',
name='_created_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='created by (text)'),
),
migrations.AddField(
model_name='record',
name='_modified_by',
field=models.CharField(blank=True, editable=False, max_length=200, verbose_name='modified by (text)'),
),
]
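# ---------------------------------------------------------------------------
# Context sketch (hypothetical, kept as comments since migrations should not
# carry stray code): after applying this migration, each affected model gains
# audit-text columns of roughly this shape. "ExampleRecord" is illustrative,
# not a metarecord model.
#
#   from django.db import models
#
#   class ExampleRecord(models.Model):
#       _created_by = models.CharField(
#           blank=True, editable=False, max_length=200,
#           verbose_name='created by (text)')
#       _modified_by = models.CharField(
#           blank=True, editable=False, max_length=200,
#           verbose_name='modified by (text)')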

b3e0509a0be75b14e728cc8f03019113dfe2afe5 | 165 | py | Python | Codefights/arcade/intro/level-9/38.growingPlant/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | ["MIT"] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codefights/arcade/intro/level-9/38.growingPlant/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | ["MIT"] | null | null | null | Codefights/arcade/intro/level-9/38.growingPlant/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | ["MIT"] | null | null | null |

# Python3
from math import ceil
def growingPlant(upSpeed, downSpeed, desiredHeight):
return max(1, ceil((desiredHeight - upSpeed) / (upSpeed - downSpeed)) + 1)
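# ---------------------------------------------------------------------------
# Hedged usage checks (added for illustration): the plant nets
# (upSpeed - downSpeed) per full day and only needs one final up-phase to
# reach the target, hence the closed form above.
if __name__ == '__main__':
    assert growingPlant(100, 10, 910) == 10  # 9 full days + final growth day
    assert growingPlant(10, 9, 4) == 1       # target reached within day one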

b3e8e783f9288d1370c52f301aa50a02a3f13def | 2,528 | py | Python | kotti/tests/url_normalizer.py | mete0r/Kotti | e89103cc57d5d2af8d60eb8208ae9d04c068f6e7 | ["Naumen", "Condor-1.1", "MS-PL"] | null | null | null | kotti/tests/url_normalizer.py | mete0r/Kotti | e89103cc57d5d2af8d60eb8208ae9d04c068f6e7 | ["Naumen", "Condor-1.1", "MS-PL"] | null | null | null | kotti/tests/url_normalizer.py | mete0r/Kotti | e89103cc57d5d2af8d60eb8208ae9d04c068f6e7 | ["Naumen", "Condor-1.1", "MS-PL"] | null | null | null |

# encoding=utf-8
from kotti import get_settings
from kotti.url_normalizer import url_normalizer
class URLNormalizerTests:
def test_normalizer(self):
assert url_normalizer(u'simpleandsafe') == u'simpleandsafe'
assert url_normalizer(u' Whitespace and capital Letters ') == \
u'whitespace-and-capital-letters'
assert url_normalizer(u">here's another!") == u'heres-another'
assert url_normalizer(u">>>here'!--s yet another!!!") == \
u"here-s-yet-another"
assert url_normalizer(u"Doe, Joe") == u"doe-joe"
assert url_normalizer(u"umläut.doc") == u"umläut.doc"
assert url_normalizer(u"ZAŻÓŁĆ GĘŚLĄ JAŹŃ") == u"zażółć-gęślą-jaźń"
assert url_normalizer(u"zażółć gęślą jaźń") == u"zażółć-gęślą-jaźń"
assert url_normalizer(u'quote-this') == u'quote-this'
assert url_normalizer(u"quote 'this'!") == u"quote-this"
assert url_normalizer(u"I'm not a FILE.txt") == u"im-not-a-file.txt"
assert url_normalizer(u"I'm a big file.TXT") == u"im-a-big-file.txt"
assert url_normalizer(u"rest `n` peace") == u"rest-n-peace"
assert len(url_normalizer(u"aa" * 2000)) == 255
assert url_normalizer(u"short-hello-version", max_length=10) == u"short"
def test_normalizer_map_non_ascii_characters(self):
get_settings()['kotti.url_normalizer.map_non_ascii_characters'] = True
assert url_normalizer(u'simpleandsafe') == u'simpleandsafe'
assert url_normalizer(u' Whitespace and capital Letters ') == \
u'whitespace-and-capital-letters'
assert url_normalizer(u">here's another!") == u'heres-another'
assert url_normalizer(u">>>here'!--s yet another!!!") == \
u"here-s-yet-another"
assert url_normalizer(u"Doe, Joe") == u"doe-joe"
assert url_normalizer(u"umläut.doc") == u"umlaut.doc"
assert url_normalizer(u"ZAŻÓŁĆ GĘŚLĄ JAŹŃ") == u"zazolc-gesla-jazn"
assert url_normalizer(u"zażółć gęślą jaźń") == u"zazolc-gesla-jazn"
assert url_normalizer(u'quote-this') == u'quote-this'
assert url_normalizer(u"quote 'this'!") == u"quote-this"
assert url_normalizer(u"I'm not a FILE.txt") == u"im-not-a-file.txt"
assert url_normalizer(u"I'm a big file.TXT") == u"im-a-big-file.txt"
assert url_normalizer(u"rest `n` peace") == u"rest-n-peace"
assert len(url_normalizer(u"aa" * 2000)) == 255
assert url_normalizer(u"short-hello-version", max_length=10) == u"short"

b3eb64d596e35a2f876481a22fb0ec7965f37824 | 30,589 | py | Python | sdk/python/pulumi_oci/email/outputs.py | EladGabay/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | ["ECL-2.0", "Apache-2.0"] | 5 | 2021-08-17T11:14:46.000Z | 2021-12-31T02:07:03.000Z | sdk/python/pulumi_oci/email/outputs.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | ["ECL-2.0", "Apache-2.0"] | 1 | 2021-09-06T11:21:29.000Z | 2021-09-06T11:21:29.000Z | sdk/python/pulumi_oci/email/outputs.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | ["ECL-2.0", "Apache-2.0"] | 2 | 2021-08-24T23:31:30.000Z | 2022-01-02T19:26:54.000Z |

# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetDkimsDkimCollectionResult',
'GetDkimsDkimCollectionItemResult',
'GetDkimsFilterResult',
'GetEmailDomainsEmailDomainCollectionResult',
'GetEmailDomainsEmailDomainCollectionItemResult',
'GetEmailDomainsFilterResult',
'GetSendersFilterResult',
'GetSendersSenderResult',
'GetSuppressionsFilterResult',
'GetSuppressionsSuppressionResult',
]
@pulumi.output_type
class GetDkimsDkimCollectionResult(dict):
def __init__(__self__, *,
items: Sequence['outputs.GetDkimsDkimCollectionItemResult']):
pulumi.set(__self__, "items", items)
@property
@pulumi.getter
def items(self) -> Sequence['outputs.GetDkimsDkimCollectionItemResult']:
return pulumi.get(self, "items")
@pulumi.output_type
class GetDkimsDkimCollectionItemResult(dict):
def __init__(__self__, *,
cname_record_value: str,
compartment_id: str,
defined_tags: Mapping[str, Any],
description: str,
dns_subdomain_name: str,
email_domain_id: str,
freeform_tags: Mapping[str, Any],
id: str,
lifecycle_details: str,
name: str,
state: str,
system_tags: Mapping[str, Any],
time_created: str,
time_updated: str,
txt_record_value: str):
"""
:param str cname_record_value: The DNS CNAME record value to provision to the DKIM DNS subdomain, when using the CNAME method for DKIM setup (preferred).
:param str compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment that contains this DKIM.
:param Mapping[str, Any] defined_tags: Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param str description: The description of the DKIM. Avoid entering confidential information.
:param str dns_subdomain_name: The name of the DNS subdomain that must be provisioned to enable email recipients to verify DKIM signatures. It is usually created with a CNAME record set to the cnameRecordValue
:param str email_domain_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the email domain to which this DKIM belongs.
:param Mapping[str, Any] freeform_tags: Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param str id: A filter to only return resources that match the given id exactly.
:param str lifecycle_details: A message describing the current state in more detail. For example, can be used to provide actionable information for a resource.
:param str name: A filter to only return resources that match the given name exactly.
:param str state: Filter returned list by specified lifecycle state. This parameter is case-insensitive.
:param Mapping[str, Any] system_tags: Usage of system tag keys. These predefined keys are scoped to namespaces. Example: `{"orcl-cloud.free-tier-retained": "true"}`
:param str time_created: The time the DKIM was created. Times are expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format, "YYYY-MM-ddThh:mmZ". Example: `2021-02-12T22:47:12.613Z`
:param str time_updated: The time of the last change to the DKIM configuration, due to a state change or an update operation. Times are expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format, "YYYY-MM-ddThh:mmZ".
:param str txt_record_value: The DNS TXT record value to provision to the DKIM DNS subdomain in place of using a CNAME record. This is used in cases where a CNAME cannot be used, such as when the cnameRecordValue would exceed the maximum length for a DNS entry. This can also be used by customers who have an existing procedure to directly provision TXT records for DKIM. Be aware that many DNS APIs will require you to break this string into segments of less than 255 characters.
"""
pulumi.set(__self__, "cname_record_value", cname_record_value)
pulumi.set(__self__, "compartment_id", compartment_id)
pulumi.set(__self__, "defined_tags", defined_tags)
pulumi.set(__self__, "description", description)
pulumi.set(__self__, "dns_subdomain_name", dns_subdomain_name)
pulumi.set(__self__, "email_domain_id", email_domain_id)
pulumi.set(__self__, "freeform_tags", freeform_tags)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "lifecycle_details", lifecycle_details)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "system_tags", system_tags)
pulumi.set(__self__, "time_created", time_created)
pulumi.set(__self__, "time_updated", time_updated)
pulumi.set(__self__, "txt_record_value", txt_record_value)
@property
@pulumi.getter(name="cnameRecordValue")
def cname_record_value(self) -> str:
"""
The DNS CNAME record value to provision to the DKIM DNS subdomain, when using the CNAME method for DKIM setup (preferred).
"""
return pulumi.get(self, "cname_record_value")
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> str:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment that contains this DKIM.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Mapping[str, Any]:
"""
Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter
def description(self) -> str:
"""
The description of the DKIM. Avoid entering confidential information.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="dnsSubdomainName")
def dns_subdomain_name(self) -> str:
"""
The name of the DNS subdomain that must be provisioned to enable email recipients to verify DKIM signatures. It is usually created with a CNAME record set to the cnameRecordValue
"""
return pulumi.get(self, "dns_subdomain_name")
@property
@pulumi.getter(name="emailDomainId")
def email_domain_id(self) -> str:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the email domain to which this DKIM belongs.
"""
return pulumi.get(self, "email_domain_id")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Mapping[str, Any]:
"""
Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter
def id(self) -> str:
"""
A filter to only return resources that match the given id exactly.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="lifecycleDetails")
def lifecycle_details(self) -> str:
"""
A message describing the current state in more detail. For example, can be used to provide actionable information for a resource.
"""
return pulumi.get(self, "lifecycle_details")
@property
@pulumi.getter
def name(self) -> str:
"""
A filter to only return resources that match the given name exactly.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def state(self) -> str:
"""
Filter returned list by specified lifecycle state. This parameter is case-insensitive.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="systemTags")
def system_tags(self) -> Mapping[str, Any]:
"""
Usage of system tag keys. These predefined keys are scoped to namespaces. Example: `{"orcl-cloud.free-tier-retained": "true"}`
"""
return pulumi.get(self, "system_tags")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> str:
"""
The time the DKIM was created. Times are expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format, "YYYY-MM-ddThh:mmZ". Example: `2021-02-12T22:47:12.613Z`
"""
return pulumi.get(self, "time_created")
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> str:
"""
The time of the last change to the DKIM configuration, due to a state change or an update operation. Times are expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format, "YYYY-MM-ddThh:mmZ".
"""
return pulumi.get(self, "time_updated")
@property
@pulumi.getter(name="txtRecordValue")
def txt_record_value(self) -> str:
"""
The DNS TXT record value to provision to the DKIM DNS subdomain in place of using a CNAME record. This is used in cases where a CNAME cannot be used, such as when the cnameRecordValue would exceed the maximum length for a DNS entry. This can also be used by customers who have an existing procedure to directly provision TXT records for DKIM. Be aware that many DNS APIs will require you to break this string into segments of less than 255 characters.
"""
return pulumi.get(self, "txt_record_value")
@pulumi.output_type
class GetDkimsFilterResult(dict):
def __init__(__self__, *,
name: str,
values: Sequence[str],
regex: Optional[bool] = None):
"""
:param str name: A filter to only return resources that match the given name exactly.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "values", values)
if regex is not None:
pulumi.set(__self__, "regex", regex)
@property
@pulumi.getter
def name(self) -> str:
"""
A filter to only return resources that match the given name exactly.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def values(self) -> Sequence[str]:
return pulumi.get(self, "values")
@property
@pulumi.getter
def regex(self) -> Optional[bool]:
return pulumi.get(self, "regex")
@pulumi.output_type
class GetEmailDomainsEmailDomainCollectionResult(dict):
def __init__(__self__, *,
items: Sequence['outputs.GetEmailDomainsEmailDomainCollectionItemResult']):
pulumi.set(__self__, "items", items)
@property
@pulumi.getter
def items(self) -> Sequence['outputs.GetEmailDomainsEmailDomainCollectionItemResult']:
return pulumi.get(self, "items")
@pulumi.output_type
class GetEmailDomainsEmailDomainCollectionItemResult(dict):
def __init__(__self__, *,
active_dkim_id: str,
compartment_id: str,
defined_tags: Mapping[str, Any],
description: str,
freeform_tags: Mapping[str, Any],
id: str,
is_spf: bool,
name: str,
state: str,
system_tags: Mapping[str, Any],
time_created: str):
"""
:param str active_dkim_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the DKIM key that will be used to sign mail sent from this email domain.
:param str compartment_id: The OCID for the compartment.
:param Mapping[str, Any] defined_tags: Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param str description: The description of an email domain.
:param Mapping[str, Any] freeform_tags: Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param str id: A filter to only return resources that match the given id exactly.
:param bool is_spf: Value of the SPF field. For more information about SPF, please see [SPF Authentication](https://docs.cloud.oracle.com/iaas/Content/Email/Concepts/overview.htm#components).
:param str name: A filter to only return resources that match the given name exactly.
:param str state: Filter returned list by specified lifecycle state. This parameter is case-insensitive.
:param Mapping[str, Any] system_tags: Usage of system tag keys. These predefined keys are scoped to namespaces. Example: `{"orcl-cloud.free-tier-retained": "true"}`
:param str time_created: The time the email domain was created, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format, "YYYY-MM-ddThh:mmZ". Example: `2021-02-12T22:47:12.613Z`
"""
pulumi.set(__self__, "active_dkim_id", active_dkim_id)
pulumi.set(__self__, "compartment_id", compartment_id)
pulumi.set(__self__, "defined_tags", defined_tags)
pulumi.set(__self__, "description", description)
pulumi.set(__self__, "freeform_tags", freeform_tags)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "is_spf", is_spf)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "system_tags", system_tags)
pulumi.set(__self__, "time_created", time_created)
@property
@pulumi.getter(name="activeDkimId")
def active_dkim_id(self) -> str:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the DKIM key that will be used to sign mail sent from this email domain.
"""
return pulumi.get(self, "active_dkim_id")
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> str:
"""
The OCID for the compartment.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Mapping[str, Any]:
"""
Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter
def description(self) -> str:
"""
The description of an email domain.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Mapping[str, Any]:
"""
Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter
def id(self) -> str:
"""
A filter to only return resources that match the given id exactly.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isSpf")
def is_spf(self) -> bool:
"""
Value of the SPF field. For more information about SPF, please see [SPF Authentication](https://docs.cloud.oracle.com/iaas/Content/Email/Concepts/overview.htm#components).
"""
return pulumi.get(self, "is_spf")
@property
@pulumi.getter
def name(self) -> str:
"""
A filter to only return resources that match the given name exactly.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def state(self) -> str:
"""
Filter returned list by specified lifecycle state. This parameter is case-insensitive.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="systemTags")
def system_tags(self) -> Mapping[str, Any]:
"""
Usage of system tag keys. These predefined keys are scoped to namespaces. Example: `{"orcl-cloud.free-tier-retained": "true"}`
"""
return pulumi.get(self, "system_tags")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> str:
"""
The time the email domain was created, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format, "YYYY-MM-ddThh:mmZ". Example: `2021-02-12T22:47:12.613Z`
"""
return pulumi.get(self, "time_created")
@pulumi.output_type
class GetEmailDomainsFilterResult(dict):
def __init__(__self__, *,
name: str,
values: Sequence[str],
regex: Optional[bool] = None):
"""
:param str name: A filter to only return resources that match the given name exactly.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "values", values)
if regex is not None:
pulumi.set(__self__, "regex", regex)
@property
@pulumi.getter
def name(self) -> str:
"""
A filter to only return resources that match the given name exactly.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def values(self) -> Sequence[str]:
return pulumi.get(self, "values")
@property
@pulumi.getter
def regex(self) -> Optional[bool]:
return pulumi.get(self, "regex")
@pulumi.output_type
class GetSendersFilterResult(dict):
def __init__(__self__, *,
name: str,
values: Sequence[str],
regex: Optional[bool] = None):
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "values", values)
if regex is not None:
pulumi.set(__self__, "regex", regex)
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter
def values(self) -> Sequence[str]:
return pulumi.get(self, "values")
@property
@pulumi.getter
def regex(self) -> Optional[bool]:
return pulumi.get(self, "regex")
@pulumi.output_type
class GetSendersSenderResult(dict):
def __init__(__self__, *,
compartment_id: str,
defined_tags: Mapping[str, Any],
email_address: str,
email_domain_id: str,
freeform_tags: Mapping[str, Any],
id: str,
is_spf: bool,
state: str,
time_created: str):
"""
:param str compartment_id: The OCID for the compartment.
:param Mapping[str, Any] defined_tags: Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param str email_address: The email address of the approved sender.
:param str email_domain_id: The email domain used to assert responsibility for emails sent from this sender.
:param Mapping[str, Any] freeform_tags: Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param str id: The unique OCID of the sender.
:param bool is_spf: Value of the SPF field. For more information about SPF, please see [SPF Authentication](https://docs.cloud.oracle.com/iaas/Content/Email/Concepts/overview.htm#components).
:param str state: The current state of a sender.
:param str time_created: The date and time the approved sender was added in "YYYY-MM-ddThh:mmZ" format with a Z offset, as defined by RFC 3339.
"""
pulumi.set(__self__, "compartment_id", compartment_id)
pulumi.set(__self__, "defined_tags", defined_tags)
pulumi.set(__self__, "email_address", email_address)
pulumi.set(__self__, "email_domain_id", email_domain_id)
pulumi.set(__self__, "freeform_tags", freeform_tags)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "is_spf", is_spf)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "time_created", time_created)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> str:
"""
The OCID for the compartment.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Mapping[str, Any]:
"""
Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter(name="emailAddress")
def email_address(self) -> str:
"""
The email address of the approved sender.
"""
return pulumi.get(self, "email_address")
@property
@pulumi.getter(name="emailDomainId")
def email_domain_id(self) -> str:
"""
The email domain used to assert responsibility for emails sent from this sender.
"""
return pulumi.get(self, "email_domain_id")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Mapping[str, Any]:
"""
Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter
def id(self) -> str:
"""
The unique OCID of the sender.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isSpf")
def is_spf(self) -> bool:
"""
Value of the SPF field. For more information about SPF, please see [SPF Authentication](https://docs.cloud.oracle.com/iaas/Content/Email/Concepts/overview.htm#components).
"""
return pulumi.get(self, "is_spf")
@property
@pulumi.getter
def state(self) -> str:
"""
The current state of a sender.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> str:
"""
The date and time the approved sender was added in "YYYY-MM-ddThh:mmZ" format with a Z offset, as defined by RFC 3339.
"""
return pulumi.get(self, "time_created")
@pulumi.output_type
class GetSuppressionsFilterResult(dict):
def __init__(__self__, *,
name: str,
values: Sequence[str],
regex: Optional[bool] = None):
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "values", values)
if regex is not None:
pulumi.set(__self__, "regex", regex)
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter
def values(self) -> Sequence[str]:
return pulumi.get(self, "values")
@property
@pulumi.getter
def regex(self) -> Optional[bool]:
return pulumi.get(self, "regex")
@pulumi.output_type
class GetSuppressionsSuppressionResult(dict):
def __init__(__self__, *,
compartment_id: str,
email_address: str,
error_detail: str,
error_source: str,
id: str,
message_id: str,
reason: str,
time_created: str,
time_last_suppressed: str):
"""
:param str compartment_id: The OCID for the compartment.
:param str email_address: The email address of the suppression.
:param str error_detail: The specific error message returned by a system that resulted in the suppression. This message is usually an SMTP error code with additional descriptive text. Not provided for all types of suppressions.
:param str error_source: DNS name of the source of the error that caused the suppression. Will be set to either the remote-mta or reporting-mta field from a delivery status notification (RFC 3464) when available. Not provided for all types of suppressions, and not always known.
:param str id: The unique OCID of the suppression.
:param str message_id: The value of the Message-ID header from the email that triggered a suppression. This value is as defined in RFC 5322 section 3.6.4, excluding angle-brackets. Not provided for all types of suppressions.
:param str reason: The reason that the email address was suppressed. For more information on the types of bounces, see [Suppression List](https://docs.cloud.oracle.com/iaas/Content/Email/Concepts/overview.htm#components).
:param str time_created: The date and time a recipient's email address was added to the suppression list, in "YYYY-MM-ddThh:mmZ" format with a Z offset, as defined by RFC 3339.
:param str time_last_suppressed: The last date and time the suppression prevented submission in "YYYY-MM-ddThh:mmZ" format with a Z offset, as defined by RFC 3339.
"""
pulumi.set(__self__, "compartment_id", compartment_id)
pulumi.set(__self__, "email_address", email_address)
pulumi.set(__self__, "error_detail", error_detail)
pulumi.set(__self__, "error_source", error_source)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "message_id", message_id)
pulumi.set(__self__, "reason", reason)
pulumi.set(__self__, "time_created", time_created)
pulumi.set(__self__, "time_last_suppressed", time_last_suppressed)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> str:
"""
The OCID for the compartment.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="emailAddress")
def email_address(self) -> str:
"""
The email address of the suppression.
"""
return pulumi.get(self, "email_address")
@property
@pulumi.getter(name="errorDetail")
def error_detail(self) -> str:
"""
The specific error message returned by a system that resulted in the suppression. This message is usually an SMTP error code with additional descriptive text. Not provided for all types of suppressions.
"""
return pulumi.get(self, "error_detail")
@property
@pulumi.getter(name="errorSource")
def error_source(self) -> str:
"""
DNS name of the source of the error that caused the suppression. Will be set to either the remote-mta or reporting-mta field from a delivery status notification (RFC 3464) when available. Not provided for all types of suppressions, and not always known.
"""
return pulumi.get(self, "error_source")
@property
@pulumi.getter
def id(self) -> str:
"""
The unique OCID of the suppression.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="messageId")
def message_id(self) -> str:
"""
The value of the Message-ID header from the email that triggered a suppression. This value is as defined in RFC 5322 section 3.6.4, excluding angle-brackets. Not provided for all types of suppressions.
"""
return pulumi.get(self, "message_id")
@property
@pulumi.getter
def reason(self) -> str:
"""
The reason that the email address was suppressed. For more information on the types of bounces, see [Suppression List](https://docs.cloud.oracle.com/iaas/Content/Email/Concepts/overview.htm#components).
"""
return pulumi.get(self, "reason")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> str:
"""
The date and time a recipient's email address was added to the suppression list, in "YYYY-MM-ddThh:mmZ" format with a Z offset, as defined by RFC 3339.
"""
return pulumi.get(self, "time_created")
@property
@pulumi.getter(name="timeLastSuppressed")
def time_last_suppressed(self) -> str:
"""
The last date and time the suppression prevented submission in "YYYY-MM-ddThh:mmZ" format with a Z offset, as defined by RFC 3339.
"""
return pulumi.get(self, "time_last_suppressed")
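# Illustrative sketch (not part of the generated provider code): the
# docstrings above describe `time_created` and `time_last_suppressed` as
# RFC 3339 strings in "YYYY-MM-ddThh:mmZ" format with a Z offset. A minimal,
# hedged way to turn such a value into an aware datetime; the helper name
# `parse_rfc3339` is hypothetical.
from datetime import datetime


def parse_rfc3339(ts: str) -> datetime:
    # fromisoformat() on Python < 3.11 rejects a literal trailing "Z",
    # so normalize it to an explicit UTC offset first.
    return datetime.fromisoformat(ts.replace("Z", "+00:00"))
# e.g. parse_rfc3339("2021-03-29T15:30Z") yields an aware datetime pinned
# to UTC, suitable for comparing suppression timestamps.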
| 44.917768 | 489 | 0.657687 | 3,855 | 30,589 | 5.073152 | 0.085344 | 0.026691 | 0.038554 | 0.056348 | 0.876822 | 0.872271 | 0.85867 | 0.849568 | 0.841591 | 0.820525 | 0 | 0.00777 | 0.238452 | 30,589 | 680 | 490 | 44.983824 | 0.831766 | 0.459806 | 0 | 0.785 | 1 | 0 | 0.130574 | 0.03106 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 | false | 0 | 0.015 | 0.03 | 0.355 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
373a6075d2010867b86aac0bf4b0df1f784da783 | 23,232 | py | Python | pymtl3/dsl/test/Slicing_test.py | mondO/pymtl3 | 9869dda28c01926cee6da94ebdeac2a210150c62 | [
"BSD-3-Clause"
] | 1 | 2022-01-03T06:22:11.000Z | 2022-01-03T06:22:11.000Z | pymtl3/dsl/test/Slicing_test.py | mondO/pymtl3 | 9869dda28c01926cee6da94ebdeac2a210150c62 | [
"BSD-3-Clause"
] | null | null | null | pymtl3/dsl/test/Slicing_test.py | mondO/pymtl3 | 9869dda28c01926cee6da94ebdeac2a210150c62 | [
"BSD-3-Clause"
] | null | null | null | """
========================================================================
Slicing_test.py
========================================================================
Author : Shunning Jiang
Date : Aug 23, 2018
"""
from pymtl3.datatypes import Bits1, Bits2, Bits4, Bits14, Bits16, Bits24, Bits32
from pymtl3.dsl.ComponentLevel3 import ComponentLevel3, connect
from pymtl3.dsl.Connectable import Wire
from pymtl3.dsl.errors import MultiWriterError, NoWriterError
from .sim_utils import simple_sim_pass
def _test_model( cls ):
  # Instantiate and elaborate the model, schedule it with the simple
  # simulation pass, then tick the simulator for ten cycles.
  A = cls()
  A.elaborate()
  simple_sim_pass( A, 0x123 )
  for i in range(10):
    A.tick()
# write two disjoint slices
def test_write_two_disjoint_slices():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_0_16():
s.A[0:16] = Bits16( 0xff )
@s.update
def up_wr_16_30():
s.A[16:30] = Bits14( 0xff )
@s.update
def up_rd_12_30():
assert s.A[12:30] == 0xff0
_test_model( Top )
# write two disjoint slices, but one slice is not read at all
def test_write_two_disjoint_slices_no_reader():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_0_16():
s.A[0:16] = Bits16( 0xff )
@s.update
def up_wr_16_30():
s.A[16:30] = Bits14( 0xff )
@s.update
def up_rd_17_30():
assert s.A[16:30] == 0xff
m = Top()
m.elaborate()
simple_sim_pass( m, 0x123 )
# assert len(m._all_constraints) == 1
# x, y = list(m._all_constraints)[0]
# assert m._all_id_upblk[x].__name__ == "up_wr_16_30" and \
# m._all_id_upblk[y].__name__ == "up_rd_17_30" # only one constraint
# write two overlapping slices
def test_write_two_overlapping_slices():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_0_24():
s.A[0:24] = Bits24( 0xff )
@s.update
def up_wr_8_32():
s.A[8:32] = Bits24( 0xff )
@s.update
def up_rd_A():
x = s.A
try:
_test_model( Top )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
# write two slices and a single bit
def test_write_two_slices_and_bit():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_0_16():
s.A[0:16] = Bits16( 0xff )
@s.update
def up_wr_16_30():
s.A[16:30] = Bits14( 0xff )
@s.update
def up_wr_30_31():
s.A[30] = Bits1( 1 )
@s.update
def up_rd_A():
print(s.A[0:17])
m = Top()
m.elaborate()
simple_sim_pass( m, 0x123 )
# assert len(m._all_constraints) == 2
# _, x = list(m._all_constraints)[0]
# _, y = list(m._all_constraints)[1]
# two constraints are: up_wr_0_16 < up_rd_A and up_wr_16_30 < up_rd_A
# assert m._all_id_upblk[x].__name__ == "up_rd_A" and \
# m._all_id_upblk[y].__name__ == "up_rd_A"
# write a slice and a single bit, but they are overlapped
def test_write_slices_and_bit_overlapped():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_0_16():
s.A[0:16] = Bits16( 0xff )
@s.update
def up_wr_15():
s.A[15] = Bits1( 1 )
@s.update
def up_rd_A():
print(s.A[0:17])
try:
_test_model( Top )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
# write a slice and there are two reader
def test_multiple_readers():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_8_24():
s.A[8:24] = Bits16( 0x1234 )
@s.update
def up_rd_0_12():
assert s.A[0:12] == 0x400
@s.update
def up_rd_bunch():
assert s.A[23] == 0
assert s.A[22] == 0
assert s.A[21] == 0
assert s.A[20] == 1
assert s.A[19] == 0
assert s.A[18] == 0
assert s.A[17] == 1
assert s.A[16] == 0
_test_model( Top )
# 1. WR A[s], RD A (A[s] (=) A, SAME AS data struct)
# WR A[s], WR A (detect 2-writer conflict, SAME AS data struct)
# WR A[s], RD A[t] (A[s] (=) A[t] if s intersects t)
# WR A[s], WR A[t] (detect 2-writer conflict if s intersects t)
# 2. WR A , RD A[s] (A[s] (=) A, SAME AS data struct)
# 3. WR A[s], A |=y, RD y (mark A as writer in net {A,y}, SAME AS data struct)
# WR A[s], A |=y, WR y (detect 2-writer conflict, SAME AS data struct)
# WR A[s], A[t]|=y, RD y (mark A[t] as writer in net {A[t],y} if s intersects t)
# WR A[s], A[t]|=y, WR y (detect 2-writer conflict if s intersects t)
# 4. WR A , A[s]|=y, RD y (mark A[s] as writer in net {A[s],y}, SAME AS data struct)
# WR A , A[s]|=y, WR y (detect 2-writer conflict, SAME AS data struct)
# 5. WR x, x|=A[s], RD A (A[s] (=) A, SAME AS data struct)
# WR x, x|=A[s], RD A[t] (A[s] (=) A[t] if s intersects t)
# 6. WR x, x|=A , RD A[s] (A[s] (=) A, SAME AS data struct)
# 7. WR x, x|=A[s], A |=y, RD y (mark A as writer and implicit constraint)
# WR x, x|=A[s], A |=y, WR y (detect 2-writer conflict)
# WR x, x|=A[s], A[t]|=y, RD y (mark A[t] as writer and implicit constraint if s intersects t)
# WR x, x|=A[s], A[t]|=y, WR y (detect 2-writer conflict if s intersects t)
# 8. WR x, x|=A , A[s]|=y, RD y (mark A[s] as writer in net {A[s],y}, SAME AS data struct)
# --------------------------------------------------------------------------
# RD A[s]
# - WR A (A[s] (=) A, SAME AS data struct)
# - WR A[t] (A[s] (=) A[t] if s intersects t)
# - A |=x, WR x (A[s] (=) A, SAME AS data struct)
# - A[t]|=x, WR x (A[s] (=) A[t] if s intersects t)
# WR A[s]
# - RD A (A[s] (=) A, SAME AS data struct)
# - WR A (detect 2-writer conflict, SAME AS data struct)
# - WR A[t] (detect 2-writer conflict if s intersects t)
# - A |=x (mark A as writer in net {A,x}, SAME AS data struct)
# - A |=x, WR x (detect 2-writer conflict, SAME AS data struct)
# - A[t]|=x (mark A[t] as writer in net {A[t],x} if s intersects t)
# - A[t]|=x, WR x (detect 2-writer conflict if s intersects t)
# A[s]|=x
# - WR A (mark A[s] as writer in net {A[s],x}, SAME AS data struct)
# - A|=y, WR y (mark A[s] as writer in net {A[s],x}, SAME AS data struct)
# - A[t]|=y, WR y (mark A[s] as writer in net {A[s],x}, if s intersects t)
# A[s]|=x, WR x
# - RD A (A[s] (=) A, SAME AS data struct)
# - WR A (detect 2-writer conflict, SAME AS data struct)
# - A |=y (mark A as writer in net {A,y} SAME AS data struct)
# - A |=y, WR y (detect 2-writer conflict, SAME AS data struct)
# - A[t]|=y, WR y (detect 2-writer conflict if s intersects t)
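# A minimal sketch of the "s intersects t" predicate that the cases above
# hinge on, assuming half-open bit slices (start, stop) like A[s:t]; the
# helper name `slices_intersect` is ours, not pymtl3's.
def slices_intersect( s, t ):
  s_start, s_stop = s
  t_start, t_stop = t
  # Two half-open ranges overlap iff each one starts before the other stops.
  return s_start < t_stop and t_start < s_stop

assert slices_intersect( (0,24), (8,32) )       # overlapping writes conflict
assert not slices_intersect( (1,3), (5,7) )     # disjoint writes are fine
assert not slices_intersect( (0,16), (16,30) )  # adjacent slices do not overlap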
# RD A[s] - WR A
def test_rd_As_wr_A_impl():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_A():
s.A = Bits32( 123 )
@s.update
def up_rd_As():
assert s.A[0:16] == 123
_test_model( Top )
# RD A[s] - WR A[t], intersect
def test_rd_As_wr_At_impl_intersect():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_At():
s.A[8:24] = Bits16( 0xff )
@s.update
def up_rd_As():
assert s.A[0:16] == 0xff00
_test_model( Top )
# RD A[s] - WR A[t], not intersect
def test_rd_As_wr_At_impl_disjoint():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_At():
s.A[16:32] = Bits16( 0xff )
@s.update
def up_rd_As():
assert s.A[0:16] == 0
m = Top()
m.elaborate()
simple_sim_pass( m, 0x123 )
# assert len(m._all_constraints) == 0 # no constraint at all!
# WR A[s] - WR A
def test_wr_As_wr_A_conflict():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_As():
s.A[1:3] = Bits2( 2 )
@s.update
def up_wr_A():
s.A = Bits32( 123 )
try:
_test_model( Top )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
# WR A[s] - WR A[t], intersect
def test_wr_As_wr_At_intersect():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_As():
s.A[1:3] = Bits2( 2 )
@s.update
def up_wr_At():
s.A[2:4] = Bits2( 2 )
@s.update
def up_rd_A():
z = s.A
try:
_test_model( Top )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
# WR A[s] - WR A[t], not intersect
def test_wr_As_wr_At_disjoint():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_As():
s.A[1:3] = Bits2( 2 )
@s.update
def up_wr_At():
s.A[5:7] = Bits2( 2 )
@s.update
def up_rd_A():
z = s.A
_test_model( Top )
# WR A[s] - RD A
def test_wr_As_rd_A_impl():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_As():
s.A[1:3] = Bits2( 2 )
@s.update
def up_rd_A():
z = s.A
_test_model( Top )
# WR A[s] - RD A, RD A[t], intersect
def test_wr_As_rd_A_rd_At_can_schedule():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_As():
s.A[1:3] = Bits2( 2 )
@s.update
def up_rd_A():
z = s.A
@s.update
def up_rd_As():
assert s.A[2:4] == 1
_test_model( Top )
# WR A[s] - RD A, RD A[t], not intersect
def test_wr_As_rd_A_rd_At_cannot_schedule():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_As():
s.A[1:3] = Bits2( 2 )
@s.update
def up_rd_A():
z = s.A
@s.update
def up_rd_At():
assert s.A[3:5] == 0
m = Top()
m.elaborate()
simple_sim_pass( m, 0x123 )
# assert len(m._all_constraints) == 1
# x, y = list(m._all_constraints)[0]
# assert m._all_id_upblk[x].__name__ == "up_wr_As" and \
# m._all_id_upblk[y].__name__ == "up_rd_A" # only one constraint
# WR A - RD A[s], RD A[t]
def test_wr_A_rd_slices_can_schedule():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_A():
s.A = Bits32( 0x12345678 )
@s.update
def up_rd_As():
assert s.A[0:16] == 0x5678
@s.update
def up_rd_At():
assert s.A[8:24] == 0x3456
_test_model( Top )
# WR A[s] - RD A, RD A[t], not intersect
def test_wr_As_rd_A_rd_At_bit_cannot_schedule():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
@s.update
def up_wr_As():
s.A[0:16] = Bits16( 0x1234 )
@s.update
def up_rd_A():
z = s.A
@s.update
def up_rd_At():
assert s.A[16] == 0
m = Top()
m.elaborate()
simple_sim_pass( m, 0x123 )
# assert len(m._all_constraints) == 1
# x, y = list(m._all_constraints)[0]
# assert m._all_id_upblk[x].__name__ == "up_wr_As" and \
# m._all_id_upblk[y].__name__ == "up_rd_A" # only one constraint
# RD A[s] - A|=x, WR x
def test_connect_rd_As_wr_x_conn_A_impl():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits32 )
s.A = Wire( Bits32 )
connect( s.A, s.x )
@s.update
def up_wr_x():
s.x = Bits32( 123 )
@s.update
def up_rd_As():
assert s.A[0:16] == 123
_test_model( Top )
# RD A[s] - A[t]|=x, WR x, intersect
def test_connect_rd_As_wr_x_conn_At_impl():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
connect( s.A[0:24], s.x )
@s.update
def up_wr_x():
s.x = Bits24( 0x123456 )
@s.update
def up_rd_As():
assert s.A[0:16] == 0x3456
_test_model( Top )
# RD A[s] - A[t]|=x, WR x, not intersect
def test_connect_rd_As_wr_x_conn_At_disjoint():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
connect( s.A[0:24], s.x )
@s.update
def up_wr_x():
s.x = Bits24( 0x123456 )
@s.update
def up_rd_As():
assert s.A[24:32] == 0
m = Top()
m.elaborate()
simple_sim_pass( m, 0x123 )
# assert len(m._all_constraints) == 1
# x, y = list(m._all_constraints)[0]
# assert m._all_id_upblk[x].__name__ == "up_wr_x" and \
# m._all_id_upblk[y].__name__ == "s_x__1" # connection block
# WR A[s] - A|=x
def test_connect_wr_As_rd_x_conn_A_mark_writer():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits32 )
s.A = Wire( Bits32 )
connect( s.x, s.A )
@s.update
def up_wr_As():
s.A[0:24] = Bits24( 0x123456 )
_test_model( Top )
# WR A[s] - A|=x, WR x
def test_connect_wr_As_wr_x_conn_A_conflict():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits32 )
s.A = Wire( Bits32 )
connect( s.x, s.A )
@s.update
def up_wr_As():
s.A[0:24] = Bits24( 0x123456 )
@s.update
def up_wr_x():
s.x = Bits32( 0x87654321 )
try:
_test_model( Top )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
# WR A[s] - A[t]|=x, intersect
def test_connect_wr_As_rd_x_conn_At_mark_writer():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
connect( s.x, s.A[8:32] )
@s.update
def up_wr_As():
s.A[0:24] = Bits24( 0x123456 )
_test_model( Top )
# WR A[s] - A[t]|=x, not intersect
def test_connect_wr_As_rd_x_conn_At_no_driver():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
connect( s.x, s.A[8:32] )
@s.update
def up_wr_As():
s.A[0:4] = Bits4( 0xf )
try:
_test_model( Top )
except NoWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown NoWriterError.")
# WR A[s] - A[t]|=x, WR x, intersect
def test_connect_wr_As_wr_x_conn_At_conflict():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
connect( s.x, s.A[8:32] )
@s.update
def up_wr_As():
s.A[0:24] = Bits24( 0x123456 )
@s.update
def up_wr_x():
s.x = Bits24( 0x654321 )
try:
_test_model( Top )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
# WR A[s] - A[t]|=x, WR x, not intersect
def test_connect_wr_As_wr_x_conn_At_disjoint():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
connect( s.x, s.A[8:32] )
@s.update
def up_wr_As():
s.A[0:4] = Bits4( 0xf )
@s.update
def up_wr_x():
s.x = Bits24( 0x654321 )
@s.update
def up_rd_A():
assert s.A == 0x6543210f
_test_model( Top )
# A[s]|=x, WR x - RD A
def test_connect_wr_x_conn_As_rd_A_impl():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
connect( s.A[8:32], s.x )
@s.update
def up_wr_x():
s.x = Bits24( 0x123456 )
@s.update
def up_rd_A():
assert s.A == 0x12345600
_test_model( Top )
# A[s]|=x, WR x - WR A
def test_connect_wr_x_conn_As_wr_A_conflict():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.y = Wire( Bits24 )
s.A = Wire( Bits32 )
connect( s.A[8:32], s.x )
connect( s.x, s.y )
@s.update
def up_wr_x():
s.x = Bits24( 0x123456 )
@s.update
def up_wr_A():
s.A = Bits32( 0x12345678 )
try:
_test_model( Top )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
# A[s]|=x - WR A
def test_connect_rd_x_conn_As_wr_A_mark_writer():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
connect( s.A[8:32], s.x )
@s.update
def up_wr_A():
s.A = Bits32( 0x12345678 )
@s.update
def up_rd_x():
assert s.x == 0x123456
_test_model( Top )
# A[s]|=x, WR x - A|=y, WR y
def test_connect_wr_x_conn_As_wr_y_conn_A_conflict():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
s.y = Wire( Bits32 )
connect( s.A[8:32], s.x )
connect( s.A , s.y )
@s.update
def up_wr_x():
s.x = Bits24( 0x123456 )
@s.update
def up_wr_y():
s.y = Bits32( 0x12345678 )
try:
_test_model( Top )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
# A[s]|=x, WR x - A[t]|=y, WR y, intersect
def test_connect_wr_x_conn_As_wr_y_conn_At_conflict():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
s.y = Wire( Bits16 )
connect( s.A[8:32], s.x )
connect( s.A[0:16], s.y )
@s.update
def up_wr_x():
s.x = Bits24( 0x123456 )
@s.update
def up_wr_y():
s.y = Bits16( 0x1234 )
try:
_test_model( Top )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
# A[s]|=x, WR x - A[t]|=y, WR y, not intersect
def test_connect_wr_x_conn_As_wr_y_conn_At_disjoint():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
s.y = Wire( Bits4 )
connect( s.A[8:32], s.x )
connect( s.A[0:4], s.y )
@s.update
def up_wr_x():
s.x = Bits24( 0x123456 )
@s.update
def up_wr_y():
s.y = Bits4( 0xf )
@s.update
def up_rd_A():
assert s.A == 0x1234560f
_test_model( Top )
# A[s]|=x, WR x - A|=y, RD y
def test_connect_wr_x_conn_As_rd_y_conn_A_mark_writer():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
s.y = Wire( Bits32 )
connect( s.A[8:32], s.x )
connect( s.A, s.y )
@s.update
def up_wr_x():
s.x = Bits24( 0x123456 )
@s.update
def up_rd_y():
assert s.y == 0x12345600
_test_model( Top )
# A[s]|=x - A|=y, WR y
def test_connect_rd_x_conn_As_wr_y_conn_A_mark_writer():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
s.y = Wire( Bits32 )
connect( s.A[8:32], s.x )
connect( s.A, s.y )
@s.update
def up_rd_x():
assert s.x == 0x123456
@s.update
def up_wr_y():
s.y = Bits32( 0x12345678 )
_test_model( Top )
# A[s]|=x - A[t]|=y, WR y, intersect
def test_connect_rd_x_conn_As_wr_y_conn_At_mark_writer():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
s.y = Wire( Bits16 )
connect( s.A[8:32], s.x )
connect( s.A[0:16], s.y )
@s.update
def up_rd_x():
assert s.x == 0x12
@s.update
def up_wr_y():
s.y = Bits16( 0x1234 )
_test_model( Top )
# A[s]|=x - A[t]|=y, WR y, not intersect
def test_connect_rd_x_conn_As_wr_y_conn_no_driver():
class Top( ComponentLevel3 ):
def construct( s ):
s.x = Wire( Bits24 )
s.A = Wire( Bits32 )
s.y = Wire( Bits4 )
connect( s.A[8:32], s.x )
connect( s.A[0:4 ], s.y )
@s.update
def up_rd_x():
assert s.x == 0
@s.update
def up_wr_y():
s.y = Bits4( 0xf )
try:
_test_model( Top )
except NoWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown NoWriterError.")
def test_iterative_find_nets():
class Top( ComponentLevel3 ):
def construct( s ):
s.w = Wire( Bits32 )
s.x = Wire( Bits32 )
s.y = Wire( Bits32 )
s.z = Wire( Bits32 )
connect( s.w[0:16], s.x[8:24] ) # net1
connect( s.x[16:32], s.y[0:16] ) # net2
connect( s.y[8:24], s.z[0:16] ) # net3
@s.update
def up_wr_s_w():
s.w = Bits32( 0x12345678 )
_test_model( Top )
def test_multiple_sibling_slices():
class Top( ComponentLevel3 ):
def construct( s ):
s.A = Wire( Bits32 )
s.x = Wire( Bits16 )
s.y = Wire( Bits16 )
s.z = Wire( Bits16 )
connect( s.A[0:16], s.x ) # net1
connect( s.A[8:24], s.y ) # net2
connect( s.A[16:32], s.z ) # net3
@s.update
def up_wr_s_w():
s.x = Bits16( 0x1234 )
try:
_test_model( Top )
except NoWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown NoWriterError.")
def test_multiple_write_same_slice():
class A( ComponentLevel3 ):
def construct( s ):
s.out = Wire( Bits32 )
s.wire = Wire( Bits32 )
connect( s.out, s.wire[0:32] )
@s.update
def comb_upblk():
s.wire[0:16] = 0
s.wire[16:32] = 1
a = A()
a.elaborate()
def test_multiple_write_same_slice_with_overlap():
class A( ComponentLevel3 ):
def construct( s ):
s.out = Wire( Bits32 )
s.wire = Wire( Bits32 )
connect( s.out, s.wire[0:32] )
s.wire2 = Wire( Bits24 )
connect( s.wire2[0:24], s.wire[8:32] )
@s.update
def comb_upblk():
s.wire[0:16] = 0
s.wire2[0:24] = 1
try:
_test_model( A )
except MultiWriterError as e:
print("{} is thrown\n{}".format( e.__class__.__name__, e ))
return
raise Exception("Should've thrown MultiWriterError.")
| 22.621227 | 97 | 0.548769 | 3,745 | 23,232 | 3.19279 | 0.047263 | 0.027599 | 0.073597 | 0.086309 | 0.889604 | 0.867609 | 0.844861 | 0.824956 | 0.799699 | 0.769758 | 0 | 0.062738 | 0.300189 | 23,232 | 1,026 | 98 | 22.643275 | 0.672715 | 0.229425 | 0 | 0.810726 | 0 | 0 | 0.036048 | 0 | 0 | 0 | 0.022551 | 0 | 0.047319 | 1 | 0.266562 | false | 0.012618 | 0.007886 | 0 | 0.358044 | 0.023659 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
375cd6b61247d348f26cd7169226ee4d1a806787 | 4,908 | py | Python | tests/test_consumer.py | trellis-ldp/py-ldn | 3e4c5709beadf9e31b38e46bf30923f8267aad5b | [
"Apache-2.0"
] | 1 | 2017-02-25T22:10:49.000Z | 2017-02-25T22:10:49.000Z | tests/test_consumer.py | trellis-ldp/py-ldn | 3e4c5709beadf9e31b38e46bf30923f8267aad5b | [
"Apache-2.0"
] | null | null | null | tests/test_consumer.py | trellis-ldp/py-ldn | 3e4c5709beadf9e31b38e46bf30923f8267aad5b | [
"Apache-2.0"
] | null | null | null | import json
import unittest
from unittest.mock import Mock, patch
from ldnlib import Consumer
class TestConsumer(unittest.TestCase):
@patch('requests.get')
def test_notifications_ntriples(self, mock_get):
mock_res = Mock()
mock_res.headers = {"content-type": "application/n-triples"}
with open("tests/inbox.nt", "r") as f:
mock_res.text = f.read()
mock_get.return_value = mock_res
notifications = Consumer().notifications("http://example.org/inbox")
self.assertEqual(5, len(notifications))
self.assertTrue("http://example.org/inbox/1" in notifications)
self.assertTrue("http://example.org/inbox/2" in notifications)
self.assertTrue("http://example.org/inbox/3" in notifications)
self.assertTrue("http://example.org/inbox/4" in notifications)
self.assertTrue("http://example.org/inbox/5" in notifications)
@patch('requests.get')
def test_notifications_jsonld_compacted(self, mock_get):
mock_res = Mock()
mock_res.headers = {"content-type": "application/ld+json"}
with open("tests/inbox.jsonld", "r") as f:
mock_res.text = f.read()
mock_get.return_value = mock_res
notifications = Consumer().notifications("http://example.org/inbox")
self.assertEqual(5, len(notifications))
self.assertTrue("http://example.org/inbox/1" in notifications)
self.assertTrue("http://example.org/inbox/2" in notifications)
self.assertTrue("http://example.org/inbox/3" in notifications)
self.assertTrue("http://example.org/inbox/4" in notifications)
self.assertTrue("http://example.org/inbox/5" in notifications)
@patch('requests.get')
def test_notifications_jsonld_expanded(self, mock_get):
mock_res = Mock()
mock_res.headers = {"content-type": "application/ld+json"}
with open("tests/inbox_expanded.jsonld", "r") as f:
mock_res.text = f.read()
mock_get.return_value = mock_res
notifications = Consumer().notifications("http://example.org/inbox")
self.assertEqual(5, len(notifications))
self.assertTrue("http://example.org/inbox/1" in notifications)
self.assertTrue("http://example.org/inbox/2" in notifications)
self.assertTrue("http://example.org/inbox/3" in notifications)
self.assertTrue("http://example.org/inbox/4" in notifications)
self.assertTrue("http://example.org/inbox/5" in notifications)
@patch('requests.get')
def test_notifications_turtle(self, mock_get):
mock_res = Mock()
mock_res.headers = {"content-type": "text/turtle; charset=utf-8"}
with open("tests/inbox.ttl", "r") as f:
mock_res.text = f.read()
mock_get.return_value = mock_res
notifications = Consumer().notifications("http://example.org/inbox")
self.assertEqual(5, len(notifications))
self.assertTrue("http://example.org/inbox/1" in notifications)
self.assertTrue("http://example.org/inbox/2" in notifications)
self.assertTrue("http://example.org/inbox/3" in notifications)
self.assertTrue("http://example.org/inbox/4" in notifications)
self.assertTrue("http://example.org/inbox/5" in notifications)
@patch('requests.get')
def test_notification_turtle(self, mock_get):
mock_res = Mock()
mock_res.headers = {"content-type": "text/turtle; charset=utf-8"}
with open("tests/notification.ttl", "r") as f:
mock_res.text = f.read()
mock_get.return_value = mock_res
notification = Consumer().notification("http://example.org/inbox/1")
        self.assertEqual(1, len(notification))
self.assertTrue("@id" in notification[0])
self.assertEqual("http://example.org/inbox/1", notification[0]["@id"])
prefLabel = "http://www.w3.org/2004/02/skos/core#prefLabel"
self.assertTrue(prefLabel in notification[0])
self.assertEqual("First notification",
notification[0][prefLabel][0]["@value"])
@patch('requests.get')
def test_notification_jsonld(self, mock_get):
mock_res = Mock()
mock_res.headers = {"content-type": "application/ld+json"}
with open("tests/notification1.json", "r") as f:
attrs = {'json.return_value': json.loads("[" + f.read() + "]")}
mock_res.configure_mock(**attrs)
mock_get.return_value = mock_res
notification = Consumer().notification("http://example.org/inbox/1")
        self.assertEqual(1, len(notification))
self.assertTrue("@id" in notification[0])
self.assertEqual("http://example.org/inbox/1", notification[0]["@id"])
self.assertTrue("creator" in notification[0])
self.assertEqual("http://example.org/user",
notification[0]["creator"])
| 41.59322 | 78 | 0.647718 | 599 | 4,908 | 5.208681 | 0.125209 | 0.102244 | 0.130128 | 0.170513 | 0.861218 | 0.851603 | 0.825 | 0.825 | 0.810897 | 0.810897 | 0 | 0.012529 | 0.203138 | 4,908 | 117 | 79 | 41.948718 | 0.785221 | 0 | 0 | 0.703297 | 0 | 0 | 0.256112 | 0.019152 | 0 | 0 | 0 | 0 | 0.373626 | 1 | 0.065934 | false | 0 | 0.043956 | 0 | 0.120879 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
376333a72f8b3139dd642e8430adb5f1442a8313 | 113,048 | py | Python | wordnik/_methods.py | zeke/wordnik-python | 6eccc5119ecee2c503911307608650d65eb94d3d | [
"PSF-2.0"
] | 1 | 2021-01-05T17:35:20.000Z | 2021-01-05T17:35:20.000Z | wordnik/_methods.py | zeke/wordnik-python | 6eccc5119ecee2c503911307608650d65eb94d3d | [
"PSF-2.0"
] | null | null | null | wordnik/_methods.py | zeke/wordnik-python | 6eccc5119ecee2c503911307608650d65eb94d3d | [
"PSF-2.0"
] | 1 | 2016-06-12T06:12:02.000Z | 2016-06-12T06:12:02.000Z | api_methods = {'account': {u'endPoints': [{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'A confirmed Wordnik username',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u"The user's password",
u'name': u'password',
u'paramType': u'query',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 403,
u'reason': u'Account not available.'},
{u'code': 404,
u'reason': u'User not found.'}],
u'occurs': u'1',
u'valueType': u'AuthenticationToken'}],
u'summary': u'Authenticates a User'}],
u'path': u'/account.{format}/authenticate/{username}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': True,
u'parameters': [{u'description': u'Wordnik authentication token',
u'name': u'api_key',
u'paramType': u'header',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'No token supplied.'},
{u'code': 404,
u'reason': u'No API account with supplied token.'}],
u'occurs': u'1',
u'valueType': u'ApiTokenStatus'}],
u'summary': u'Returns usage statistics for the API account.'}],
u'path': u'/account.{format}/apiTokenStatus'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Username',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid username supplied.'},
{u'code': 404,
u'reason': u'No activation code available.'}],
u'occurs': u'1',
u'valueType': u'ApiResponse'}],
u'summary': u'Returns an ApiResponse indicating whether or not a username is available'}],
u'path': u'/account.{format}/usernameAvailable/{username}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'API Key',
u'name': u'api_key',
u'paramType': u'header',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid token supplied.'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u'Regenerates an API Token. Currently not supported or tested.'}],
u'path': u'/account.{format}/regenerateApiToken'},
{u'description': u'',
u'operations': [{u'httpMethod': u'POST',
u'open': False,
u'parameters': [{u'description': u'A confirmed Wordnik username',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u"The user's password",
u'paramType': u'body',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 403,
u'reason': u'Account not available.'},
{u'code': 404,
u'reason': u'User not found.'}],
u'occurs': u'1',
u'valueType': u'AuthenticationToken'}],
u'summary': u'Authenticates a user'}],
u'path': u'/account.{format}/authenticate/{username}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'notes': u'Requires a valid auth_token to be set.',
u'open': False,
u'parameters': [{u'description': u'The auth token of the logged-in user, obtained by calling /account.{format}/authenticate/{username} (described above)',
u'name': u'auth_token',
u'paramType': u'header',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 403,
u'reason': u'Not logged in.'},
{u'code': 404,
u'reason': u'User not found.'}],
u'occurs': u'1',
u'valueType': u'User'}],
u'summary': u'Returns the logged-in User'}],
u'path': u'/account.{format}/user'}]},
'user': {u'endPoints': [{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': True,
u'parameters': [{u'description': u'Username of the WordOfTheDay owner',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u'Date of the WordOfTheDay to retrieve (yyyy-MM-dd) format',
u'name': u'date',
u'paramType': u'path',
u'required': True},
{u'description': u'Include WordOfTheDay items for future dates (owner-only)',
u'name': u'includeAll',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid username or dateString supplied'},
{u'code': 404,
u'reason': u'User not found or no list available'}],
u'occurs': u'1',
u'valueType': u'WordOfTheDay'}],
u'summary': u'Returns the WordOfTheDay for a given user on a given date'}],
u'path': u'/user.{format}/{username}/wordOfTheDay/{date}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': True,
u'parameters': [{u'description': u'Username of the WordOfTheDayList owner',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u'Include future words (owner-only)',
u'name': u'includeAll',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid username supplied'},
{u'code': 404,
u'reason': u'No WordOfTheDayList available'}],
u'occurs': u'1',
u'valueType': u'WordOfTheDayList'}],
u'summary': u"Returns a user's WordOfTheDayList"}],
u'path': u'/user.{format}/{username}/wordOfTheDayList'},
{u'description': u'',
u'operations': [{u'httpMethod': u'PUT',
u'open': True,
u'parameters': [{u'description': u'Username of the WordOfTheDayList owner',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u'Updated WordOfTheDayList data in the format specified by the URL',
u'paramType': u'body',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid username supplied'},
{u'code': 403,
u'reason': u'Not authorized to perform update'},
{u'code': 404,
u'reason': u'No WordOfTheDayList available'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u"Updates a user's WordOfTheDayList"}],
u'path': u'/user.{format}/{username}/wordOfTheDayList'},
{u'description': u'',
u'operations': [{u'httpMethod': u'POST',
u'notes': u'A user can have only one WordOfTheDayList.',
u'open': True,
u'parameters': [{u'description': u'Username of the WordOfTheDayList owner',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u'WordOfTheDayList to create, provided in the format specified by the URL',
u'paramType': u'body',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'User already has a list or list is invalid'},
{u'code': 404,
u'reason': u'User not found'}],
u'occurs': u'1',
u'valueType': u'WordOfTheDayList'}],
u'summary': u'Creates a WordOfTheDayList'}],
u'path': u'/user.{format}/{username}/wordOfTheDayList'},
{u'description': u'',
u'operations': [{u'httpMethod': u'PUT',
u'open': True,
u'parameters': [{u'description': u'Username of the WordOfTheDayList owner',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u'Permalink of the WordOfTheDayList to add the WordOfTheDay to',
u'name': u'permalink',
u'paramType': u'path',
u'required': True},
{u'description': u'WordOfTheDay to add, in the format specified by the URL',
u'paramType': u'body',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'WordOfTheDay already scheduled on this date'},
{u'code': 403,
u'reason': u'Not authorized to perform modification'},
{u'code': 404,
u'reason': u'No WordOfTheDayList available to add to'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u"Adds a WordOfTheDay to a user's WordOfTheDayList"}],
u'path': u'/user.{format}/{username}/wordOfTheDayList/{permalink}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'DELETE',
u'open': True,
u'parameters': [{u'description': u'Username of the WordOfTheDayList owner',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u'Permalink of WordOfTheDayList to delete',
u'name': u'permalink',
u'paramType': u'path',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid username supplied'},
{u'code': 403,
u'reason': u'Not authorized to perform update'},
{u'code': 404,
u'reason': u'No WordOfTheDayList available'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u"Deletes a user's WordOfTheDayList"}],
u'path': u'/user.{format}/{username}/wordOfTheDayList/{permalink}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'DELETE',
u'open': True,
u'parameters': [{u'description': u'Username of the WordOfTheDayList owner',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u'Permalink of WordOfTheDayList to delete a word from',
u'name': u'permalink',
u'paramType': u'path',
u'required': True},
{u'description': u'Word to delete',
u'name': u'wordToDelete',
u'paramType': u'path',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'WordOfTheDay already scheduled on this date'},
{u'code': 403,
u'reason': u'Not authorized to perform modification'},
{u'code': 404,
u'reason': u'No WordOfTheDayList available to add to'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u"Deletes a specific word from a user's WordOfTheDayList"}],
u'path': u'/user.{format}/{username}/wordOfTheDayList/{permalink}/{wordToDelete}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'PUT',
u'open': True,
u'parameters': [{u'description': u'Username of the WordOfTheDayList owner',
u'name': u'username',
u'paramType': u'path',
u'required': True},
{u'description': u'WordOfTheDayList to modify',
u'name': u'permalink',
u'paramType': u'path',
u'required': True},
{u'description': u'WordOfTheDay to add',
u'paramType': u'body',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'WordOfTheDay already scheduled on this date'},
{u'code': 403,
u'reason': u'Not authorized to perform modification'},
{u'code': 404,
u'reason': u'No WordOfTheDayList available to add to'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u"Adds an item to a user's WordOfTheDayList"}],
u'path': u'/user.{format}/{username}/wordOfTheDayList/{permalink}/add'}]},
'word': {u'endPoints': [{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'String value of WordObject to return',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u"If true will try to return the correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'description': u'Return suggestions (for correct spelling, case variants, etc.)',
u'name': u'includeSuggestions',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'}],
u'occurs': u'1',
u'valueType': u'Word'}],
u'summary': u'Given a word as a string, returns the WordObject that represents it'}],
u'path': u'/word.{format}/{word}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word to return examples for',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u'Maximum number of results to return',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'description': u'Show duplicate examples from different sources',
u'name': u'includeDuplicates',
u'paramType': u'query',
u'required': False},
{u'description': u'Return results from a specific ContentProvider',
u'name': u'contentProvider',
u'paramType': u'query',
u'required': False},
{u'description': u"If true will try to return the correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'description': u'Results to skip',
u'name': u'skip',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'}],
u'occurs': u'1',
u'valueType': u'ExampleSearchResults'}],
u'summary': u'Returns examples for a word'}],
u'path': u'/word.{format}/{word}/examples'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word to return definitions for',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u'Maximum number of results to return',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'description': u'CSV list of part-of-speech types',
u'name': u'partOfSpeech',
u'paramType': u'query',
u'required': False},
{u'description': u'Return related words with definitions',
u'name': u'includeRelated',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'ahd, century, wiktionary, webster, wordnet',
u'description': u'Gets from dictionaries in the supplied order of precedence',
u'name': u'sourceDictionaries',
u'paramType': u'query',
u'required': False},
{u'description': u"If true will try to return the correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'description': u'Return a closed set of XML tags in response',
u'name': u'includeTags',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'},
{u'code': 404,
u'reason': u'No definitions found.'}],
u'occurs': u'1',
u'valueType': u'Definition[]'}],
u'summary': u'Return definitions for a word'}],
u'path': u'/word.{format}/{word}/definitions'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word to return',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u"If true will try to return the correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'description': u'Starting Year',
u'name': u'startYear',
u'paramType': u'query',
u'required': False},
{u'description': u'Ending Year',
u'name': u'endYear',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'},
{u'code': 404,
u'reason': u'No results.'}],
u'occurs': u'1',
u'valueType': u'FrequencySummary'},
{u'condition': u'errorBars=true',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'},
{u'code': 404,
u'reason': u'No results.'}],
u'occurs': u'1',
u'valueType': u'FrequencySummaryWithErrorBars'}],
u'summary': u'Returns word usage over time'}],
u'path': u'/word.{format}/{word}/frequency'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word to fetch examples for',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u'Return results from a specific ContentProvider',
u'name': u'contentProvider',
u'paramType': u'query',
u'required': False},
{u'description': u"If true will try to return the correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'}],
u'occurs': u'1',
u'valueType': u'Example'}],
u'summary': u'Returns a top example for a word'}],
u'path': u'/word.{format}/{word}/topExample'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word for which to return related words',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u'CSV list of part-of-speech types',
u'name': u'partOfSpeech',
u'paramType': u'query',
u'required': False},
{u'description': u'Get data from a single dictionary. Valid options are ahd, century, wiktionary, webster, and wordnet.',
u'name': u'sourceDictionary',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum number of results to return',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'description': u"If true will try to return the correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'description': u'Relationship type',
u'name': u'type',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'},
{u'code': 404,
u'reason': u'No definitions found.'}],
u'occurs': u'1',
u'valueType': u'Related[]'}],
u'summary': u'Return related words (thesaurus data) for a word'}],
u'path': u'/word.{format}/{word}/related'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word to fetch phrases for',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u'Maximum number of results to return',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'description': u'Minimum WLMI for the phrase',
u'name': u'wlmi',
u'paramType': u'query',
u'required': False},
{u'description': u"If true will try to return the correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'}],
u'occurs': u'1',
u'valueType': u'Bigram[]'}],
u'summary': u'Fetches bi-gram phrases for a word'}],
u'path': u'/word.{format}/{word}/phrases'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word to get syllables for',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u"If true will try to return a correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'description': u'Get from a single dictionary. Valid options: ahd, century, wiktionary, webster, and wordnet.',
u'name': u'sourceDictionary',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum number of results to return',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'}],
u'occurs': u'1',
u'valueType': u'Syllable[]'}],
u'summary': u'Returns syllable information for a word'}],
u'path': u'/word.{format}/{word}/hyphenation'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word to get pronunciations for',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u"If true will try to return a correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'description': u'Get from a single dictionary. Valid options: ahd, century, wiktionary, webster, and wordnet.',
u'name': u'sourceDictionary',
u'paramType': u'query',
u'required': False},
{u'description': u'Text pronunciation type',
u'name': u'typeFormat',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum number of results to return',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'}],
u'occurs': u'1',
u'valueType': u'TextPron[]'}],
u'summary': u'Returns text pronunciations for a given word'}],
u'path': u'/word.{format}/{word}/pronunciations'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word to fetch forms for',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u"If true will try to return a correct word root ('cats' -> 'cat'). If false returns exactly what was requested.",
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'},
{u'code': 404,
u'reason': u'No results.'}],
u'occurs': u'1',
u'valueType': u'RelationshipMap'}],
u'summary': u'Returns other forms of a word'}],
u'path': u'/word.{format}/{word}/wordForms'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'notes': u'The metadata includes a time-expiring fileUrl which allows reading the audio file directly from the API. Currently only audio pronunciations from the American Heritage Dictionary in mp3 format are supported.',
u'open': False,
u'parameters': [{u'description': u'Word to get audio for.',
u'name': u'word',
u'paramType': u'path',
u'required': True},
{u'description': u'Use the canonical form of the word.',
u'name': u'useCanonical',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum number of results to return',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'}],
u'occurs': u'1',
u'valueType': u'AudioPron[]'}],
u'summary': u'Fetches audio metadata for a word.'}],
u'path': u'/word.{format}/{word}/audio'}]},
'wordList': {u'endPoints': [{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'ID of WordList to fetch',
u'name': u'wordListId',
u'paramType': u'path',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid ID supplied'},
{u'code': 403,
u'reason': u'Not Authorized to access WordList'},
{u'code': 404,
u'reason': u'WordList not found'}],
u'occurs': u'1',
u'valueType': u'WordList'}],
u'summary': u'Fetches a WordList by ID'}],
u'path': u'/wordList.{format}/{wordListId}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'ID of WordList to use',
u'name': u'wordListId',
u'paramType': u'path',
u'required': True},
{u'allowableValues': u'createDate,alpha',
u'description': u'Field to sort by',
u'name': u'sortBy',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'asc,desc',
u'description': u'Direction to sort',
u'name': u'sortOrder',
u'paramType': u'query',
u'required': False},
{u'description': u'Results to skip',
u'name': u'skip',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum number of results to return',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid ID supplied'},
{u'code': 403,
u'reason': u'Not Authorized to access WordList'},
{u'code': 404,
u'reason': u'WordList not found'}],
u'occurs': u'1',
u'valueType': u'WordListWord[]'}],
u'summary': u'Fetches words in a WordList'}],
u'path': u'/wordList.{format}/{wordListId}/words'},
{u'description': u'',
u'operations': [{u'httpMethod': u'POST',
u'open': False,
u'parameters': [{u'description': u'ID of WordList to user',
u'name': u'wordListId',
u'paramType': u'path',
u'required': True},
{u'description': u'Words to add to WordList',
u'paramType': u'body',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid ID supplied'},
{u'code': 403,
u'reason': u'Not Authorized to access WordList'},
{u'code': 404,
u'reason': u'WordList not found'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u'Adds words to a WordList'}],
u'path': u'/wordList.{format}/{wordListId}/words'},
{u'description': u'',
u'operations': [{u'httpMethod': u'PUT',
u'open': False,
u'parameters': [{u'description': u'ID of WordList to update',
u'name': u'wordListId',
u'paramType': u'path',
u'required': True},
{u'description': u'Updated WordList',
u'paramType': u'body',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid ID supplied'},
{u'code': 403,
u'reason': u'Not Authorized to update WordList'},
{u'code': 404,
u'reason': u'WordList not found'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u'Updates an existing WordList'}],
u'path': u'/wordList.{format}/{wordListId}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'DELETE',
u'open': False,
u'parameters': [{u'description': u'ID of WordList to delete',
u'name': u'wordListId',
u'paramType': u'path',
u'required': True},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid ID supplied'},
{u'code': 403,
u'reason': u'Not Authorized to delete WordList'},
{u'code': 404,
u'reason': u'WordList not found'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u'Deletes an existing WordList'}],
u'path': u'/wordList.{format}/{wordListId}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'POST',
u'open': False,
u'parameters': [{u'description': u'ID of WordList to use',
u'name': u'wordListId',
u'paramType': u'path',
u'required': True},
{u'description': u'Words to remove from WordList',
u'paramType': u'body',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid ID supplied'},
{u'code': 403,
u'reason': u'Not Authorized to modify WordList'},
{u'code': 404,
u'reason': u'WordList not found'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u'Removes words from a WordList'}],
u'path': u'/wordList.{format}/{wordListId}/deleteWords'}]},
'wordLists': {u'endPoints': [{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': True,
u'parameters': [{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 404,
u'reason': u'No data available'}],
u'occurs': u'1',
u'valueType': u'Documentation'}],
u'summary': u'Returns information about API parameters'}],
u'path': u'/wordLists'},
{u'description': u'',
u'operations': [{u'httpMethod': u'POST',
u'open': False,
u'parameters': [{u'description': u'WordList to create',
u'paramType': u'body',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid WordList supplied or mandatory fields are missing.'},
{u'code': 403,
u'reason': u'Not authenticated.'},
{u'code': 404,
u'reason': u'WordList owner not found.'}],
u'occurs': u'1',
u'valueType': u'WordList'}],
u'summary': u'Creates a WordList.'}],
u'path': u'/wordLists'}]},
'words': {u'endPoints': [{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'defaultValue': u'true',
u'description': u'Only return words with dictionary definitions',
u'name': u'hasDictionaryDef',
u'paramType': u'query',
u'required': False},
{u'description': u'CSV part-of-speech values to include',
u'name': u'includePartOfSpeech',
u'paramType': u'query',
u'required': False},
{u'description': u'CSV part-of-speech values to exclude',
u'name': u'excludePartOfSpeech',
u'paramType': u'query',
u'required': False},
{u'description': u'Minimum corpus frequency for terms',
u'name': u'minCorpusCount',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum corpus frequency for terms',
u'name': u'maxCorpusCount',
u'paramType': u'query',
u'required': False},
{u'description': u'Minimum dictionary count',
u'name': u'minDictionaryCount',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum dictionary count',
u'name': u'maxDictionaryCount',
u'paramType': u'query',
u'required': False},
{u'description': u'Minimum word length',
u'name': u'minLength',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum word length',
u'name': u'maxLength',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 404,
u'reason': u'No word found.'}],
u'occurs': u'1',
u'valueType': u'Word'}],
u'summary': u'Returns a single random WordObject, in the format specified by the URL'}],
u'path': u'/words.{format}/randomWord'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Only return words with dictionary definitions',
u'name': u'hasDictionaryDef',
u'paramType': u'query',
u'required': False},
{u'description': u'CSV part-of-speech values to include',
u'name': u'includePartOfSpeech',
u'paramType': u'query',
u'required': False},
{u'description': u'CSV part-of-speech values to exclude',
u'name': u'excludePartOfSpeech',
u'paramType': u'query',
u'required': False},
{u'description': u'Minimum corpus frequency for terms (integer)',
u'name': u'minCorpusCount',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum corpus frequency for terms (integer)',
u'name': u'maxCorpusCount',
u'paramType': u'query',
u'required': False},
{u'description': u'Minimum dictionary count (integer)',
u'name': u'minDictionaryCount',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum dictionary count (integer)',
u'name': u'maxDictionaryCount',
u'paramType': u'query',
u'required': False},
{u'description': u'Minimum word length (characters)',
u'name': u'minLength',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum word length (characters)',
u'name': u'maxLength',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'alpha,count',
u'description': u'Attribute to sort by',
u'name': u'sortBy',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'asc,desc',
u'description': u'Sort direction',
u'name': u'sortOrder',
u'paramType': u'query',
u'required': False},
{u'description': u'Results to skip (integer)',
u'name': u'skip',
u'paramType': u'query',
u'required': False},
{u'description': u'Maximum number of results to return (integer)',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid term supplied.'},
{u'code': 404,
u'reason': u'No results.'}],
u'occurs': u'1',
u'valueType': u'WordFrequency[]'}],
u'summary': u'Returns an array of random WordObjects, in the format specified by the URL'}],
u'path': u'/words.{format}/randomWords'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Search term',
u'name': u'query',
u'paramType': u'query',
u'required': True},
{u'allowableValues': u'true | false',
u'defaultValue': u'true',
u'description': u'Whether the search is case-sensitive',
u'name': u'caseSensitive',
u'paramType': u'query',
u'required': False},
{u'description': u'Only include these comma-delimited parts of speech',
u'name': u'includePartOfSpeech',
u'paramType': u'query',
u'required': False},
{u'description': u'Exclude these comma-delimited parts of speech',
u'name': u'excludePartOfSpeech',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'non-negative integer',
u'defaultValue': u'5',
u'description': u'Minimum corpus frequency for terms',
u'name': u'minCorpusCount',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'non-negative integer',
u'description': u'Maximum corpus frequency for terms',
u'name': u'maxCorpusCount',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'non-negative integer',
u'defaultValue': u'1',
u'description': u'Minimum number of dictionary entries',
u'name': u'minDictionaryCount',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'non-negative integer',
u'description': u'Maximum dictionary count',
u'name': u'maxDictionaryCount',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'0 to 1024',
u'defaultValue': u'1',
u'description': u'Minimum word length',
u'name': u'minLength',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'0 to 1024',
u'description': u'Maximum word length',
u'name': u'maxLength',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'0 to 1000',
u'defaultValue': u'0',
u'description': u'Results to skip',
u'name': u'skip',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'1 to 1000',
u'defaultValue': u'10',
u'description': u'Maximum number of results to return',
u'name': u'limit',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid term supplied.'},
{u'code': 404,
u'reason': u'No results.'}],
u'occurs': u'1',
u'valueType': u'WordFrequency[]'}],
u'summary': u'Searches words.'}],
u'path': u'/words.{format}/search'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Word that returned WordOfTheDayLists must contain',
u'name': u'containsWord',
u'paramType': u'query',
u'required': True},
{u'description': u'Returns future WordOfTheDay items',
u'name': u'includeAll',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid word supplied.'}],
u'occurs': u'1',
u'valueType': u'WordOfTheDayList[]'}],
u'summary': u'Fetches an array of WordOfTheDayList containing a Word'}],
u'path': u'/words.{format}/wordOfTheDayLists'},
{u'description': u'',
u'operations': [{u'httpMethod': u'GET',
u'open': False,
u'parameters': [{u'description': u'Date of the WordOfTheDay items to fetch',
u'name': u'date',
u'paramType': u'path',
u'required': False},
{u'description': u'Returns future WordOfTheDay items',
u'name': u'includeAll',
u'paramType': u'query',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid ID supplied'},
{u'code': 404,
u'reason': u'WordOfTheDayList or User not found'}],
u'occurs': u'1',
u'valueType': u'WordOfTheDay[]'}],
u'summary': u'Fetches WordOfTheDay objects for a specific date'}],
u'path': u'/words.{format}/wordOfTheDayLists/{date}'},
{u'description': u'',
u'operations': [{u'httpMethod': u'POST',
u'open': False,
u'parameters': [{u'description': u'auth_token of logged-in user',
u'name': u'auth_token',
u'paramType': u'header',
u'required': True},
{u'description': u'ID of WordOfTheDayList',
u'name': u'permalink',
u'paramType': u'path',
u'required': False},
{u'description': u'Medium to subscribe with',
u'name': u'medium',
u'paramType': u'query',
u'required': False},
{u'description': u'Username to subscribe',
u'paramType': u'body',
u'required': False},
{u'allowableValues': u'json,xml',
u'defaultValue': u'json',
u'description': u'API response format',
u'name': u'format',
u'paramType': u'path',
u'required': True}],
u'response': [{u'condition': u'any',
u'errorResponses': [{u'code': 400,
u'reason': u'Invalid ID supplied'},
{u'code': 403,
u'reason': u'Not authorized to subscribe'},
{u'code': 404,
u'reason': u'WordOfTheDayList or User not found'}],
u'occurs': u'1',
u'valueType': u'void'}],
u'summary': u'Subscribes a user to a WordOfTheDayList'}],
u'path': u'/words.{format}/wordOfTheDayList/{permalink}/subscription'}]}}
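# Illustrative helper (an addition, not part of the original dump): the mapping
# above follows the Swagger 1.x layout, resource group -> endPoints ->
# operations -> parameters. Assuming the full dict is bound to a hypothetical
# name `api_docs`, the required parameters of one operation can be listed as:
def required_params(api_docs, group, path, http_method):
    """Names of the required parameters for one operation ('<body>' if unnamed)."""
    for endpoint in api_docs[group][u'endPoints']:
        if endpoint[u'path'] == path:
            for op in endpoint[u'operations']:
                if op[u'httpMethod'] == http_method:
                    return [p.get(u'name', u'<body>')
                            for p in op[u'parameters'] if p[u'required']]
    return []
# e.g. required_params(api_docs, 'words', u'/words.{format}/search', u'GET')
# returns [u'query', u'format'] per the search endpoint described above.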
| 90.29393 | 265 | 0.249071 | 6,442 | 113,048 | 4.369761 | 0.052934 | 0.089094 | 0.096519 | 0.042274 | 0.887851 | 0.877442 | 0.869556 | 0.864369 | 0.854352 | 0.8427 | 0 | 0.008396 | 0.67022 | 113,048 | 1,251 | 266 | 90.366107 | 0.746681 | 0 | 0 | 0.819345 | 0 | 0.010392 | 0.233883 | 0.012747 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.002398 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
37a74c8c3c256299c9e9641f65a0af896a1dc9ab | 27,586 | py | Python | sdk/python/pulumi_alicloud/vpc/nat_ip.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 42 | 2019-03-18T06:34:37.000Z | 2022-03-24T07:08:57.000Z | sdk/python/pulumi_alicloud/vpc/nat_ip.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 152 | 2019-04-15T21:03:44.000Z | 2022-03-29T18:00:57.000Z | sdk/python/pulumi_alicloud/vpc/nat_ip.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2020-08-26T17:30:07.000Z | 2021-07-05T01:37:45.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['NatIpArgs', 'NatIp']
@pulumi.input_type
class NatIpArgs:
def __init__(__self__, *,
nat_gateway_id: pulumi.Input[str],
dry_run: Optional[pulumi.Input[bool]] = None,
nat_ip: Optional[pulumi.Input[str]] = None,
nat_ip_cidr: Optional[pulumi.Input[str]] = None,
nat_ip_cidr_id: Optional[pulumi.Input[str]] = None,
nat_ip_description: Optional[pulumi.Input[str]] = None,
nat_ip_name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a NatIp resource.
:param pulumi.Input[str] nat_gateway_id: The ID of the Virtual Private Cloud (VPC) NAT gateway for which you want to create the NAT IP address.
:param pulumi.Input[bool] dry_run: Specifies whether to check the validity of the request without actually making the request.
:param pulumi.Input[str] nat_ip: The NAT IP address that you want to create. If you do not specify an IP address, the system selects a random IP address from the specified CIDR block.
:param pulumi.Input[str] nat_ip_cidr: The CIDR block to which the NAT IP address belongs.
:param pulumi.Input[str] nat_ip_cidr_id: The ID of the CIDR block to which the NAT IP address belongs.
:param pulumi.Input[str] nat_ip_description: The description of the NAT IP address. The description must be `2` to `256` characters in length and start with a letter or a Chinese character. It cannot start with `http://` or `https://`.
:param pulumi.Input[str] nat_ip_name: The name of the NAT IP address. The name must be `2` to `128` characters in length and start with a letter or a Chinese character. It can contain digits, periods (.), underscores (_), and hyphens (-), but cannot start with `http://` or `https://`.
"""
pulumi.set(__self__, "nat_gateway_id", nat_gateway_id)
if dry_run is not None:
pulumi.set(__self__, "dry_run", dry_run)
if nat_ip is not None:
pulumi.set(__self__, "nat_ip", nat_ip)
if nat_ip_cidr is not None:
pulumi.set(__self__, "nat_ip_cidr", nat_ip_cidr)
if nat_ip_cidr_id is not None:
pulumi.set(__self__, "nat_ip_cidr_id", nat_ip_cidr_id)
if nat_ip_description is not None:
pulumi.set(__self__, "nat_ip_description", nat_ip_description)
if nat_ip_name is not None:
pulumi.set(__self__, "nat_ip_name", nat_ip_name)
@property
@pulumi.getter(name="natGatewayId")
def nat_gateway_id(self) -> pulumi.Input[str]:
"""
The ID of the Virtual Private Cloud (VPC) NAT gateway for which you want to create the NAT IP address.
"""
return pulumi.get(self, "nat_gateway_id")
@nat_gateway_id.setter
def nat_gateway_id(self, value: pulumi.Input[str]):
pulumi.set(self, "nat_gateway_id", value)
@property
@pulumi.getter(name="dryRun")
def dry_run(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether to check the validity of the request without actually making the request.
"""
return pulumi.get(self, "dry_run")
@dry_run.setter
def dry_run(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "dry_run", value)
@property
@pulumi.getter(name="natIp")
def nat_ip(self) -> Optional[pulumi.Input[str]]:
"""
The NAT IP address that you want to create. If you do not specify an IP address, the system selects a random IP address from the specified CIDR block.
"""
return pulumi.get(self, "nat_ip")
@nat_ip.setter
def nat_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip", value)
@property
@pulumi.getter(name="natIpCidr")
def nat_ip_cidr(self) -> Optional[pulumi.Input[str]]:
"""
The CIDR block to which the NAT IP address belongs.
"""
return pulumi.get(self, "nat_ip_cidr")
@nat_ip_cidr.setter
def nat_ip_cidr(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip_cidr", value)
@property
@pulumi.getter(name="natIpCidrId")
def nat_ip_cidr_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the CIDR block to which the NAT IP address belongs.
"""
return pulumi.get(self, "nat_ip_cidr_id")
@nat_ip_cidr_id.setter
def nat_ip_cidr_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip_cidr_id", value)
@property
@pulumi.getter(name="natIpDescription")
def nat_ip_description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the NAT IP address. The description must be `2` to `256` characters in length and start with a letter or a Chinese character. It cannot start with `http://` or `https://`.
"""
return pulumi.get(self, "nat_ip_description")
@nat_ip_description.setter
def nat_ip_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip_description", value)
@property
@pulumi.getter(name="natIpName")
def nat_ip_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the NAT IP address. The name must be `2` to `128` characters in length and start with a letter or a Chinese character. It can contain digits, periods (.), underscores (_), and hyphens (-), but cannot start with `http://` or `https://`.
"""
return pulumi.get(self, "nat_ip_name")
@nat_ip_name.setter
def nat_ip_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip_name", value)
@pulumi.input_type
class _NatIpState:
def __init__(__self__, *,
dry_run: Optional[pulumi.Input[bool]] = None,
nat_gateway_id: Optional[pulumi.Input[str]] = None,
nat_ip: Optional[pulumi.Input[str]] = None,
nat_ip_cidr: Optional[pulumi.Input[str]] = None,
nat_ip_cidr_id: Optional[pulumi.Input[str]] = None,
nat_ip_description: Optional[pulumi.Input[str]] = None,
nat_ip_id: Optional[pulumi.Input[str]] = None,
nat_ip_name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering NatIp resources.
:param pulumi.Input[bool] dry_run: Specifies whether to check the validity of the request without actually making the request.
:param pulumi.Input[str] nat_gateway_id: The ID of the Virtual Private Cloud (VPC) NAT gateway for which you want to create the NAT IP address.
:param pulumi.Input[str] nat_ip: The NAT IP address that you want to create. If you do not specify an IP address, the system selects a random IP address from the specified CIDR block.
:param pulumi.Input[str] nat_ip_cidr: The CIDR block to which the NAT IP address belongs.
:param pulumi.Input[str] nat_ip_cidr_id: The ID of the CIDR block to which the NAT IP address belongs.
:param pulumi.Input[str] nat_ip_description: The description of the NAT IP address. The description must be `2` to `256` characters in length and start with a letter or a Chinese character. It cannot start with `http://` or `https://`.
:param pulumi.Input[str] nat_ip_name: The name of the NAT IP address. The name must be `2` to `128` characters in length and start with a letter or a Chinese character. It can contain digits, periods (.), underscores (_), and hyphens (-), but cannot start with `http://` or `https://`.
:param pulumi.Input[str] status: The status of the NAT IP address. Valid values: `Available`, `Deleting`, `Creating` and `Deleted`.
"""
if dry_run is not None:
pulumi.set(__self__, "dry_run", dry_run)
if nat_gateway_id is not None:
pulumi.set(__self__, "nat_gateway_id", nat_gateway_id)
if nat_ip is not None:
pulumi.set(__self__, "nat_ip", nat_ip)
if nat_ip_cidr is not None:
pulumi.set(__self__, "nat_ip_cidr", nat_ip_cidr)
if nat_ip_cidr_id is not None:
pulumi.set(__self__, "nat_ip_cidr_id", nat_ip_cidr_id)
if nat_ip_description is not None:
pulumi.set(__self__, "nat_ip_description", nat_ip_description)
if nat_ip_id is not None:
pulumi.set(__self__, "nat_ip_id", nat_ip_id)
if nat_ip_name is not None:
pulumi.set(__self__, "nat_ip_name", nat_ip_name)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="dryRun")
def dry_run(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether to check the validity of the request without actually making the request.
"""
return pulumi.get(self, "dry_run")
@dry_run.setter
def dry_run(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "dry_run", value)
@property
@pulumi.getter(name="natGatewayId")
def nat_gateway_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the Virtual Private Cloud (VPC) NAT gateway for which you want to create the NAT IP address.
"""
return pulumi.get(self, "nat_gateway_id")
@nat_gateway_id.setter
def nat_gateway_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_gateway_id", value)
@property
@pulumi.getter(name="natIp")
def nat_ip(self) -> Optional[pulumi.Input[str]]:
"""
The NAT IP address that you want to create. If you do not specify an IP address, the system selects a random IP address from the specified CIDR block.
"""
return pulumi.get(self, "nat_ip")
@nat_ip.setter
def nat_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip", value)
@property
@pulumi.getter(name="natIpCidr")
def nat_ip_cidr(self) -> Optional[pulumi.Input[str]]:
"""
The CIDR block to which the NAT IP address belongs.
"""
return pulumi.get(self, "nat_ip_cidr")
@nat_ip_cidr.setter
def nat_ip_cidr(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip_cidr", value)
@property
@pulumi.getter(name="natIpCidrId")
def nat_ip_cidr_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the CIDR block to which the NAT IP address belongs.
"""
return pulumi.get(self, "nat_ip_cidr_id")
@nat_ip_cidr_id.setter
def nat_ip_cidr_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip_cidr_id", value)
@property
@pulumi.getter(name="natIpDescription")
def nat_ip_description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the NAT IP address. The description must be `2` to `256` characters in length and start with a letter or a Chinese character. It cannot start with `http://` or `https://`.
"""
return pulumi.get(self, "nat_ip_description")
@nat_ip_description.setter
def nat_ip_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip_description", value)
@property
@pulumi.getter(name="natIpId")
def nat_ip_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "nat_ip_id")
@nat_ip_id.setter
def nat_ip_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip_id", value)
@property
@pulumi.getter(name="natIpName")
def nat_ip_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the NAT IP address. The name must be `2` to `128` characters in length and start with a letter or a Chinese character. It can contain digits, periods (.), underscores (_), and hyphens (-), but cannot start with `http://` or `https://`.
"""
return pulumi.get(self, "nat_ip_name")
@nat_ip_name.setter
def nat_ip_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_ip_name", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
The status of the NAT IP address. Valid values: `Available`, `Deleting`, `Creating` and `Deleted`.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
class NatIp(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dry_run: Optional[pulumi.Input[bool]] = None,
nat_gateway_id: Optional[pulumi.Input[str]] = None,
nat_ip: Optional[pulumi.Input[str]] = None,
nat_ip_cidr: Optional[pulumi.Input[str]] = None,
nat_ip_cidr_id: Optional[pulumi.Input[str]] = None,
nat_ip_description: Optional[pulumi.Input[str]] = None,
nat_ip_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a VPC Nat Ip resource.
For information about VPC Nat Ip and how to use it, see [What is Nat Ip](https://www.alibabacloud.com/help/doc-detail/281976.htm).
> **NOTE:** Available in v1.136.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
example_zones = alicloud.get_zones(available_resource_creation="VSwitch")
example_network = alicloud.vpc.Network("exampleNetwork",
vpc_name="example_value",
cidr_block="172.16.0.0/12")
example_switch = alicloud.vpc.Switch("exampleSwitch",
vpc_id=example_network.id,
cidr_block="172.16.0.0/21",
zone_id=example_zones.zones[0].id,
vswitch_name="example_value")
example_nat_gateway = alicloud.vpc.NatGateway("exampleNatGateway",
vpc_id=example_network.id,
internet_charge_type="PayByLcu",
nat_gateway_name="example_value",
description="example_value",
nat_type="Enhanced",
vswitch_id=example_switch.id,
network_type="intranet")
example_nat_ip_cidr = alicloud.vpc.NatIpCidr("exampleNatIpCidr",
nat_ip_cidr="192.168.0.0/16",
nat_gateway_id=example_nat_gateway.id,
nat_ip_cidr_description="example_value",
nat_ip_cidr_name="example_value")
example_nat_ip = alicloud.vpc.NatIp("exampleNatIp",
nat_ip="192.168.0.37",
nat_gateway_id=example_nat_gateway.id,
nat_ip_description="example_value",
nat_ip_name="example_value",
nat_ip_cidr=example_nat_ip_cidr.nat_ip_cidr)
```
## Import
VPC Nat Ip can be imported using the id, e.g.
```sh
$ pulumi import alicloud:vpc/natIp:NatIp example <nat_gateway_id>:<nat_ip_id>
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] dry_run: Specifies whether to check the validity of the request without actually making the request.
:param pulumi.Input[str] nat_gateway_id: The ID of the Virtual Private Cloud (VPC) NAT gateway for which you want to create the NAT IP address.
:param pulumi.Input[str] nat_ip: The NAT IP address that you want to create. If you do not specify an IP address, the system selects a random IP address from the specified CIDR block.
:param pulumi.Input[str] nat_ip_cidr: The CIDR block to which the NAT IP address belongs.
:param pulumi.Input[str] nat_ip_cidr_id: The ID of the CIDR block to which the NAT IP address belongs.
:param pulumi.Input[str] nat_ip_description: The description of the NAT IP address. The description must be `2` to `256` characters in length and start with a letter or a Chinese character. It cannot start with `http://` or `https://`.
:param pulumi.Input[str] nat_ip_name: The name of the NAT IP address. The name must be `2` to `128` characters in length and start with a letter or a Chinese character. It can contain digits, periods (.), underscores (_), and hyphens (-), but cannot start with `http://` or `https://`.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: NatIpArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a VPC Nat Ip resource.
For information about VPC Nat Ip and how to use it, see [What is Nat Ip](https://www.alibabacloud.com/help/doc-detail/281976.htm).
> **NOTE:** Available in v1.136.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
example_zones = alicloud.get_zones(available_resource_creation="VSwitch")
example_network = alicloud.vpc.Network("exampleNetwork",
vpc_name="example_value",
cidr_block="172.16.0.0/12")
example_switch = alicloud.vpc.Switch("exampleSwitch",
vpc_id=example_network.id,
cidr_block="172.16.0.0/21",
zone_id=example_zones.zones[0].id,
vswitch_name="example_value")
example_nat_gateway = alicloud.vpc.NatGateway("exampleNatGateway",
vpc_id=example_network.id,
internet_charge_type="PayByLcu",
nat_gateway_name="example_value",
description="example_value",
nat_type="Enhanced",
vswitch_id=example_switch.id,
network_type="intranet")
example_nat_ip_cidr = alicloud.vpc.NatIpCidr("exampleNatIpCidr",
nat_ip_cidr="192.168.0.0/16",
nat_gateway_id=example_nat_gateway.id,
nat_ip_cidr_description="example_value",
nat_ip_cidr_name="example_value")
example_nat_ip = alicloud.vpc.NatIp("exampleNatIp",
nat_ip="192.168.0.37",
nat_gateway_id=example_nat_gateway.id,
nat_ip_description="example_value",
nat_ip_name="example_value",
nat_ip_cidr=example_nat_ip_cidr.nat_ip_cidr)
```
## Import
VPC Nat Ip can be imported using the id, e.g.
```sh
$ pulumi import alicloud:vpc/natIp:NatIp example <nat_gateway_id>:<nat_ip_id>
```
:param str resource_name: The name of the resource.
:param NatIpArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(NatIpArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dry_run: Optional[pulumi.Input[bool]] = None,
nat_gateway_id: Optional[pulumi.Input[str]] = None,
nat_ip: Optional[pulumi.Input[str]] = None,
nat_ip_cidr: Optional[pulumi.Input[str]] = None,
nat_ip_cidr_id: Optional[pulumi.Input[str]] = None,
nat_ip_description: Optional[pulumi.Input[str]] = None,
nat_ip_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = NatIpArgs.__new__(NatIpArgs)
__props__.__dict__["dry_run"] = dry_run
if nat_gateway_id is None and not opts.urn:
raise TypeError("Missing required property 'nat_gateway_id'")
__props__.__dict__["nat_gateway_id"] = nat_gateway_id
__props__.__dict__["nat_ip"] = nat_ip
__props__.__dict__["nat_ip_cidr"] = nat_ip_cidr
__props__.__dict__["nat_ip_cidr_id"] = nat_ip_cidr_id
__props__.__dict__["nat_ip_description"] = nat_ip_description
__props__.__dict__["nat_ip_name"] = nat_ip_name
__props__.__dict__["nat_ip_id"] = None
__props__.__dict__["status"] = None
super(NatIp, __self__).__init__(
'alicloud:vpc/natIp:NatIp',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
dry_run: Optional[pulumi.Input[bool]] = None,
nat_gateway_id: Optional[pulumi.Input[str]] = None,
nat_ip: Optional[pulumi.Input[str]] = None,
nat_ip_cidr: Optional[pulumi.Input[str]] = None,
nat_ip_cidr_id: Optional[pulumi.Input[str]] = None,
nat_ip_description: Optional[pulumi.Input[str]] = None,
nat_ip_id: Optional[pulumi.Input[str]] = None,
nat_ip_name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None) -> 'NatIp':
"""
Get an existing NatIp resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] dry_run: Specifies whether to check the validity of the request without actually making the request.
:param pulumi.Input[str] nat_gateway_id: The ID of the Virtual Private Cloud (VPC) NAT gateway for which you want to create the NAT IP address.
:param pulumi.Input[str] nat_ip: The NAT IP address that you want to create. If you do not specify an IP address, the system selects a random IP address from the specified CIDR block.
:param pulumi.Input[str] nat_ip_cidr: The CIDR block to which the NAT IP address belongs.
:param pulumi.Input[str] nat_ip_cidr_id: The ID of the CIDR block to which the NAT IP address belongs.
:param pulumi.Input[str] nat_ip_description: The description of the NAT IP address. The description must be `2` to `256` characters in length and start with a letter or a Chinese character. It cannot start with `http://` or `https://`.
:param pulumi.Input[str] nat_ip_name: The name of the NAT IP address. The name must be `2` to `128` characters in length and start with a letter or a Chinese character. It can contain digits, periods (.), underscores (_), and hyphens (-), but cannot start with `http://` or `https://`.
:param pulumi.Input[str] status: The status of the NAT IP address. Valid values: `Available`, `Deleting`, `Creating` and `Deleted`.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _NatIpState.__new__(_NatIpState)
__props__.__dict__["dry_run"] = dry_run
__props__.__dict__["nat_gateway_id"] = nat_gateway_id
__props__.__dict__["nat_ip"] = nat_ip
__props__.__dict__["nat_ip_cidr"] = nat_ip_cidr
__props__.__dict__["nat_ip_cidr_id"] = nat_ip_cidr_id
__props__.__dict__["nat_ip_description"] = nat_ip_description
__props__.__dict__["nat_ip_id"] = nat_ip_id
__props__.__dict__["nat_ip_name"] = nat_ip_name
__props__.__dict__["status"] = status
return NatIp(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="dryRun")
def dry_run(self) -> pulumi.Output[bool]:
"""
Specifies whether to check the validity of the request without actually making the request.
"""
return pulumi.get(self, "dry_run")
@property
@pulumi.getter(name="natGatewayId")
def nat_gateway_id(self) -> pulumi.Output[str]:
"""
The ID of the Virtual Private Cloud (VPC) NAT gateway for which you want to create the NAT IP address.
"""
return pulumi.get(self, "nat_gateway_id")
@property
@pulumi.getter(name="natIp")
def nat_ip(self) -> pulumi.Output[Optional[str]]:
"""
The NAT IP address that you want to create. If you do not specify an IP address, the system selects a random IP address from the specified CIDR block.
"""
return pulumi.get(self, "nat_ip")
@property
@pulumi.getter(name="natIpCidr")
def nat_ip_cidr(self) -> pulumi.Output[Optional[str]]:
"""
The CIDR block to which the NAT IP address belongs.
"""
return pulumi.get(self, "nat_ip_cidr")
@property
@pulumi.getter(name="natIpCidrId")
def nat_ip_cidr_id(self) -> pulumi.Output[Optional[str]]:
"""
The ID of the CIDR block to which the NAT IP address belongs.
"""
return pulumi.get(self, "nat_ip_cidr_id")
@property
@pulumi.getter(name="natIpDescription")
def nat_ip_description(self) -> pulumi.Output[Optional[str]]:
"""
The description of the NAT IP address. The description must be `2` to `256` characters in length and start with a letter or a Chinese character. It cannot start with `http://` or `https://`.
"""
return pulumi.get(self, "nat_ip_description")
@property
@pulumi.getter(name="natIpId")
def nat_ip_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "nat_ip_id")
@property
@pulumi.getter(name="natIpName")
def nat_ip_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of the NAT IP address. The name must be `2` to `128` characters in length and start with a letter or a Chinese character. It can contain digits, periods (.), underscores (_), and hyphens (-), but cannot start with `http://` or `https://`.
"""
return pulumi.get(self, "nat_ip_name")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
"""
The status of the NAT IP address. Valid values: `Available`, `Deleting`, `Creating` and `Deleted`.
"""
return pulumi.get(self, "status")
| 47.644214 | 322 | 0.648082 | 3,793 | 27,586 | 4.465858 | 0.064856 | 0.073204 | 0.074385 | 0.076628 | 0.904363 | 0.892674 | 0.879745 | 0.874137 | 0.862034 | 0.850463 | 0 | 0.007472 | 0.248061 | 27,586 | 578 | 323 | 47.726644 | 0.80914 | 0.44055 | 0 | 0.745763 | 1 | 0 | 0.092567 | 0.001737 | 0 | 0 | 0 | 0 | 0 | 1 | 0.162712 | false | 0.00339 | 0.016949 | 0.00678 | 0.277966 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
807a5780507112b5e159e40fc891bd6dd97e9017 | 138 | py | Python | examples/docs_snippets/docs_snippets_tests/concepts_tests/io_management_tests/test_load_from_config.py | rpatil524/dagster | 6f918d94cbd543ab752ab484a65e3a40fd441716 | [
"Apache-2.0"
] | 1 | 2021-01-31T19:16:29.000Z | 2021-01-31T19:16:29.000Z | examples/docs_snippets/docs_snippets_tests/concepts_tests/io_management_tests/test_load_from_config.py | rpatil524/dagster | 6f918d94cbd543ab752ab484a65e3a40fd441716 | [
"Apache-2.0"
] | null | null | null | examples/docs_snippets/docs_snippets_tests/concepts_tests/io_management_tests/test_load_from_config.py | rpatil524/dagster | 6f918d94cbd543ab752ab484a65e3a40fd441716 | [
"Apache-2.0"
] | 1 | 2019-09-11T03:02:27.000Z | 2019-09-11T03:02:27.000Z | from docs_snippets.concepts.io_management.load_from_config import execute_with_config
def test_execute_job():
execute_with_config()
| 23 | 85 | 0.847826 | 20 | 138 | 5.35 | 0.7 | 0.205607 | 0.317757 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094203 | 138 | 5 | 86 | 27.6 | 0.856 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
80cacf79d0e729898f38de1c18f5117269805ad4 | 14,858 | py | Python | test/testin.py | jmelchio/spinnaker-resource | 14b074fdcb3415269f5963e34f7e635c03b92e13 | [
"Apache-2.0"
] | 2 | 2019-04-08T23:03:10.000Z | 2019-04-08T23:41:47.000Z | test/testin.py | jmelchio/spinnaker-resource | 14b074fdcb3415269f5963e34f7e635c03b92e13 | [
"Apache-2.0"
] | null | null | null | test/testin.py | jmelchio/spinnaker-resource | 14b074fdcb3415269f5963e34f7e635c03b92e13 | [
"Apache-2.0"
] | 2 | 2020-04-01T22:21:06.000Z | 2020-04-02T00:54:25.000Z | import json
import os
import sys
import unittest
from io import StringIO
from unittest.mock import patch
from assets import inscript
spinnaker_waitforconcourse_running = json.loads('''
[
{
"application": "metricsdemo",
"authentication": {
"allowedAccounts": [
"seoul",
"montclair",
"atherton"
],
"user": "anonymous"
},
"buildTime": 1554412918160,
"canceled": false,
"id": "01D7N3NNCGRF14VNPHMM46X19X",
"initialConfig": {},
"keepWaitingPipelines": false,
"limitConcurrent": true,
"name": "block",
"notifications": [],
"origin": "api",
"pipelineConfigId": "4652d7ac-e9af-41b2-b41f-a946e24354f2",
"stages": [
{
"context": {
"master": "some-master",
"teamName": "A-team",
"pipelineName": "some-pipeline",
"resourceName": "spin-resource",
"parameters": {
"thing_one": "one",
"thing_two": "two"
}
},
"id": "01D7N3NNCG0GBKK28RS25R4HX4",
"name": "Manual Judgment",
"outputs": {},
"refId": "1",
"requisiteStageRefIds": [],
"startTime": 1554412918193,
"status": "RUNNING",
"tasks": [
{
"id": "1",
"implementingClass": "com.netflix.spinnaker.orca.echo.pipeline.ManualJudgmentStage$WaitForManualJudgmentTask",
"loopEnd": false,
"loopStart": false,
"name": "waitForConcourseJobStartTask",
"stageEnd": true,
"stageStart": true,
"startTime": 1554412918208,
"status": "RUNNING"
}
],
"type": "concourse"
}
],
"startTime": 1554412918173,
"status": "RUNNING",
"systemNotifications": [],
"trigger": {
"artifacts": [],
"dryRun": false,
"enabled": false,
"eventId": "fdc68837-d4ae-421a-817d-c9d31d532939",
"notifications": [],
"parameters": {},
"rebake": false,
"resolvedExpectedArtifacts": [],
"strategy": false,
"type": "manual",
"user": "anonymous"
},
"type": "PIPELINE"
}
]
''')
spinnaker_waitforconcourse_completed = json.loads('''
[
{
"application": "metricsdemo",
"authentication": {
"allowedAccounts": [
"seoul",
"montclair",
"atherton"
],
"user": "anonymous"
},
"buildTime": 1554412918160,
"canceled": false,
"id": "01D7N3NNCGRF14VNPHMM46X19X",
"initialConfig": {},
"keepWaitingPipelines": false,
"limitConcurrent": true,
"name": "block",
"notifications": [],
"origin": "api",
"pipelineConfigId": "4652d7ac-e9af-41b2-b41f-a946e24354f2",
"stages": [
{
"context": {
"master": "some-master",
"teamName": "A-team",
"pipelineName": "some-pipeline",
"resourceName": "spin-resource",
"parameters": {
"thing_one": "one",
"thing_two": "two"
}
},
"id": "01D7N3NNCG0GBKK28RS25R4HX4",
"name": "Manual Judgment",
"outputs": {},
"refId": "1",
"requisiteStageRefIds": [],
"startTime": 1554412918193,
"status": "RUNNING",
"tasks": [
{
"id": "1",
"implementingClass": "com.netflix.spinnaker.orca.echo.pipeline.ManualJudgmentStage$WaitForManualJudgmentTask",
"loopEnd": false,
"loopStart": false,
"name": "waitForConcourseJobStartTask",
"stageEnd": true,
"stageStart": true,
"startTime": 1554412918208,
"status": "COMPLETED"
}
],
"type": "concourse"
}
],
"startTime": 1554412918173,
"status": "RUNNING",
"systemNotifications": [],
"trigger": {
"artifacts": [],
"dryRun": false,
"enabled": false,
"eventId": "fdc68837-d4ae-421a-817d-c9d31d532939",
"notifications": [],
"parameters": {},
"rebake": false,
"resolvedExpectedArtifacts": [],
"strategy": false,
"type": "manual",
"user": "anonymous"
},
"type": "PIPELINE"
}
]
''')
spinnaker_multiple_values = json.loads('''
[
{
"application": "metricsdemo",
"authentication": {
"allowedAccounts": [
"seoul",
"montclair",
"atherton"
],
"user": "anonymous"
},
"buildTime": 1554412918160,
"canceled": false,
"id": "01D7N3NNCGRF14VNPHMM46X19X",
"initialConfig": {},
"keepWaitingPipelines": false,
"limitConcurrent": true,
"name": "block",
"notifications": [],
"origin": "api",
"pipelineConfigId": "4652d7ac-e9af-41b2-b41f-a946e24354f2",
"stages": [
{
"context": {
"failPipeline": true,
"instructions": "Should I complete?",
"judgmentInputs": [],
"notifications": []
},
"id": "01D7N3NNCG0GBKK28RS25R4HX4",
"name": "Manual Judgment",
"outputs": {},
"refId": "1",
"requisiteStageRefIds": [],
"startTime": 1554412918193,
"status": "RUNNING",
"tasks": [
{
"id": "1",
"implementingClass": "com.netflix.spinnaker.orca.echo.pipeline.ManualJudgmentStage$WaitForManualJudgmentTask",
"loopEnd": false,
"loopStart": false,
"name": "waitForJudgment",
"stageEnd": true,
"stageStart": true,
"startTime": 1554412918208,
"status": "RUNNING"
}
],
"type": "manualJudgment"
},
{
"context": {
"master": "some-master",
"teamName": "A-team",
"pipelineName": "some-pipeline",
"resourceName": "spin-resource"
},
"id": "01D7N3NNCG0GBKK28RS25R4HX4",
"name": "Manual Judgment",
"outputs": {},
"refId": "1",
"requisiteStageRefIds": [],
"startTime": 1554412918193,
"status": "RUNNING",
"tasks": [
{
"id": "1",
"implementingClass": "com.netflix.spinnaker.orca.echo.pipeline.ManualJudgmentStage$WaitForManualJudgmentTask",
"loopEnd": false,
"loopStart": false,
"name": "waitForConcourseJobStartTask",
"stageEnd": true,
"stageStart": true,
"startTime": 1554412918208,
"status": "RUNNING"
}
],
"type": "concourse"
},
{
"context": {
"master": "some-master",
"teamName": "A-team",
"pipelineName": "some-pipeline",
"resourceName": "spin-resource"
},
"id": "01D7N3NNCGZ2PWFS2FKYBS2FFV",
"name": "Clone Server Group",
"outputs": {},
"refId": "2",
"requisiteStageRefIds": [
"1"
],
"status": "NOT_STARTED",
"tasks": [],
"type": "concourse"
}
],
"startTime": 1554412918173,
"status": "RUNNING",
"systemNotifications": [],
"trigger": {
"artifacts": [],
"dryRun": false,
"enabled": false,
"eventId": "fdc68837-d4ae-421a-817d-c9d31d532939",
"notifications": [],
"parameters": {},
"rebake": false,
"resolvedExpectedArtifacts": [],
"strategy": false,
"type": "manual",
"user": "anonymous"
},
"type": "PIPELINE"
}
]
''')
concourse_in_match_version = json.loads(''' { "source":
{ "base_url": "http://spinnaker.gate:8084/", "app_name": "metricsdemo", "master": "some-master", "team_name": "A-team",
"pipeline_name": "some-pipeline", "resource_name": "spin-resource", "path": "file.props"},
"version": { "stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}} ''')
concourse_in_match_version_two = json.loads(''' { "source":
{ "base_url": "http://spinnaker.gate:8084/", "app_name": "metricsdemo", "master": "some-master", "team_name": "A-team",
"pipeline_name": "some-pipeline", "resource_name": "spin-resource", "path": "file_two.props"},
"version": { "stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}} ''')
concourse_in_match_version_three = json.loads(''' { "source":
{ "base_url": "http://spinnaker.gate:8084/", "app_name": "metricsdemo", "master": "some-master", "team_name": "A-team",
"pipeline_name": "some-pipeline", "resource_name": "spin-resource", "path": "file_three.props"},
"version": { "stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}} ''')
concourse_in_without_baseurl = json.loads('''{ "source": { "app_name": "metricsdemo", "master": "some-master"
, "team_name": "A-team", "pipeline_name": "some-pipeline", "resource_name": "spin-resource"},
"version": {"stage_guid": "1"}}''')
class TestIn(unittest.TestCase):
@patch('assets.common.call_spinnaker', return_value=spinnaker_waitforconcourse_running)
@patch('assets.common.capture_input', return_value=concourse_in_match_version)
@patch('assets.inscript.notify_spinnaker', return_value=True)
def test_unit_happy_path(self, notify_spinnaker, capture_input, call_spinnaker):  # stacked @patch mocks are injected bottom-up
backup = sys.stdout
sys.stdout = StringIO()
inscript.main('/tmp/')
out = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = backup
self.assertEqual(out,
'{"version": {"stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}, "job_name": "job-unknown", \
"build_name": "build-number-name", "metadata": [{"name": "thing_one", "value": "one"}, \
{"name": "thing_two", "value": "two"}]}\n',
'Wrong information returned from in script')
self.assertTrue(os.path.isfile('/tmp/file.props'), 'File does not exist.')
with open('/tmp/file.props', 'r') as config_file:
contents = config_file.read()
self.assertEqual(contents, 'thing_one=one\nthing_two=two\n', 'String not found')
os.remove('/tmp/file.props')
@patch('assets.common.call_spinnaker', return_value=spinnaker_multiple_values)
@patch('assets.common.capture_input', return_value=concourse_in_match_version_two)
@patch('assets.inscript.notify_spinnaker', return_value=True)
def test_unit_happy_path_no_parameters(self, notify_spinnaker, capture_input, call_spinnaker):  # mocks in bottom-up patch order
backup = sys.stdout
sys.stdout = StringIO()
inscript.main('/tmp/')
out = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = backup
self.assertEqual(out,
'{"version": {"stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}, "job_name": "job-unknown", \
"build_name": "build-number-name", "metadata": []}\n', 'Wrong information returned from in script')
self.assertTrue(os.path.isfile('/tmp/file_two.props'), 'File does not exist.')
with open('/tmp/file_two.props', 'r') as config_file:
contents = config_file.read()
self.assertEqual(contents, '', 'File not empty')
os.remove('/tmp/file_two.props')
@patch('assets.common.call_spinnaker', return_value=spinnaker_waitforconcourse_completed)
@patch('assets.common.capture_input', return_value=concourse_in_match_version_three)
@patch('assets.inscript.notify_spinnaker', return_value=True)
def test_unit_crappy_path_no_running_wait_task(self, notify_spinnaker, capture_input, call_spinnaker):  # mocks in bottom-up patch order
backup = sys.stderr
sys.stderr = StringIO()
with self.assertRaises(SystemExit) as context:
inscript.main('/tmp/')
err = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = backup
self.assertEqual(str(context.exception), '1', 'Return code of `1` expected')
self.assertEqual(err, 'No running Wait for Concourse task found\nSystem Exit detected\n')
@patch('assets.common.capture_input', return_value=concourse_in_without_baseurl)
def test_unit_crappy_path_missing_base_url(self, capture_input):
backup = sys.stderr
sys.stderr = StringIO()
with self.assertRaises(SystemExit) as context:
inscript.main('/tmp/')
err = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = backup
self.assertEqual(str(context.exception), '1', 'Return code of `1` expected')
self.assertEqual(err, 'Unable to complete operation: \'base_url\'\nSystem Exit detected\n',
'Expected error message about base_url')
class TestTimeOut(unittest.TestCase):
def test_unit_crappy_path_timeout(self):
backup = sys.stderr
sys.stderr = StringIO()
with self.assertRaises(SystemExit) as context:
inscript.main('/tmp/')
err = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = backup
self.assertEqual(str(context.exception), '1', 'Return code of `1` expected')
if __name__ == '__main__':
unittest.main()
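# Side note (an addition, not in the original tests): the manual sys.stdout
# backup/restore pattern above can also be written with contextlib, which
# restores the stream even if inscript.main raises:
#   from contextlib import redirect_stdout
#   buf = StringIO()
#   with redirect_stdout(buf):
#       inscript.main('/tmp/')
#   out = buf.getvalue()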
| 37.331658 | 134 | 0.49899 | 1,117 | 14,858 | 6.501343 | 0.19248 | 0.01859 | 0.017626 | 0.019003 | 0.871798 | 0.862435 | 0.862435 | 0.854448 | 0.854448 | 0.776783 | 0 | 0.049406 | 0.359739 | 14,858 | 397 | 135 | 37.425693 | 0.71397 | 0 | 0 | 0.712366 | 0 | 0.021505 | 0.756764 | 0.115022 | 0 | 0 | 0 | 0 | 0.037634 | 1 | 0.013441 | false | 0 | 0.018817 | 0 | 0.037634 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
03bbc951e29b58507c5bab1390864a843e11e66d | 146 | py | Python | stest.py | rzaracota/vulkan-rails | a8196e42bfb4ea68b6d6721d8d328eaf18e8dadd | [
"MIT"
] | null | null | null | stest.py | rzaracota/vulkan-rails | a8196e42bfb4ea68b6d6721d8d328eaf18e8dadd | [
"MIT"
] | 1 | 2017-08-09T04:19:49.000Z | 2017-08-09T04:19:49.000Z | stest.py | rzaracota/vulkan-rails | a8196e42bfb4ea68b6d6721d8d328eaf18e8dadd | [
"MIT"
] | null | null | null | # A hypothetical pairing for SCons to test a built program.
from subprocess import PIPE, run
# Run the freshly built binary; check=True makes a non-zero exit fail the test run.
run(["./driver"], stdout=PIPE, check=True)
| 24.333333 | 58 | 0.773973 | 22 | 146 | 5.136364 | 0.727273 | 0.247788 | 0.353982 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.150685 | 146 | 5 | 59 | 29.2 | 0.91129 | 0.383562 | 0 | 0 | 0 | 0 | 0.090909 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
03f9a686231fbf05d4d75c03291a57ba361b59ae | 30,886 | py | Python | good_params/mnist/test_config_padding.py | Lupin1998/inv-ML | 9f3db461911748292dff18024587538eb66d44bf | [
"MIT"
] | 1 | 2021-12-14T09:16:17.000Z | 2021-12-14T09:16:17.000Z | good_params/mnist/test_config_padding.py | Lupin1998/inv-ML | 9f3db461911748292dff18024587538eb66d44bf | [
"MIT"
] | null | null | null | good_params/mnist/test_config_padding.py | Lupin1998/inv-ML | 9f3db461911748292dff18024587538eb66d44bf | [
"MIT"
] | 2 | 2021-12-14T09:10:00.000Z | 2022-01-21T16:57:44.000Z | # mnist baseline + ExtraHead + Orth Loss + Padding loss
def import_test_config(test_num, mode='encoder'):
    if test_num == 1:
        return test_1(mode)
    elif test_num == 2:
        return test_2(mode)
    elif test_num == 3:
        return test_3(mode)
    elif test_num == 4:
        return test_4(mode)
    # elif test_num == 5:
    #     return test_5(mode)
    # elif test_num == 6:
    #     return test_6(mode)
    # elif test_num == 7:
    #     return test_7(mode)
    # elif test_num == 8:
    #     return test_8(mode)
    else:
        # Fail fast instead of silently returning None for an unknown test number.
        raise ValueError('unsupported test_num: {}'.format(test_num))
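# Illustrative usage (an addition, not in the original file): fetch the
# encoder-phase hyper-parameters for experiment 1 and read off the depth.
#   params = import_test_config(1, mode='encoder')
#   n_layers = len(params['NetworkStructure']['layer']) - 1  # 8 weight layers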
def base_params(mode):
param = {}
if mode == 'encoder':
param = dict(
# regular
EPOCHS=8000,
ratio = dict(AE=0.005, dist=1, angle=0, push=0.8, orth=0, pad=0),
add_jump = True,
# structure
NetworkStructure = dict(
layer = [784, 784, 784, 784, 784, 784, 784, 784, 10],
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# Extra Head (DR project)
ExtraHead = dict(
layer = [],
weight = [],
),
# AE
AEWeight = dict(
each = [],
AE_gradual = [0, 0, 1],
),
# LIS
LISWeght = dict(
cross = [1,], # ok
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [1, 1, 1],
enc_forward_w = [1, 1, 1],
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [1, 1, 1],
# gradual
LIS_gradual = [0, 0, 1], # [start, end, mode]
push_gradual = dict(
cross_w = [500, 1000, 0],
enc_w = [500, 1000, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [0, 0, 0],
),
),
# Orth
OrthWeight = dict(
Orth_gradual = [0, 0, 1],
each = [],
),
### inverse mode
InverseMode = dict(
mode = "pinverse", #"CSinverse",
loss_type = "L2",
padding = [ 0, 0, 0, 0, 0, 0, 0, 0],
pad_w = [ 0, 0, 0, 0, 0, 0, 0, 0],
pad_gradual = [0, 0, 1],
),
)
elif mode == 'decoder':
param = dict(
# regular
EPOCHS= 1,
ratio = dict(AE=0.005, dist=1, angle=0, push=0.8, orth=0, pad=0),
add_jump = True,
# structure
NetworkStructure = dict(
layer = [784, 784, 784, 784, 784, 784, 784, 784, 10],
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# Extra Head (DR project)
ExtraHead = dict(
layer = [],
weight = [],
),
# AE
AEWeight = dict(
each = [],
AE_gradual = [0, 0, 1],
),
# LIS
LISWeght = dict(
cross = [0,], # ok
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [0, 0, 0],
enc_forward_w = [0, 0, 0],
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [0, 0, 0], # 0730, add new
# gradual
LIS_gradual = [0, 0, 1], # [start, end, mode]
push_gradual = dict( # [1 -> 0]
cross_w = [0, 0, 0],
enc_w = [0, 0, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [0, 0, 0],
),
),
)
# result
return param
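# Note (an addition, for clarity): each test_N below starts from these defaults
# and overwrites whole top-level keys via dict.update; the merge is shallow,
# so nested dicts such as LISWeght are replaced wholesale, not merged key-wise.
#   params = base_params('encoder')
#   params.update(dict(EPOCHS=10000))  # replaces the key; no deep merge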
# mnist, 8-layer ML-Enc + ExtraHead + Orth + Padding, d=2, good
def test_1(mode):
# copy of 0812: 8 layers inverse + DR + padding, (class=10), (0812-2)
params = base_params(mode)
if mode == 'encoder':
param = dict(
numberClass = 10,
# regular
DATASET="mnist",
# BATCHSIZE = 20000,
# N_dataset = 20000,
BATCHSIZE = 10000,
N_dataset = 10000,
EPOCHS=10000,
regularB = 100,
# regularB = 10,
MAEK = 30,
PlotForloop = 1000,
ratio = dict(AE=0, dist=1, angle=0, push=0.8,
orth=0.1,
# pad=20
pad=100 # ok
),
# structure
NetworkStructure = dict(
# layer = [784, 784, 784, 784, 784, 784, 784, 784, 10],
layer = [784, 784, 784, 784, 784, 784, 784, 784, 2],
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# Extra Head (DR project)
ExtraHead = dict(
layer = [0, 12, 10, 8, 6, 4, 3, 2, ],
weight = [ 2, 4, 8, 16, 32, 64, 128, ],
),
# LIS
LISWeght = dict(
cross = [1,], # ok
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [1, 1, 10],
enc_forward_w = [1, 1, 10],
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [1, 1, 1],
# gradual
LIS_gradual = [0, 0, 1],
push_gradual = dict(
cross_w = [4000, 9000, 0],
enc_w = [3000, 8000, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [3000, 8000, 0],
),
),
# Orth
OrthWeight = dict(
Orth_gradual = [50, 1200, 1],
each = [8000, 5500, 1500, 600, 350, 200, 100, ],
),
# inverse mode, 0808-1
InverseMode = dict(
mode = "ZeroPadding", #"CSinverse",
loss_type = "L1",
padding = [ 0, 780, 760, 700, 750, 650, 700, 630, 600], # 0814
pad_w = [ 0, 2, 4, 16, 8, 32, 16, 64, 64], # 0813
pad_gradual = [0, 3500, 1],
),
)
elif mode == 'decoder':
param = dict(
# regular
numberClass = 10,
DATASET="mnist",
BATCHSIZE = 10000,
N_dataset = 10000,
EPOCHS= 0,
PlotForloop = 1,
ratio = dict(AE=0.005, dist=1, angle=0, push=0.8, orth=0, pad=0),
# structure
NetworkStructure = dict(
layer = [784, 784, 784, 784, 784, 784, 784, 784, 2],
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
Enc_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
Dec_require_gard = [ 1, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# AE layer
AEWeight = dict(
each = [1, 0, 0, 0, 0, 0, 0, 0, 0],
AE_gradual = [0, 0, 1], # [start, end, mode]
),
# LIS
LISWeght = dict(
cross = [0,], # ok
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_forward = [1, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [0, 0, 0],
enc_forward_w = [0, 0, 0], # [dist, angle, push]
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [1, 1, 1],
# gradual
LIS_gradual = [0, 0, 1], # [start, end, mode]
push_gradual = dict(
cross_w = [0, 0, 0],
enc_w = [0, 0, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [0, 0, 0],
),
),
)
# result
params.update(param)
return params
# mnist, 8-layer ML-Enc + ExtraHead + Orth + Padding, d=2, ok
def test_2(mode):
# copy of test_6_0814_6: 8 layers inverse + DR + padding, Good!
params = base_params(mode)
if mode == 'encoder':
param = dict(
numberClass = 10,
# regular
DATASET="mnist",
BATCHSIZE = 10000,
N_dataset = 10000,
EPOCHS=10000,
regularB = 100,
# regularB = 10,
MAEK = 30,
PlotForloop = 1000,
ratio = dict(AE=0, dist=1, angle=0, push=0.8,
orth=0.1,
# pad=20
pad=100
),
# structure
NetworkStructure = dict(
layer = [784, 784, 784, 784, 784, 784, 784, 784, 2],
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# Extra Head (DR project)
ExtraHead = dict(
layer = [0, 12, 10, 8, 6, 4, 3, 2, ],
weight = [ 2, 4, 8, 16, 32, 64, 128, ],
),
# LIS
LISWeght = dict(
cross = [1,],
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [1, 1, 10],
enc_forward_w = [1, 1, 10],
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [1, 1, 1],
# gradual
LIS_gradual = [0, 0, 1], # [start, end, mode]
push_gradual = dict(
cross_w = [4000, 9500, 0],
enc_w = [3000, 8000, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [2000, 8000, 0],
),
),
# Orth
OrthWeight = dict(
Orth_gradual = [50, 1200, 1],
each = [8000, 5500, 1500, 600, 350, 200, 100, ],
),
# inverse mode, 0808-1
InverseMode = dict(
mode = "ZeroPadding",
loss_type = "L1",
padding = [ 0, 780, 760, 700, 750, 650, 700, 630, 600], # 0814, good for mnist!
# pad_w = [ 0, 1, 1, 1, 1, 1, 1, 1, 1], # ori
pad_w = [ 0, 2, 4, 16, 8, 32, 16, 64, 64], # 0813-3,
# pad_gradual = [0, 3500, 1], # ori
pad_gradual = [0, 3000, 1],
),
)
elif mode == 'decoder':
param = dict(
# regular
numberClass = 10, # 0812
DATASET="mnist",
BATCHSIZE = 10000,
N_dataset = 10000,
EPOCHS= 0,
PlotForloop = 1,
ratio = dict(AE=0.005, dist=1, angle=0, push=0.8, orth=0, pad=0),
# structure
NetworkStructure = dict(
layer = [784, 784, 784, 784, 784, 784, 784, 784, 2],
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
Enc_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
Dec_require_gard = [ 1, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# AE layer
AEWeight = dict(
each = [1, 0, 0, 0, 0, 0, 0, 0, 0],
AE_gradual = [0, 0, 1], # [start, end, mode]
),
# LIS
LISWeght = dict(
cross = [0,],
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_forward = [1, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [0, 0, 0],
enc_forward_w = [0, 0, 0], # [dist, angle, push]
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [1, 1, 1],
# gradual
LIS_gradual = [0, 0, 1], # [start, end, mode]
push_gradual = dict(
cross_w = [0, 0, 0],
enc_w = [0, 0, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [0, 0, 0],
),
),
)
# result
params.update(param)
return params
# mnist, 8-layer ML-Enc + ExtraHead + Orth + Padding, d=10
def test_3(mode):
# copy of test_6_0814_6: 8 layers inverse + DR + padding, Good!
params = base_params(mode)
if mode == 'encoder':
param = dict(
numberClass = 10,
# regular
DATASET="mnist",
BATCHSIZE = 10000,
N_dataset = 10000,
EPOCHS=10000,
# regularB = 100,
regularB = 10,
MAEK = 30,
PlotForloop = 1000,
ratio = dict(AE=0, dist=1, angle=0, push=0.8,
orth=0.1,
pad=100
),
# structure
NetworkStructure = dict(
layer = [784, 784, 784, 784, 784, 784, 784, 784, 10],
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1],
Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# Extra Head (DR project)
ExtraHead = dict(
layer = [0, 12, 10, 8, 6, 4, 3, 2, ],
weight = [ 2, 4, 8, 16, 32, 64, 128, ],
),
# LIS
LISWeght = dict(
cross = [1,],
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [1, 1, 10],
# enc_forward_w = [1, 1, 10],
enc_forward_w = [1, 1, 15],
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [1, 1, 1],
# gradual
LIS_gradual = [0, 0, 1], # [start, end, mode]
push_gradual = dict(
cross_w = [4000, 9500, 0],
enc_w = [3000, 8000, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [2000, 8000, 0],
),
),
# Orth
OrthWeight = dict(
Orth_gradual = [50, 1200, 1],
each = [8000, 5800, 1500, 600, 380, 240, 100, ],
),
# inverse mode, 0808-1
InverseMode = dict(
mode = "ZeroPadding",
loss_type = "L1",
padding = [ 0, 780, 760, 700, 750, 650, 700, 630, 600], # 0814, good for mnist
pad_w = [ 0, 2, 4, 16, 8, 32, 16, 64, 64],
pad_gradual = [0, 3000, 1],
),
)
elif mode == 'decoder':
param = dict(
# regular
numberClass = 10,
DATASET="mnist",
BATCHSIZE = 10000,
N_dataset = 10000,
EPOCHS= 0,
PlotForloop = 1,
ratio = dict(AE=0.005, dist=1, angle=0, push=0.8, orth=0, pad=0),
# structure
NetworkStructure = dict(
layer = [784, 784, 784, 784, 784, 784, 784, 784, 10],
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
Enc_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
Dec_require_gard = [ 1, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# AE layer
AEWeight = dict(
each = [1, 0, 0, 0, 0, 0, 0, 0, 0],
AE_gradual = [0, 0, 1], # [start, end, mode]
),
# LIS
LISWeght = dict(
cross = [0,],
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_forward = [1, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [0, 0, 0],
enc_forward_w = [0, 0, 0], # [dist, angle, push]
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [1, 1, 1],
# gradual
LIS_gradual = [0, 0, 1], # [start, end, mode]
push_gradual = dict(
cross_w = [0, 0, 0],
enc_w = [0, 0, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [0, 0, 0],
),
),
)
# result
params.update(param)
return params
# achieve a sparser embedding:
# mnist, 8-layer ML-Enc + ExtraHead + Orth + Padding, d=10; train the last 2 layers for a sparser embedding.
def test_4(mode):
params = base_params(mode)
if mode == 'encoder':
param = dict(
resume_from = "./final_summary/mnist/+Padding/test1_good/MLP_8layer_0814_6_mnist(inv+pad)_good/checkpoints/8000.pth",
numberClass = 10,
# regular
DATASET="mnist",
BATCHSIZE = 10000,
N_dataset = 10000,
EPOCHS = 4000,
regularB = 3,
MAEK = 15,
PlotForloop = 1000,
ratio = dict(AE=0, dist=1, angle=0, push=0.8,
orth=0.1,
pad = 0,
# pad=20
# pad=100 # ok
),
# structure
NetworkStructure = dict(
layer = [784, 784, 784, 784, 784, 784, 784, 784, 10],
# layer = [784, 784, 784, 784, 784, 784, 784, 784, 2], # prev
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
# Enc_require_gard = [ 1, 1, 1, 1, 1, 1, 1, 1], # prev
Enc_require_gard = [ 0, 0, 0, 0, 0, 0, 1, 1],
Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# Extra Head (DR project)
ExtraHead = dict(
# layer = [0, 12, 10, 8, 6, 4, 3, 2, ], # prev
# weight = [ 2, 4, 8, 16, 32, 64, 128, ],
layer = [0, 0, 0, 0, 0, 0, 0, 2, ],
weight = [ 64, ],
),
# LIS
LISWeght = dict(
cross = [1,], # ok
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 1],
dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [1, 1, 10],
enc_forward_w = [1, 1, 10],
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [1, 1, 1],
# gradual
LIS_gradual = [0, 0, 1],
# push_gradual = dict(
# cross_w = [4000, 9000, 0],
# enc_w = [3000, 8000, 0],
# dec_w = [0, 0, 0],
# each_w = [0, 0, 0],
# extra_w = [3000, 8000, 0],
# ),
push_gradual = dict(
cross_w = [2000, 8000, 0],
enc_w = [ 500, 6000, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [ 500, 6000, 0],
),
),
# Orth
OrthWeight = dict(
# Orth_gradual = [50, 1200, 1], # prev
# each = [8000, 5500, 1500, 600, 350, 200, 100, ],
Orth_gradual = [0, 500, 1],
each = [0, 0, 0, 0, 0, 0, 400, ],
),
# inverse mode, 0808-1
# InverseMode = dict(
# mode = "ZeroPadding", #"CSinverse",
# loss_type = "L1",
# # padding = [ 0, 780, 760, 700, 750, 650, 700, 630, 600], # prev
# # pad_w = [ 0, 2, 4, 16, 8, 32, 16, 64, 64], # prev
# # pad_gradual = [0, 3500, 1], # prev
# padding = [ 0, 0, 0, 0, 0, 0, 0, 550],
# pad_w = [ 0, 0, 0, 0, 0, 0, 0, 32],
# pad_gradual = [0, 500, 1],
# ),
)
elif mode == 'decoder':
param = dict(
# regular
numberClass = 10,
DATASET="mnist",
BATCHSIZE = 10000,
N_dataset = 10000,
# EPOCHS= 0, # prev
EPOCHS= 1000, # try
regularB = 3,
MAEK = 15,
PlotForloop = 1000,
ratio = dict(AE=0, dist=1, angle=0,
# push=0.8, # prev
push = 5,
orth=0, pad=0),
# structure
NetworkStructure = dict(
# layer = [784, 784, 784, 784, 784, 784, 784, 784, 2],
layer = [784, 784, 784, 784, 784, 784, 784, 784, 10],
relu = [ 1, 1, 1, 1, 1, 1, 1, 0],
# Enc_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0], # prev
Enc_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 1], # try
Dec_require_gard = [ 0, 0, 0, 0, 0, 0, 0, 0],
inv_Enc=0, inv_Dec=1,
),
# AE layer
AEWeight = dict(
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
AE_gradual = [0, 0, 1], # [start, end, mode]
),
# LIS
LISWeght = dict(
# cross = [0,], # prev
cross = [1,], # try
enc_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_forward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
enc_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
dec_backward = [0, 0, 0, 0, 0, 0, 0, 0, 0],
each = [0, 0, 0, 0, 0, 0, 0, 0, 0],
# [dist, angle, push],
cross_w = [1, 1, 10],
enc_forward_w = [1, 0, 10],
dec_forward_w = [0, 0, 0],
enc_backward_w = [0, 0, 0],
dec_backward_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [0, 0, 0],
# gradual
LIS_gradual = [0, 0, 1],
push_gradual = dict(
# cross_w = [0, 0, 0], # prev
cross_w = [500, 5000, 0], # try
enc_w = [0, 0, 0],
dec_w = [0, 0, 0],
each_w = [0, 0, 0],
extra_w = [0, 0, 0],
),
),
)
# result
params.update(param)
return params
# layer 14: D_rank <= 150
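# Illustrative helper (an assumption, not part of the original config): one
# plausible reading of the [start, end, mode] "gradual" schedules above, in
# which a push weight ramps from 1 down to 0 between the start and end epochs
# (matching the "# [1 -> 0]" annotation). The real interpolation is
# implemented elsewhere in this repository.
def _gradual_weight_sketch(epoch, start, end):
    # A degenerate schedule such as [0, 0, mode] leaves the weight constant.
    if end <= start:
        return 1.0
    if epoch <= start:
        return 1.0
    if epoch >= end:
        return 0.0
    return 1.0 - (epoch - start) / float(end - start)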
| 44.762319 | 129 | 0.301884 | 3,179 | 30,886 | 2.814092 | 0.051589 | 0.169461 | 0.193159 | 0.184216 | 0.899173 | 0.885088 | 0.881847 | 0.876369 | 0.867539 | 0.844288 | 0 | 0.195652 | 0.578515 | 30,886 | 689 | 130 | 44.827286 | 0.49155 | 0.119439 | 0 | 0.885397 | 0 | 0.001848 | 0.00985 | 0.003703 | 0 | 0 | 0 | 0 | 0 | 1 | 0.011091 | false | 0 | 0.001848 | 0 | 0.029575 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
ff1d39d51cd7bc067bc31ac0c8f5e1afc907ff96 | 1,032 | py | Python | pyspawn/qm_hamiltonian/adiabatic.py | blevine37/pySpawn17 | 4fa65cfc3b4d399bcb586506782d00f86b453139 | [
"MIT"
] | 18 | 2018-03-30T16:11:13.000Z | 2021-08-22T18:57:12.000Z | pyspawn/qm_hamiltonian/adiabatic.py | Quantum-Dynamics-Hub/pySpawn17 | 0b28d968c703266e7af3c8461b494fca0a2da3f8 | [
"MIT"
] | 3 | 2018-03-30T17:26:51.000Z | 2021-08-17T08:49:24.000Z | pyspawn/qm_hamiltonian/adiabatic.py | Quantum-Dynamics-Hub/pySpawn17 | 0b28d968c703266e7af3c8461b494fca0a2da3f8 | [
"MIT"
] | 6 | 2018-11-21T15:30:38.000Z | 2021-07-05T05:37:15.000Z | ######################################################
# adiabatic Hamiltonian
######################################################
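# Note (added for clarity): these module-level functions take `self` and
# appear intended to be attached as methods of a Hamiltonian/propagator
# object that provides get_qm_data_from_h5, get_quantum_time, build_S,
# invert_S, build_Sdot, build_H and build_Heff; they are not usable
# standalone.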
def build_Heff_first_half(self):
"""Build Heff for the first half of the time step in
    the adiabatic rep (with NPI)"""
self.get_qm_data_from_h5()
qm_time = self.get_quantum_time()
dt = self.get_timestep()
t_half = qm_time + 0.5 * dt
self.set_quantum_time_half_step(t_half)
self.get_qm_data_from_h5_half_step()
self.build_S()
self.invert_S()
self.build_Sdot()
self.build_H()
self.build_Heff()
def build_Heff_second_half(self):
"""Build Heff for the second half of the time step in
    the adiabatic rep (with NPI)"""
self.get_qm_data_from_h5()
qm_time = self.get_quantum_time()
dt = self.get_timestep()
t_half = qm_time - 0.5 * dt
self.set_quantum_time_half_step(t_half)
self.get_qm_data_from_h5_half_step()
self.build_S()
self.invert_S()
self.build_Sdot()
self.build_H()
self.build_Heff()
| 23.454545 | 57 | 0.610465 | 154 | 1,032 | 3.714286 | 0.227273 | 0.157343 | 0.090909 | 0.090909 | 0.884615 | 0.884615 | 0.804196 | 0.804196 | 0.804196 | 0.804196 | 0 | 0.009604 | 0.192829 | 1,032 | 43 | 58 | 24 | 0.677071 | 0.17345 | 0 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0 | 0 | 0.083333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ff1f15a9f151a58f85a042ded56602f16a5838ba | 206 | py | Python | software/reports/add_header_section_to_report.py | Searchlight2/Searchlight2 | 87c85975be49c503f79063d72d321da21a6c6341 | [
"MIT"
] | 17 | 2018-12-11T13:36:12.000Z | 2022-03-31T06:27:59.000Z | software/reports/add_header_section_to_report.py | Searchlight2/Searchlight2 | 87c85975be49c503f79063d72d321da21a6c6341 | [
"MIT"
] | 1 | 2019-11-26T11:00:57.000Z | 2019-11-26T11:01:12.000Z | software/reports/add_header_section_to_report.py | Searchlight2/Searchlight2 | 87c85975be49c503f79063d72d321da21a6c6341 | [
"MIT"
] | 7 | 2020-05-21T16:35:41.000Z | 2022-03-31T06:28:01.000Z | def add_header_section_to_report(config_element_path, pr_dictionary):
pr_dictionary["side_bar"] += "<li class=\"side_bar_header\">" + config_element_path + "</li>"
return pr_dictionary["side_bar"]
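# Illustrative call (hypothetical values): appends one header entry to the
# report's sidebar HTML and returns the accumulated markup.
#
#   pr = {"side_bar": ""}
#   add_header_section_to_report("Overview", pr)
#   # -> '<li class="side_bar_header">Overview</li>'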
| 41.2 | 97 | 0.747573 | 29 | 206 | 4.793103 | 0.551724 | 0.258993 | 0.244604 | 0.273381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.106796 | 206 | 4 | 98 | 51.5 | 0.755435 | 0 | 0 | 0 | 0 | 0 | 0.160194 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
2088eaeb42ab3291e0b2282ca3d19f4963f93372 | 10,707 | py | Python | preProcessor.py | evandropp10/predict_energy_star_score_nyc | fed291c6714d8be14abade686efc04a3ef93625a | [
"MIT"
] | null | null | null | preProcessor.py | evandropp10/predict_energy_star_score_nyc | fed291c6714d8be14abade686efc04a3ef93625a | [
"MIT"
] | null | null | null | preProcessor.py | evandropp10/predict_energy_star_score_nyc | fed291c6714d8be14abade686efc04a3ef93625a | [
"MIT"
] | null | null | null |
import pandas as pd
# Function to change "Not Available" to 0
def cleanNotAvailable(var):
if str(var) == 'Not Available':
return 0.0
else:
return float(var)
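# Illustrative behavior (added for clarity): the raw export encodes missing
# numeric fields as the literal string 'Not Available'.
#
#   cleanNotAvailable('Not Available')  # -> 0.0
#   cleanNotAvailable('123.4')          # -> 123.4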
# Function to encode a categorical column as integer codes
def coding(col, codeDict):
colCoded = pd.Series(col, copy=True)
for key, value in codeDict.items():
colCoded.replace(key, value, inplace=True)
return colCoded
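# Illustrative behavior (assumed values): returns a coded copy of the column
# without mutating the input Series.
#
#   coding(pd.Series(['Office', 'Hotel']), {'Office': 1, 'Hotel': 2})
#   # -> a Series with values [1, 2]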
def preProcessTrain(x):
df = pd.read_csv(x)
dfnew = pd.DataFrame(
columns=["Year Built", "Number of Buildings - Self-reported", "Occupancy", "Site EUI (kBtu/ft²)",
"Natural Gas Use (kBtu)", "Weather Normalized Site Natural Gas Use (therms)",
"Weather Normalized Site Electricity (kWh)", "Total GHG Emissions (Metric Tons CO2e)",
"Direct GHG Emissions (Metric Tons CO2e)", "Indirect GHG Emissions (Metric Tons CO2e)",
"Property GFA - Self-Reported (ft²)", "Water Use (All Water Sources) (kgal)",
"Water Intensity (All Water Sources) (gal/ft²)", "Source EUI (kBtu/ft²)",
"Primary Property Type - Self Selected",
"Largest Property Use Type (ft²)", "2nd Largest Property Use (ft²)",
"3rd Largest Property Use Type (ft²)", "Total area (ft²)", "Site Gas (kBtu/ft²)",
"Electricity Use Grid Purchase (kBtu)",
"ENERGY STAR Score", "Property Id"])
dfnew["Property Id"] = df["Property Id"]
dfnew["ENERGY STAR Score"] = df["ENERGY STAR Score"]
dfnew["Year Built"] = df["Year Built"]
dfnew["Number of Buildings - Self-reported"] = df["Number of Buildings - Self-reported"]
dfnew["Occupancy"] = df["Occupancy"]
dfnew["Site EUI (kBtu/ft²)"] = df["Site EUI (kBtu/ft²)"]
dfnew["Primary Property Type - Self Selected"] = df["Primary Property Type - Self Selected"]
    # Handle the 'Not Available' placeholder values
dfnew["Natural Gas Use (kBtu)"] = df["Natural Gas Use (kBtu)"].apply(cleanNotAvailable)
dfnew["Weather Normalized Site Natural Gas Use (therms)"] = df["Weather Normalized Site Natural Gas Use (therms)"].apply(cleanNotAvailable)
dfnew["Weather Normalized Site Electricity (kWh)"] = df["Weather Normalized Site Electricity (kWh)"].apply(cleanNotAvailable)
dfnew["Total GHG Emissions (Metric Tons CO2e)"] = df["Total GHG Emissions (Metric Tons CO2e)"].apply(cleanNotAvailable)
dfnew["Direct GHG Emissions (Metric Tons CO2e)"] = df["Direct GHG Emissions (Metric Tons CO2e)"].apply(cleanNotAvailable)
dfnew["Indirect GHG Emissions (Metric Tons CO2e)"] = df["Indirect GHG Emissions (Metric Tons CO2e)"].apply(cleanNotAvailable)
dfnew["Property GFA - Self-Reported (ft²)"] = df["Property GFA - Self-Reported (ft²)"].apply(cleanNotAvailable)
dfnew["Water Use (All Water Sources) (kgal)"] = df["Water Use (All Water Sources) (kgal)"].apply(cleanNotAvailable)
dfnew["Water Intensity (All Water Sources) (gal/ft²)"] = df["Water Intensity (All Water Sources) (gal/ft²)"].apply(cleanNotAvailable)
dfnew["Source EUI (kBtu/ft²)"] = df["Source EUI (kBtu/ft²)"].apply(cleanNotAvailable)
dfnew["Largest Property Use Type (ft²)"] = df["Largest Property Use Type - Gross Floor Area (ft²)"].apply(cleanNotAvailable)
dfnew["2nd Largest Property Use (ft²)"] = df["2nd Largest Property Use - Gross Floor Area (ft²)"].apply(cleanNotAvailable)
dfnew["3rd Largest Property Use Type (ft²)"] = df["3rd Largest Property Use Type - Gross Floor Area (ft²)"].apply(cleanNotAvailable)
dfnew["Electricity Use Grid Purchase (kBtu)"] = df["Electricity Use - Grid Purchase (kBtu)"].apply(cleanNotAvailable)
dfnew["Total area (ft²)"] = dfnew["Largest Property Use Type (ft²)"] + dfnew["2nd Largest Property Use (ft²)"] + dfnew["3rd Largest Property Use Type (ft²)"]
dfnew["Site Gas (kBtu/ft²)"] = dfnew["Natural Gas Use (kBtu)"] / dfnew["Total area (ft²)"]
dfnew["Electricity Use Area (kBtu/ft²)"] = dfnew["Electricity Use Grid Purchase (kBtu)"] / dfnew["Total area (ft²)"]
dfnew["GHG Emissions Area (Metric Tons CO2e/ft²)"] = dfnew["Total GHG Emissions (Metric Tons CO2e)"] / dfnew["Total area (ft²)"]
dfnew["Water use Area (kgal/ft²)"] = dfnew["Water Use (All Water Sources) (kgal)"] / dfnew["Total area (ft²)"]
dfnew["Code Property Type"] = coding(df["Primary Property Type - Self Selected"],
{ 'Multifamily Housing':0,
'Office':1,
'Hotel':2,
'Non-Refrigerated Warehouse':3,
'Residence Hall/Dormitory':4,
'K-12 School':5,
'Senior Care Community':6,
'Distribution Center':7,
'Retail Store':8,
'Other':9,
'Medical Office': 10,
'College/University': 11,
'Hospital (General Medical & Surgical)': 12,
'Financial Office': 13,
'Worship Facility':14,
'Mixed Use Property': 15,
'Supermarket/Grocery Store': 16,
'Refrigerated Warehouse': 17,
'Wholesale Club/Supercenter': 18,
'Self-Storage Facility': 19,
'Courthouse': 20,
'Residential Care Facility': 21,
'Bank Branch': 22,
'Manufacturing/Industrial Plant': 23,
'Fitness Center/Health Club/Gym':24})
return dfnew
def preProcessTest(x):
df = pd.read_csv(x)
dfnew = pd.DataFrame(
columns=["Year Built", "Number of Buildings - Self-reported", "Occupancy", "Site EUI (kBtu/ft²)",
"Natural Gas Use (kBtu)", "Weather Normalized Site Natural Gas Use (therms)",
"Weather Normalized Site Electricity (kWh)", "Total GHG Emissions (Metric Tons CO2e)",
"Direct GHG Emissions (Metric Tons CO2e)", "Indirect GHG Emissions (Metric Tons CO2e)",
"Property GFA - Self-Reported (ft²)", "Water Use (All Water Sources) (kgal)",
"Water Intensity (All Water Sources) (gal/ft²)", "Source EUI (kBtu/ft²)",
"Largest Property Use Type (ft²)", "2nd Largest Property Use (ft²)",
"3rd Largest Property Use Type (ft²)", "Total area (ft²)", "Site Gas (kBtu/ft²)",
"Electricity Use Grid Purchase (kBtu)",
"Property Id"])
dfnew["Property Id"] = df["Property Id"]
dfnew["Year Built"] = df["Year Built"]
dfnew["Number of Buildings - Self-reported"] = df["Number of Buildings - Self-reported"]
dfnew["Occupancy"] = df["Occupancy"]
dfnew["Site EUI (kBtu/ft²)"] = df["Site EUI (kBtu/ft²)"]
    # Handle the 'Not Available' placeholder values
dfnew["Natural Gas Use (kBtu)"] = df["Natural Gas Use (kBtu)"].apply(cleanNotAvailable)
dfnew["Weather Normalized Site Natural Gas Use (therms)"] = df["Weather Normalized Site Natural Gas Use (therms)"].apply(cleanNotAvailable)
dfnew["Weather Normalized Site Electricity (kWh)"] = df["Weather Normalized Site Electricity (kWh)"].apply(cleanNotAvailable)
dfnew["Total GHG Emissions (Metric Tons CO2e)"] = df["Total GHG Emissions (Metric Tons CO2e)"].apply(cleanNotAvailable)
dfnew["Direct GHG Emissions (Metric Tons CO2e)"] = df["Direct GHG Emissions (Metric Tons CO2e)"].apply(cleanNotAvailable)
dfnew["Indirect GHG Emissions (Metric Tons CO2e)"] = df["Indirect GHG Emissions (Metric Tons CO2e)"].apply(cleanNotAvailable)
dfnew["Property GFA - Self-Reported (ft²)"] = df["Property GFA - Self-Reported (ft²)"].apply(cleanNotAvailable)
dfnew["Water Use (All Water Sources) (kgal)"] = df["Water Use (All Water Sources) (kgal)"].apply(cleanNotAvailable)
dfnew["Water Intensity (All Water Sources) (gal/ft²)"] = df["Water Intensity (All Water Sources) (gal/ft²)"].apply(cleanNotAvailable)
dfnew["Source EUI (kBtu/ft²)"] = df["Source EUI (kBtu/ft²)"].apply(cleanNotAvailable)
dfnew["Largest Property Use Type (ft²)"] = df["Largest Property Use Type - Gross Floor Area (ft²)"].apply(cleanNotAvailable)
dfnew["2nd Largest Property Use (ft²)"] = df["2nd Largest Property Use - Gross Floor Area (ft²)"].apply(cleanNotAvailable)
dfnew["3rd Largest Property Use Type (ft²)"] = df["3rd Largest Property Use Type - Gross Floor Area (ft²)"].apply(cleanNotAvailable)
dfnew["Electricity Use Grid Purchase (kBtu)"] = df["Electricity Use - Grid Purchase (kBtu)"].apply(cleanNotAvailable)
dfnew["Total area (ft²)"] = dfnew["Largest Property Use Type (ft²)"] + dfnew["2nd Largest Property Use (ft²)"] + dfnew["3rd Largest Property Use Type (ft²)"]
dfnew["Site Gas (kBtu/ft²)"] = dfnew["Natural Gas Use (kBtu)"] / dfnew["Total area (ft²)"]
dfnew["Electricity Use Area (kBtu/ft²)"] = dfnew["Electricity Use Grid Purchase (kBtu)"] / dfnew["Total area (ft²)"]
dfnew["GHG Emissions Area (Metric Tons CO2e/ft²)"] = dfnew["Total GHG Emissions (Metric Tons CO2e)"] / dfnew["Total area (ft²)"]
dfnew["Water use Area (kgal/ft²)"] = dfnew["Water Use (All Water Sources) (kgal)"] / dfnew["Total area (ft²)"]
dfnew["Code Property Type"] = coding(df["Primary Property Type - Self Selected"],
{ 'Multifamily Housing':0,
'Office':1,
'Hotel':2,
'Non-Refrigerated Warehouse':3,
'Residence Hall/Dormitory':4,
'K-12 School':5,
'Senior Care Community':6,
'Distribution Center':7,
'Retail Store':8,
'Other':9,
'Medical Office': 10,
'College/University': 11,
'Hospital (General Medical & Surgical)': 12,
'Financial Office': 13,
'Worship Facility':14,
'Mixed Use Property': 15,
'Supermarket/Grocery Store': 16,
'Refrigerated Warehouse': 17,
'Wholesale Club/Supercenter': 18,
'Self-Storage Facility': 19,
'Courthouse': 20,
'Residential Care Facility': 21,
'Bank Branch': 22,
'Manufacturing/Industrial Plant': 23,
'Fitness Center/Health Club/Gym':24})
return dfnew
| 60.151685 | 161 | 0.639115 | 1,293 | 10,707 | 5.290797 | 0.130704 | 0.109341 | 0.134191 | 0.077182 | 0.948107 | 0.939044 | 0.93422 | 0.93422 | 0.93422 | 0.922672 | 0 | 0.023858 | 0.220977 | 10,707 | 177 | 162 | 60.491525 | 0.796307 | 0.011768 | 0 | 0.878378 | 0 | 0 | 0.585359 | 0.00454 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027027 | false | 0 | 0.006757 | 0 | 0.067568 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
20ac8fe6f95049fe1bb250ca5848a0912ab5e415 | 9,213 | py | Python | loans/migrations/0001_initial.py | prateekmohanty63/microfinance | 39839c0d378be4ccc40a9dde5dc38a10773a38a1 | [
"MIT"
] | 1 | 2022-02-25T18:39:44.000Z | 2022-02-25T18:39:44.000Z | loans/migrations/0001_initial.py | prateekmohanty63/microfinance | 39839c0d378be4ccc40a9dde5dc38a10773a38a1 | [
"MIT"
] | null | null | null | loans/migrations/0001_initial.py | prateekmohanty63/microfinance | 39839c0d378be4ccc40a9dde5dc38a10773a38a1 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.4 on 2022-02-25 05:37
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('organizations', '0002_organizationcustomer'),
('customers', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Fee',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('fee_id', models.CharField(blank=True, max_length=32)),
('fee_number', models.CharField(blank=True, max_length=16)),
('amount', models.FloatField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='FeeConfig',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('fee_config_id', models.CharField(blank=True, max_length=16)),
('label', models.CharField(blank=True, max_length=200, null=True)),
('day', models.IntegerField()),
('amount', models.FloatField(blank=True, null=True)),
('structure', models.CharField(choices=[('flat-fee', 'Flat Fee'), ('principle-percentage', 'Percentage of Principal'), ('loan-percentage', 'Percentage of Loan Amount')], max_length=25)),
('status', models.CharField(choices=[('active', 'Active'), ('archived', 'Archived'), ('deleted', 'Deleted')], default='active', max_length=25)),
],
),
migrations.CreateModel(
name='Interest',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('interest_id', models.CharField(blank=True, max_length=32)),
('amount', models.FloatField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='InterestConfig',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('interest_config_id', models.CharField(blank=True, max_length=16)),
('label', models.CharField(blank=True, max_length=200, null=True)),
('day', models.IntegerField()),
('amount', models.FloatField(blank=True, null=True)),
('config', models.CharField(choices=[('principle-percentage', 'Percentage of Principal'), ('loan-percentage', 'Percentage of Loan Amount'), ('flat-fee', 'Flat Fee')], max_length=25)),
('status', models.CharField(choices=[('active', 'Active'), ('archived', 'Archived'), ('deleted', 'Deleted')], default='active', max_length=25)),
],
),
migrations.CreateModel(
name='Loan',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('loan_id', models.CharField(blank=True, max_length=32)),
('loan_number', models.CharField(blank=True, max_length=16)),
('created_at', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('status', models.CharField(choices=[('active', 'Active'), ('void', 'Void'), ('cleared', 'Cleared'), ('overdue', 'Overdue'), ('default', 'Default')], default='active', max_length=25)),
('customer', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='customers.customer')),
],
),
migrations.CreateModel(
name='Payment',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('payment_id', models.CharField(blank=True, max_length=32)),
('payment_number', models.CharField(blank=True, max_length=16)),
('amount', models.FloatField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='PaymentConfig',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('payment_config_id', models.CharField(blank=True, max_length=16)),
('label', models.CharField(blank=True, max_length=200, null=True)),
('day', models.IntegerField()),
('amount', models.FloatField(blank=True, null=True)),
('structure', models.CharField(choices=[('flat-amount', 'Flat Amount'), ('principle-percentage', 'Percentage of Principal'), ('loan-percentage', 'Percentage of Loan Amount')], max_length=25)),
('status', models.CharField(choices=[('active', 'Active'), ('archived', 'Archived'), ('deleted', 'Deleted')], default='active', max_length=25)),
],
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('product_id', models.CharField(blank=True, max_length=16)),
('label', models.CharField(blank=True, max_length=200)),
('created_at', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('status', models.CharField(choices=[('active', 'Active'), ('archived', 'Archived'), ('deleted', 'Deleted')], default='active', max_length=25)),
],
),
migrations.CreateModel(
name='Transaction',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('transaction_id', models.CharField(blank=True, max_length=32)),
('transaction_number', models.CharField(blank=True, max_length=16)),
('created_at', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('type', models.CharField(choices=[('payment', 'Payment'), ('interest', 'Interest'), ('fee', 'Fee')], default='applied', max_length=25)),
('status', models.CharField(choices=[('applied', 'Applied'), ('voided', 'Voided')], default='applied', max_length=25)),
('fee', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='loans.fee')),
('interest', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='loans.interest')),
('loan', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='loans.loan')),
('payment', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='loans.payment')),
],
),
migrations.CreateModel(
name='ProductConfig',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('product_config_id', models.CharField(blank=True, max_length=16)),
('length', models.IntegerField()),
('overdue_on_day', models.IntegerField()),
('default_on_day', models.IntegerField()),
('created_at', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('status', models.CharField(choices=[('active', 'Active'), ('archived', 'Archived'), ('deleted', 'Deleted')], default='active', max_length=25)),
('fee_config', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='loans.feeconfig')),
('interest_config', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='loans.interestconfig')),
('payment_config', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='loans.paymentconfig')),
('product', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='loans.product')),
],
),
migrations.CreateModel(
name='OrganizationSettings',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('allow_multiple_loans', models.BooleanField(default=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('organization', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='organizations.organization')),
],
),
migrations.AddField(
model_name='loan',
name='product',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='loans.productconfig'),
),
]
| 58.681529 | 208 | 0.582221 | 913 | 9,213 | 5.730559 | 0.112815 | 0.083142 | 0.068807 | 0.082569 | 0.801223 | 0.789182 | 0.789182 | 0.781728 | 0.746369 | 0.708907 | 0 | 0.012356 | 0.253338 | 9,213 | 156 | 209 | 59.057692 | 0.748219 | 0.004884 | 0 | 0.597315 | 1 | 0 | 0.184704 | 0.005564 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.013423 | 0 | 0.040268 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
20b6b36313456603fc2602b281565dfa9f403a36 | 4,434 | py | Python | api/tests/integration/tests/mass/reaction_mass_and_gross_fmla.py | tsingdao-Tp/Indigo | b2d73faebb6a450e9b3d34fed553fad4f9d0012f | [
"Apache-2.0"
] | 204 | 2015-11-06T21:34:34.000Z | 2022-03-30T16:17:01.000Z | api/tests/integration/tests/mass/reaction_mass_and_gross_fmla.py | tsingdao-Tp/Indigo | b2d73faebb6a450e9b3d34fed553fad4f9d0012f | [
"Apache-2.0"
] | 509 | 2015-11-05T13:54:43.000Z | 2022-03-30T22:15:30.000Z | api/tests/integration/tests/mass/reaction_mass_and_gross_fmla.py | tsingdao-Tp/Indigo | b2d73faebb6a450e9b3d34fed553fad4f9d0012f | [
"Apache-2.0"
] | 89 | 2015-11-17T08:22:54.000Z | 2022-03-17T04:26:28.000Z | import sys
sys.path.append('../../common')
from env_indigo import *
indigo = Indigo()
# Reactions
r = indigo.loadReaction("C>>C")
print(r.grossFormula())
r = indigo.loadReaction("CC.C>>C.CC")
print(r.grossFormula())
r = indigo.loadReaction('''$RXN
1 1 0
$MOL
Ketcher 11071614152D 1 1.00000 0.00000 0
18 17 0 0 0 999 V2000
-6.9282 0.0000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
-6.0622 -0.5000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
-5.1961 0.0000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
-4.3301 -0.5000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
-3.4641 0.0000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
-2.5981 -0.5000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
-1.7321 0.0000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
-0.8660 -0.5000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
0.0000 0.0000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
0.8660 -0.5000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
1.7321 0.0000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
2.5981 -0.5000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
3.4641 0.0000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
4.3301 -0.5000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
5.1961 0.0000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
6.0622 -0.5000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
6.9282 0.0000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
7.7942 -0.5000 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
1 2 1 0 0 0
2 3 1 0 0 0
3 4 1 0 0 0
4 5 1 0 0 0
5 6 1 0 0 0
6 7 1 0 0 0
7 8 1 0 0 0
8 9 1 0 0 0
9 10 1 0 0 0
10 11 1 0 0 0
11 12 1 0 0 0
12 13 1 0 0 0
13 14 1 0 0 0
14 15 1 0 0 0
15 16 1 0 0 0
16 17 1 0 0 0
17 18 1 0 0 0
M STY 1 1 SRU
M SLB 1 1 1
M SCN 1 1 HT
M SMT 1 n
M SAL 1 2 4 5
M SBL 1 2 3 5
M SDI 1 4 -4.7631 -1.0000 -4.7631 0.5000
M SDI 1 4 -3.0311 0.5000 -3.0311 -1.0000
M STY 1 2 SRU
M SLB 1 2 2
M SCN 1 2 HT
M SMT 2 k
M SAL 2 3 11 12 13
M SBL 2 2 10 13
M SDI 2 4 1.2990 -1.0000 1.2990 0.5000
M SDI 2 4 3.8971 0.5000 3.8971 -1.0000
M STY 1 3 SRU
M SLB 1 3 3
M SCN 1 3 HT
M SMT 3 n
M SAL 3 1 8
M SBL 3 2 7 8
M SDI 3 4 -1.2990 -1.0000 -1.2990 0.5000
M SDI 3 4 -0.4330 0.5000 -0.4330 -1.0000
M END
$MOL
Ketcher 11071614152D 1 1.00000 0.00000 0
18 17 0 0 0 999 V2000
11.9888 0.1500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
12.8548 -0.3500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
13.7209 0.1500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
14.5869 -0.3500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
15.4529 0.1500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
16.3189 -0.3500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
17.1849 0.1500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
18.0510 -0.3500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
18.9170 0.1500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
19.7829 -0.3500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
20.6490 0.1500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
21.5150 -0.3500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
22.3810 0.1500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
23.2470 -0.3500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
24.1130 0.1500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
24.9791 -0.3500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
25.8451 0.1500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
26.7112 -0.3500 0.0000 C 0 0 0 0 0 0 0 0 0 0 0 0
1 2 1 0 0 0
2 3 1 0 0 0
3 4 1 0 0 0
4 5 1 0 0 0
5 6 1 0 0 0
6 7 1 0 0 0
7 8 1 0 0 0
8 9 1 0 0 0
9 10 1 0 0 0
10 11 1 0 0 0
11 12 1 0 0 0
12 13 1 0 0 0
13 14 1 0 0 0
14 15 1 0 0 0
15 16 1 0 0 0
16 17 1 0 0 0
17 18 1 0 0 0
M END
''')
print(r.grossFormula())
| 34.640625 | 69 | 0.421065 | 1,130 | 4,434 | 1.651327 | 0.086726 | 0.504823 | 0.641479 | 0.700965 | 0.751876 | 0.741158 | 0.701501 | 0.701501 | 0.701501 | 0.701501 | 0 | 0.709402 | 0.525034 | 4,434 | 127 | 70 | 34.913386 | 0.176638 | 0.00203 | 0 | 0.394737 | 0 | 0.315789 | 0.945738 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.017544 | 0 | 0.017544 | 0.026316 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 |
20db37e2526610c8f45f7dc6f44d32e332c60977 | 760 | py | Python | src/rewrite_reload2.py | pmdscully/Prototype_Rewrite_Module | d69e4c8f46d35767d34824af3c41204624f08486 | [
"Apache-2.0"
] | null | null | null | src/rewrite_reload2.py | pmdscully/Prototype_Rewrite_Module | d69e4c8f46d35767d34824af3c41204624f08486 | [
"Apache-2.0"
] | null | null | null | src/rewrite_reload2.py | pmdscully/Prototype_Rewrite_Module | d69e4c8f46d35767d34824af3c41204624f08486 | [
"Apache-2.0"
] | null | null | null | import datetime
def editable_function():
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I added a line.")
print("I loaded:"+str(datetime.datetime.utcnow()))
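# Hot-reload sketch (illustrative, not part of the original module): this file
# is meant to be edited on disk while a running driver reloads it, e.g.:
#
#   import importlib
#   import rewrite_reload2
#   rewrite_reload2.editable_function()
#   # ... edit this file ...
#   importlib.reload(rewrite_reload2)    # re-runs the module-level print
#   rewrite_reload2.editable_function()  # now runs the edited body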
| 31.666667 | 58 | 0.514474 | 112 | 760 | 3.482143 | 0.116071 | 0.323077 | 0.564103 | 0.615385 | 0.835897 | 0.835897 | 0.835897 | 0.835897 | 0.835897 | 0.835897 | 0 | 0 | 0.334211 | 760 | 23 | 59 | 33.043478 | 0.770751 | 0 | 0 | 0.869565 | 0 | 0 | 0.406579 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043478 | true | 0 | 0.043478 | 0 | 0.086957 | 0.913043 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 12 |
20e1c5306368140a528e3ce629d372e17359ff2d | 3,081 | py | Python | active_reward_learning/envs/chain.py | david-lindner/idrl | 54cfad330b0598ad4f6621796f2411644e50a6ba | [
"MIT"
] | 9 | 2021-11-20T18:14:38.000Z | 2022-03-20T16:29:48.000Z | active_reward_learning/envs/chain.py | david-lindner/idrl | 54cfad330b0598ad4f6621796f2411644e50a6ba | [
"MIT"
] | null | null | null | active_reward_learning/envs/chain.py | david-lindner/idrl | 54cfad330b0598ad4f6621796f2411644e50a6ba | [
"MIT"
] | null | null | null | from typing import Optional
import numpy as np
from active_reward_learning.envs.tabular_mdp import TabularMDP
class SimpleChain(TabularMDP):
# x <-> x <-> x <-> x <-> x <-> x <-> x <-> x
def __init__(
self,
rewards: np.ndarray,
discount_factor: float,
init_agent_pos: Optional[int],
episode_length: int,
observation_type: str = "state",
observation_noise: float = 0,
):
N = rewards.shape[0]
transitions = np.zeros((2, N, N))
for i in range(N):
transitions[0, i, max(0, i - 1)] = 1
transitions[1, i, min(N - 1, i + 1)] = 1
super().__init__(
N,
2,
rewards,
transitions,
discount_factor,
[],
episode_length,
init_agent_pos,
observation_type=observation_type,
observation_noise=observation_noise,
)
class LimitedActionChain(TabularMDP):
# x -> x -> x -> x <-> x <-> x <-> x
def __init__(
self,
rewards: np.ndarray,
discount_factor: float,
episode_length: int,
block_N: int,
init_agent_pos: Optional[int] = None,
use_sparse_transitions: bool = False,
observation_type: str = "state",
observation_noise: float = 0,
):
N = rewards.shape[0]
transitions = np.zeros((2, N, N))
for i in range(block_N):
transitions[0, i, min(N - 1, i + 1)] = 1
transitions[1, i, min(N - 1, i + 1)] = 1
for i in range(block_N, N):
transitions[0, i, max(0, i - 1)] = 1
transitions[1, i, min(N - 1, i + 1)] = 1
super().__init__(
N,
2,
rewards,
transitions,
discount_factor,
[],
episode_length,
init_agent_pos,
use_sparse_transitions=use_sparse_transitions,
observation_type=observation_type,
observation_noise=observation_noise,
)
class FirstStateTrapChain(TabularMDP):
    # x <-> x <-> x <-> x <-> x <-> x <-> x <-> x, but each step snaps back to state 0 with probability prob_to_first
def __init__(
self,
rewards: np.ndarray,
discount_factor: float,
init_agent_pos: Optional[int],
episode_length: int,
prob_to_first: float,
observation_type: str = "state",
observation_noise: float = 0,
):
N = rewards.shape[0]
transitions = np.zeros((2, N, N))
for i in range(N):
transitions[0, i, max(0, i - 1)] += 1 - prob_to_first
transitions[0, i, 0] += prob_to_first
transitions[1, i, min(N - 1, i + 1)] += 1 - prob_to_first
transitions[1, i, 0] += prob_to_first
super().__init__(
N,
2,
rewards,
transitions,
discount_factor,
[],
episode_length,
init_agent_pos,
observation_type=observation_type,
observation_noise=observation_noise,
)
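if __name__ == "__main__":
    # Smoke-test sketch (added for illustration): a 5-state SimpleChain with
    # reward only in the final state. Assumes the active_reward_learning
    # package (providing TabularMDP) is importable.
    rewards = np.array([0.0, 0.0, 0.0, 0.0, 1.0])
    chain = SimpleChain(
        rewards,
        discount_factor=0.99,
        init_agent_pos=0,
        episode_length=10,
    )
    print(type(chain).__name__, "built with", rewards.shape[0], "states")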
| 29.342857 | 69 | 0.511522 | 348 | 3,081 | 4.275862 | 0.172414 | 0.026882 | 0.034274 | 0.037634 | 0.816532 | 0.791667 | 0.767473 | 0.733871 | 0.727151 | 0.664651 | 0 | 0.024935 | 0.375203 | 3,081 | 104 | 70 | 29.625 | 0.748052 | 0.039598 | 0 | 0.774194 | 0 | 0 | 0.005076 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032258 | false | 0 | 0.032258 | 0 | 0.096774 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
afe432eb7f1203e2350c88619d6e587c41ccc4d3 | 25,679 | py | Python | src/python_pachyderm/proto/admin/v1_11/pps/pps_pb2_grpc.py | barretthinson/python-pachyderm | 82cea22d1105d70833a5522ccac750ca521694ff | [
"Apache-2.0"
] | null | null | null | src/python_pachyderm/proto/admin/v1_11/pps/pps_pb2_grpc.py | barretthinson/python-pachyderm | 82cea22d1105d70833a5522ccac750ca521694ff | [
"Apache-2.0"
] | null | null | null | src/python_pachyderm/proto/admin/v1_11/pps/pps_pb2_grpc.py | barretthinson/python-pachyderm | 82cea22d1105d70833a5522ccac750ca521694ff | [
"Apache-2.0"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from python_pachyderm.proto.admin.v1_11.pps import pps_pb2 as client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class APIStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateJob = channel.unary_unary(
'/pps_1_11.API/CreateJob',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.CreateJobRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.Job.FromString,
)
self.InspectJob = channel.unary_unary(
'/pps_1_11.API/InspectJob',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.InspectJobRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.JobInfo.FromString,
)
self.ListJob = channel.unary_unary(
'/pps_1_11.API/ListJob',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListJobRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.JobInfos.FromString,
)
self.ListJobStream = channel.unary_stream(
'/pps_1_11.API/ListJobStream',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListJobRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.JobInfo.FromString,
)
self.FlushJob = channel.unary_stream(
'/pps_1_11.API/FlushJob',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.FlushJobRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.JobInfo.FromString,
)
self.DeleteJob = channel.unary_unary(
'/pps_1_11.API/DeleteJob',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.DeleteJobRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.StopJob = channel.unary_unary(
'/pps_1_11.API/StopJob',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.StopJobRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.InspectDatum = channel.unary_unary(
'/pps_1_11.API/InspectDatum',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.InspectDatumRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.DatumInfo.FromString,
)
self.ListDatum = channel.unary_unary(
'/pps_1_11.API/ListDatum',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListDatumRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListDatumResponse.FromString,
)
self.ListDatumStream = channel.unary_stream(
'/pps_1_11.API/ListDatumStream',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListDatumRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListDatumStreamResponse.FromString,
)
self.RestartDatum = channel.unary_unary(
'/pps_1_11.API/RestartDatum',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.RestartDatumRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreatePipeline = channel.unary_unary(
'/pps_1_11.API/CreatePipeline',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.CreatePipelineRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.InspectPipeline = channel.unary_unary(
'/pps_1_11.API/InspectPipeline',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.InspectPipelineRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.PipelineInfo.FromString,
)
self.ListPipeline = channel.unary_unary(
'/pps_1_11.API/ListPipeline',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListPipelineRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.PipelineInfos.FromString,
)
self.DeletePipeline = channel.unary_unary(
'/pps_1_11.API/DeletePipeline',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.DeletePipelineRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.StartPipeline = channel.unary_unary(
'/pps_1_11.API/StartPipeline',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.StartPipelineRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.StopPipeline = channel.unary_unary(
'/pps_1_11.API/StopPipeline',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.StopPipelineRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.RunPipeline = channel.unary_unary(
'/pps_1_11.API/RunPipeline',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.RunPipelineRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.RunCron = channel.unary_unary(
'/pps_1_11.API/RunCron',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.RunCronRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreateSecret = channel.unary_unary(
'/pps_1_11.API/CreateSecret',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.CreateSecretRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteSecret = channel.unary_unary(
'/pps_1_11.API/DeleteSecret',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.DeleteSecretRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListSecret = channel.unary_unary(
'/pps_1_11.API/ListSecret',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.SecretInfos.FromString,
)
self.InspectSecret = channel.unary_unary(
'/pps_1_11.API/InspectSecret',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.InspectSecretRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.SecretInfo.FromString,
)
self.DeleteAll = channel.unary_unary(
'/pps_1_11.API/DeleteAll',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetLogs = channel.unary_stream(
'/pps_1_11.API/GetLogs',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.GetLogsRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.LogMessage.FromString,
)
self.GarbageCollect = channel.unary_unary(
'/pps_1_11.API/GarbageCollect',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.GarbageCollectRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.GarbageCollectResponse.FromString,
)
self.ActivateAuth = channel.unary_unary(
'/pps_1_11.API/ActivateAuth',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ActivateAuthRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ActivateAuthResponse.FromString,
)
self.UpdateJobState = channel.unary_unary(
'/pps_1_11.API/UpdateJobState',
request_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.UpdateJobStateRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
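# Usage sketch (illustrative; the address and port below are assumptions, not
# part of this generated file):
#
#   channel = grpc.insecure_channel('localhost:30650')
#   stub = APIStub(channel)
#   infos = stub.ListPipeline(
#       client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListPipelineRequest())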
class APIServicer(object):
# missing associated documentation comment in .proto file
pass
def CreateJob(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def InspectJob(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListJob(self, request, context):
"""ListJob returns information about current and past Pachyderm jobs. This is
deprecated in favor of ListJobStream
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListJobStream(self, request, context):
"""ListJobStream returns information about current and past Pachyderm jobs.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def FlushJob(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteJob(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StopJob(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def InspectDatum(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListDatum(self, request, context):
"""ListDatum returns information about each datum fed to a Pachyderm job. This
is deprecated in favor of ListDatumStream
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListDatumStream(self, request, context):
"""ListDatumStream returns information about each datum fed to a Pachyderm job
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RestartDatum(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreatePipeline(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def InspectPipeline(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListPipeline(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeletePipeline(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StartPipeline(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StopPipeline(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RunPipeline(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RunCron(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateSecret(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteSecret(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListSecret(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def InspectSecret(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteAll(self, request, context):
"""DeleteAll deletes everything
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetLogs(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GarbageCollect(self, request, context):
"""Garbage collection
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ActivateAuth(self, request, context):
"""An internal call that causes PPS to put itself into an auth-enabled state
(all pipelines have tokens, correct permissions, etcd)
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateJobState(self, request, context):
"""An internal call used to move a job from one state to another
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_APIServicer_to_server(servicer, server):
rpc_method_handlers = {
'CreateJob': grpc.unary_unary_rpc_method_handler(
servicer.CreateJob,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.CreateJobRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.Job.SerializeToString,
),
'InspectJob': grpc.unary_unary_rpc_method_handler(
servicer.InspectJob,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.InspectJobRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.JobInfo.SerializeToString,
),
'ListJob': grpc.unary_unary_rpc_method_handler(
servicer.ListJob,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListJobRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.JobInfos.SerializeToString,
),
'ListJobStream': grpc.unary_stream_rpc_method_handler(
servicer.ListJobStream,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListJobRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.JobInfo.SerializeToString,
),
'FlushJob': grpc.unary_stream_rpc_method_handler(
servicer.FlushJob,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.FlushJobRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.JobInfo.SerializeToString,
),
'DeleteJob': grpc.unary_unary_rpc_method_handler(
servicer.DeleteJob,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.DeleteJobRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'StopJob': grpc.unary_unary_rpc_method_handler(
servicer.StopJob,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.StopJobRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'InspectDatum': grpc.unary_unary_rpc_method_handler(
servicer.InspectDatum,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.InspectDatumRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.DatumInfo.SerializeToString,
),
'ListDatum': grpc.unary_unary_rpc_method_handler(
servicer.ListDatum,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListDatumRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListDatumResponse.SerializeToString,
),
'ListDatumStream': grpc.unary_stream_rpc_method_handler(
servicer.ListDatumStream,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListDatumRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListDatumStreamResponse.SerializeToString,
),
'RestartDatum': grpc.unary_unary_rpc_method_handler(
servicer.RestartDatum,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.RestartDatumRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'CreatePipeline': grpc.unary_unary_rpc_method_handler(
servicer.CreatePipeline,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.CreatePipelineRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'InspectPipeline': grpc.unary_unary_rpc_method_handler(
servicer.InspectPipeline,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.InspectPipelineRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.PipelineInfo.SerializeToString,
),
'ListPipeline': grpc.unary_unary_rpc_method_handler(
servicer.ListPipeline,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ListPipelineRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.PipelineInfos.SerializeToString,
),
'DeletePipeline': grpc.unary_unary_rpc_method_handler(
servicer.DeletePipeline,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.DeletePipelineRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'StartPipeline': grpc.unary_unary_rpc_method_handler(
servicer.StartPipeline,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.StartPipelineRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'StopPipeline': grpc.unary_unary_rpc_method_handler(
servicer.StopPipeline,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.StopPipelineRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'RunPipeline': grpc.unary_unary_rpc_method_handler(
servicer.RunPipeline,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.RunPipelineRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'RunCron': grpc.unary_unary_rpc_method_handler(
servicer.RunCron,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.RunCronRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'CreateSecret': grpc.unary_unary_rpc_method_handler(
servicer.CreateSecret,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.CreateSecretRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'DeleteSecret': grpc.unary_unary_rpc_method_handler(
servicer.DeleteSecret,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.DeleteSecretRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'ListSecret': grpc.unary_unary_rpc_method_handler(
servicer.ListSecret,
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.SecretInfos.SerializeToString,
),
'InspectSecret': grpc.unary_unary_rpc_method_handler(
servicer.InspectSecret,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.InspectSecretRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.SecretInfo.SerializeToString,
),
'DeleteAll': grpc.unary_unary_rpc_method_handler(
servicer.DeleteAll,
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'GetLogs': grpc.unary_stream_rpc_method_handler(
servicer.GetLogs,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.GetLogsRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.LogMessage.SerializeToString,
),
'GarbageCollect': grpc.unary_unary_rpc_method_handler(
servicer.GarbageCollect,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.GarbageCollectRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.GarbageCollectResponse.SerializeToString,
),
'ActivateAuth': grpc.unary_unary_rpc_method_handler(
servicer.ActivateAuth,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ActivateAuthRequest.FromString,
response_serializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.ActivateAuthResponse.SerializeToString,
),
'UpdateJobState': grpc.unary_unary_rpc_method_handler(
servicer.UpdateJobState,
request_deserializer=client_dot_admin_dot_v1__11_dot_pps_dot_pps__pb2.UpdateJobStateRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'pps_1_11.API', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
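# Usage sketch (illustrative, not generated code): wire the servicer into a
# gRPC server so every RPC answers UNIMPLEMENTED until overridden. The class
# name APIServicer is inferred from add_APIServicer_to_server above, and the
# port and worker count are arbitrary assumptions.
if __name__ == '__main__':
from concurrent import futures
server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
add_APIServicer_to_server(APIServicer(), server)
server.add_insecure_port('[::]:50051')
server.start()
server.wait_for_termination()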
| 50.35098 | 121 | 0.778535 | 3,043 | 25,679 | 6.053566 | 0.058495 | 0.054069 | 0.06308 | 0.076597 | 0.854785 | 0.854785 | 0.847077 | 0.749362 | 0.743934 | 0.743934 | 0 | 0.020931 | 0.153472 | 25,679 | 509 | 122 | 50.449902 | 0.826479 | 0.077456 | 0 | 0.41784 | 1 | 0 | 0.098211 | 0.029853 | 0 | 0 | 0 | 0 | 0 | 1 | 0.070423 | false | 0.051643 | 0.007042 | 0 | 0.08216 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
b366da44590c256d4e5f9094097cf57af8ccc282 | 7,314 | py | Python | tests/test_line_segment.py | CompMaterSci/python-geometry | 403244252b9d8521cd4d001ec63cd768d75f5393 | [
"BSD-3-Clause"
] | 3 | 2016-07-12T13:22:53.000Z | 2019-09-20T07:48:02.000Z | tests/test_line_segment.py | CompMaterSci/python-geometry | 403244252b9d8521cd4d001ec63cd768d75f5393 | [
"BSD-3-Clause"
] | 3 | 2016-12-07T07:20:33.000Z | 2017-02-08T09:14:58.000Z | tests/test_line_segment.py | CompMaterSci/python-geometry | 403244252b9d8521cd4d001ec63cd768d75f5393 | [
"BSD-3-Clause"
] | 5 | 2016-07-13T10:40:29.000Z | 2020-07-01T07:20:41.000Z | # -*- coding: utf-8 -*-
from numpy import array
from python_geometry.line_segment import LineSegment
class TestEquality(object):
def test_EqualIntegerLineSegment_ReturnTrue_0(self):
line_segment_0 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=True)
line_segment_1 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=True)
assert line_segment_0 == line_segment_1
def test_EqualIntegerLineSegment_ReturnTrue_1(self):
line_segment_0 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=True)
line_segment_1 = LineSegment(
end_point_0=array([1, 1, 1]),
end_point_1=array([0, 0, 0]),
end_point_0_included=True,
end_point_1_included=True)
assert line_segment_0 == line_segment_1
def test_EqualIntegerLineSegment_ReturnTrue_2(self):
line_segment_0 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=False,
end_point_1_included=True)
line_segment_1 = LineSegment(
end_point_0=array([1, 1, 1]),
end_point_1=array([0, 0, 0]),
end_point_0_included=True,
end_point_1_included=False)
assert line_segment_0 == line_segment_1
def test_EqualIntegerLineSegment_ReturnTrue_3(self):
line_segment_0 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=False)
line_segment_1 = LineSegment(
end_point_0=array([1, 1, 1]),
end_point_1=array([0, 0, 0]),
end_point_0_included=False,
end_point_1_included=True)
assert line_segment_0 == line_segment_1
def test_EqualIntegerLineSegment_ReturnTrue_4(self):
line_segment_0 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=False,
end_point_1_included=False)
line_segment_1 = LineSegment(
end_point_0=array([1, 1, 1]),
end_point_1=array([0, 0, 0]),
end_point_0_included=False,
end_point_1_included=False)
assert line_segment_0 == line_segment_1
def test_EqualFloatLineSegment_ReturnTrue_0(self):
line_segment_0 = LineSegment(
end_point_0=array([0.3, 0.3, 0.3]),
end_point_1=array([1.3, 1.3, 1.3]),
end_point_0_included=True,
end_point_1_included=True)
line_segment_1 = LineSegment(
end_point_0=array([0.3, 0.3, 0.3]),
end_point_1=array([1.3, 1.3, 1.3]),
end_point_0_included=True,
end_point_1_included=True)
assert line_segment_0 == line_segment_1
def test_EqualFloatLineSegment_ReturnTrue_1(self):
line_segment_0 = LineSegment(
end_point_0=array([0.3, 0.3, 0.3]),
end_point_1=array([1.3, 1.3, 1.3]),
end_point_0_included=True,
end_point_1_included=True)
line_segment_1 = LineSegment(
end_point_0=array([1.3, 1.3, 1.3]),
end_point_1=array([0.3, 0.3, 0.3]),
end_point_0_included=True,
end_point_1_included=True)
assert line_segment_0 == line_segment_1
def test_NonequalIntegerLineSegments_ReturnFalse_0(self):
line_segment_0 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=True)
line_segment_1 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 2]),
end_point_0_included=True,
end_point_1_included=True)
assert not line_segment_0 == line_segment_1
def test_NonequalIntegerLineSegments_ReturnFalse_1(self):
line_segment_0 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=True)
line_segment_1 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=False)
assert not line_segment_0 == line_segment_1
def test_NonequalIntegerLineSegments_ReturnFalse_2(self):
line_segment_0 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=True)
line_segment_1 = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=False,
end_point_1_included=True)
assert not line_segment_0 == line_segment_1
def test_NonequalFloatLineSegments_ReturnFalse_0(self):
line_segment_0 = LineSegment(
end_point_0=array([0.3, 0.3, 0.3]),
end_point_1=array([1.3, 1.3, 1.3]),
end_point_0_included=True,
end_point_1_included=True)
line_segment_1 = LineSegment(
end_point_0=array([0.3, 0.3, 0.3]),
end_point_1=array([1.3, 1.3, 2.3]),
end_point_0_included=True,
end_point_1_included=True)
assert not line_segment_0 == line_segment_1
class TestRepr(object):
def test_CompareAgainstItsRepr_ReturnTrue_0(self):
line_segment = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]))
assert eval(repr(line_segment)) == line_segment
def test_CompareAgainstItsRepr_ReturnTrue_1(self):
line_segment = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=True)
assert eval(repr(line_segment)) == line_segment
def test_CompareAgainstItsRepr_ReturnTrue_2(self):
line_segment = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=False)
assert eval(repr(line_segment)) == line_segment
def test_CompareAgainstItsRepr_ReturnTrue_3(self):
line_segment = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=False,
end_point_1_included=True)
assert eval(repr(line_segment)) == line_segment
def test_CompareAgainstItsRepr_ReturnTrue_4(self):
line_segment = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=False,
end_point_1_included=False)
assert eval(repr(line_segment)) == line_segment
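# Hedged sketch (not part of the suite): the cases above pin down two
# contracts -- equality is symmetric under endpoint reversal with swapped
# inclusion flags, and repr() round-trips through eval().
if __name__ == '__main__':
a = LineSegment(
end_point_0=array([0, 0, 0]),
end_point_1=array([1, 1, 1]),
end_point_0_included=True,
end_point_1_included=False)
b = LineSegment(
end_point_0=array([1, 1, 1]),
end_point_1=array([0, 0, 0]),
end_point_0_included=False,
end_point_1_included=True)
assert a == b and eval(repr(a)) == a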
| 38.09375 | 61 | 0.609653 | 1,009 | 7,314 | 4.005946 | 0.038652 | 0.209797 | 0.118011 | 0.133597 | 0.955962 | 0.951757 | 0.951757 | 0.951757 | 0.951757 | 0.946066 | 0 | 0.070179 | 0.288898 | 7,314 | 191 | 62 | 38.293194 | 0.706979 | 0.002871 | 0 | 0.852071 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094675 | 1 | 0.094675 | false | 0 | 0.011834 | 0 | 0.118343 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2fdad185f0f223b06f3844c5ab6e2e995ad4de82 | 7,487 | py | Python | dsatools/_base/_time_domain_decomposition/_ssa.py | diarmaidocualain/dsatools | 50b9259e2846b5fdd3dc52206967b0ee8d0144de | [
"MIT"
] | 31 | 2020-09-14T16:12:01.000Z | 2022-03-30T01:44:18.000Z | dsatools/_base/_time_domain_decomposition/_ssa.py | Jimmy-INL/dsatools | 5c811838bb3fb8ae00195d5f68e451bd23b3448c | [
"MIT"
] | 4 | 2021-09-24T17:47:42.000Z | 2022-03-04T08:06:43.000Z | dsatools/_base/_time_domain_decomposition/_ssa.py | Jimmy-INL/dsatools | 5c811838bb3fb8ae00195d5f68e451bd23b3448c | [
"MIT"
] | 11 | 2020-12-06T08:18:56.000Z | 2022-03-20T13:28:22.000Z | import numpy as np
import scipy
from ... import operators
__all__ = ['ssa','kernel_ssa']
#-----------------------------------------------------------------
__EPSILON__ = 1e-4
def ssa(x, order, mode='toeplitz', lags=None, averaging=True, extrasize = False):
'''
Estimation of the signal components based on the
Singular Spectrum Analysis (SSA) algorithm.
Parameters
-------------
* x: 1d ndarray,
input 1d signal.
* order: int,
order of the model (number of significant components,
i.e. the size of the signal subspace).
* mode:
The mode of lags matrix
(i.e. trajectory (or caterpillar) matrix or its analogue),
mode = {traj, full, covar, toeplitz, hankel}.
* lags: int or None,
Number of lags in correlation function
(x.shape[0]//2 by default).
* averaging: bool,
If True, the mean of each diagonal is taken
for diagonal averaging instead of
just summing (True by default).
* extrasize: bool,
if True, an output of nearly
double size is returned.
Returns
-----------
* components, 2d ndarray
components with dimensions
(order, x.shape[0]).
Notes
------------
* Not all method combinations are tested.
* SSA is calculated for each component as:
.. math::
s_p(n) = \mathrm{dav}\{\sqrt{\lambda_p}\, U_p(n)\,[(r(n)^T U_p(n))/\sqrt{\lambda_p}]^T\},
where
* s_p(n) is the p-th component of the SSA decomposition;
* U_p(n) and \lambda_p are the eigenvectors and eigenvalues,
respectively, of the matrix rr^H;
* r(n) is the lags_matrix formed from x(t);
* dav is the operator of averaging over each diagonal.
References
------------
[1] A. Zhigljavsky, Singular Spectrum Analysis
for Time Series. In: Lovric M. (eds)
International Encyclopedia of Statistical
Science. Springer, Berlin, Heidelberg, 2011.
See also
-------------
kernel_ssa,
pca,
dmd,
matrix_pencil,
esprit
'''
x = np.asarray(x)
N = x.shape[0]
#TODO: for toeplitz and hankel Nlags always = N
if(lags is None): lags = N//2
reverse = False
if(mode in ['traj', 'hankel', 'trajectory','caterpillar']):
reverse = True
base = operators.lags_matrix(x,
lags=lags,
mode=mode)
R = np.dot(base.T,np.conj(base))
# TODO: in my practice the eigenvalues always come sorted
# from the highest to the lowest,
# but explicit sorting would be safer
es,ev=np.linalg.eig(R)
# if(use_eigval):
es = np.sqrt(es)+__EPSILON__
# else:
# es = np.ones(es.shape)
psd = np.zeros((order,
base.shape[0] + base.shape[1] -1 ),
dtype = x.dtype)
for i in range(order):
Ys = np.matrix(ev[:,i])*es[i]
Vs = np.dot(base, Ys.H)/es[i]
hankel = np.outer(Ys,Vs)
diag= operators.diaganal_average(hankel,
reverse=reverse,
averaging=averaging,
samesize=extrasize)
psd[i,:diag.size] = diag
#TODO: compute diag.size up front and use that value in the psd declaration
psd = psd[:,:diag.size]
if(mode in ['traj', 'trajectory','caterpillar']):
psd = np.conj(psd)
return np.asarray(psd)/N
#------------------------------------------
__EPSILON__ = 1e-4
def kernel_ssa(x, order, mode='toeplitz', kernel='linear',kpar=1,
lags=None, averaging=True, extrasize = False):
'''
Estimation of the signal components based on the
kernel Singular Spectrum Analysis (SSA) algorithm.
Parameters
-------------
* x: 1d ndarray,
input 1d signal.
* order: int,
order of the model (number of significant components,
i.e. the size of the signal subspace).
* mode:
The mode of lags matrix
(i.e. trajectory (or caterpillar) matrix or its analogue),
mode = {traj, full, covar, toeplitz, hankel}.
* kernel: string,
kernel matrix type,
kernel = {'rbf','thin_plate','linear','euclid',
'minkowsky','sigmoid','poly'}.
* kpar: float,
the kernel parameter; depends on the kernel type.
* lags: int or None,
Number of lags in correlation function
(x.shape[0]//2 by default).
* averaging: bool,
If True, the mean of each diagonal is taken
for diagonal averaging instead of
just summing (True by default).
* extrasize: bool,
if True, an output of nearly
double size is returned.
Returns
-----------
* components, 2d ndarray
components with dimensions
(order, x.shape[0]).
Notes
------------
* Not all method combinations are tested.
* SSA is calculated for each component as:
.. math::
s_p(n) = \mathrm{dav}\{\sqrt{\lambda_p}\, U_p(n)\,[(r(n)^T U_p(n))/\sqrt{\lambda_p}]^T\},
where
* s_p(n) is the p-th component of the SSA decomposition;
* U_p(n) and \lambda_p are the eigenvectors and eigenvalues,
respectively, of the matrix rr^H;
* r(n) is the lags_matrix formed from x(t);
* dav is the operator of averaging over each diagonal.
References
------------
[1] A. Zhigljavsky, Singular Spectrum Analysis
for Time Series. In: Lovric M. (eds)
International Encyclopedia of Statistical
Science. Springer, Berlin, Heidelberg, 2011.
See also
-------------
ssa,
pca,
dmd,
matrix_pencil,
esprit
'''
x = np.asarray(x)
N = x.shape[0]
#TODO: for toeplitz and hankel Nlags always = N
if(lags is None): lags = N//2
reverse = False
if(mode in ['traj', 'hankel', 'trajectory','caterpillar']):
reverse = True
R,base = operators.kernel_matrix(x,
mode = mode,
kernel = kernel,
kpar = kpar,
lags = lags,
ret_base=True)
# TODO: in my practice the eigenvalues always come sorted
# from the highest to the lowest,
# but explicit sorting would be safer
es,ev=np.linalg.eig(R)
# if(use_eigval):
es = np.sqrt(es)+__EPSILON__
# else:
# es = np.ones(es.shape)
psd = np.zeros((order,
base.shape[0] + base.shape[1] -1 ),
dtype = x.dtype)
for i in range(order):
Ys = np.matrix(ev[:,i])*es[i]
Vs = np.dot(base, Ys.H)/es[i]
hankel = np.outer(Ys,Vs)
diag= operators.diaganal_average(hankel,
reverse=reverse,
averaging=averaging,
samesize=extrasize)
psd[i,:diag.size] = diag
#TODO: compute diag.size up front and use that value in the psd declaration
psd = psd[:,:diag.size]
if(mode in ['traj', 'trajectory','caterpillar']):
psd = np.conj(psd)
return np.asarray(psd)/N
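# Usage sketch (an illustrative assumption, not part of the library):
# decompose a noisy two-tone signal; each sinusoid typically occupies a
# pair of eigen-directions, so summing component pairs recovers the tones.
if __name__ == '__main__':
t = np.arange(256)
x = np.sin(0.2*t) + 0.3*np.sin(0.45*t) + 0.05*np.random.randn(256)
comps = ssa(x, order=4, mode='toeplitz')
tone_1 = comps[0] + comps[1]
tone_2 = comps[2] + comps[3]
print(tone_1.shape, tone_2.shape)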
| 30.189516 | 85 | 0.511019 | 882 | 7,487 | 4.282313 | 0.231293 | 0.005295 | 0.01112 | 0.004236 | 0.901244 | 0.890124 | 0.890124 | 0.890124 | 0.890124 | 0.890124 | 0 | 0.009448 | 0.363831 | 7,487 | 247 | 86 | 30.311741 | 0.78312 | 0.543342 | 0 | 0.776119 | 0 | 0 | 0.051095 | 0 | 0 | 0 | 0 | 0.008097 | 0 | 1 | 0.029851 | false | 0 | 0.044776 | 0 | 0.104478 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2fe21e7ff67d717358dee838cae7666795b7e0a8 | 2,936 | py | Python | functions.py | niteya-shah/AI-PSO-FPA | 3b842fd2cb3978c0177062cfc9e4739582bd7d47 | [
"MIT"
] | null | null | null | functions.py | niteya-shah/AI-PSO-FPA | 3b842fd2cb3978c0177062cfc9e4739582bd7d47 | [
"MIT"
] | null | null | null | functions.py | niteya-shah/AI-PSO-FPA | 3b842fd2cb3978c0177062cfc9e4739582bd7d47 | [
"MIT"
] | null | null | null | import numpy as np
class dist_from_zero:
def __init__(self, d):
self.shape = [3]
self.d = d
def eval(self, i):
return np.linalg.norm(i)
def generate_values(self, num_values):
values = list()
for i in range(num_values):
values.append(np.random.rand(*self.shape)*2*self.d - self.d)
return np.array(values)
class rastrigin:
def __init__(self, d):
self.shape = [5]
self.d = d
self.A = 10
def eval(self, i):
return (self.A*self.shape[0] + np.sum((i*i) - self.A * np.cos(2 * np.pi * i)))
def generate_values(self, num_values):
values = list()
for i in range(num_values):
values.append(np.random.rand(*self.shape)*2*self.d - self.d)
return np.array(values)
class booth:
def __init__(self, d):
self.shape = [2]
self.d = d
def eval(self, i):
return (i[0] + 2*i[1] - 7) * (i[0] + 2*i[1] - 7) + (2*i[0] + i[1] - 5) * (2 * i[0] + i[1] - 5)
def generate_values(self, num_values):
values = list()
for i in range(num_values):
values.append(np.random.rand(*self.shape)*2*self.d - self.d)
return np.array(values)
class Matyas:
def __init__(self, d):
self.shape = [2]
self.d = d
def eval(self, i):
return 0.26 * (i[0] * i[0] + i[1] * i[1]) - 0.48 * i[0] * i[1]
def generate_values(self, num_values):
values = list()
for i in range(num_values):
values.append(np.random.rand(*self.shape)*2*self.d - self.d)
return np.array(values)
class THC:
def __init__(self, d):
self.shape = [2]
self.d = d
def eval(self, i):
return 2 * i[0] * i[0] - 1.05 * np.power(i[0],4) + np.power(i[0],6) + i[0] * i[1] + i[1] * i[1]
def generate_values(self, num_values):
values = list()
for i in range(num_values):
values.append(np.random.rand(*self.shape)*2*self.d - self.d)
return np.array(values)
class McCormic:
def __init__(self, d):
self.shape = [2]
self.d = d
def eval(self, i):
return np.sin(i[0] + i[1]) + ((i[0] - i[1]) * (i[0] - i[1])) + ( -1.5 * i[0]) + (2.5 * i[1]) + 1
def generate_values(self, num_values):
values = list()
for i in range(num_values):
values.append(np.random.rand(*self.shape)*2*self.d - self.d)
return np.array(values)
class Rosenbrock:
def __init__(self, d):
self.shape = [10]
self.d = d
def eval(self, i):
i1 = np.copy(i)[:-1]
i2 = np.copy(i)[1:]
return np.sum(100 * (i2 - i1 * i1) * (i2 - i1 * i1) + (1 - i1) * (1 - i1))
def generate_values(self, num_values):
values = list()
for i in range(num_values):
values.append(np.random.rand(*self.shape)*2*self.d - self.d)
return np.array(values)
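# Minimal usage sketch (illustrative only): every benchmark exposes the same
# interface -- eval() scores a candidate and generate_values() samples the
# search box [-d, d]^shape -- so a swarm optimizer can treat them uniformly.
if __name__ == '__main__':
f = rastrigin(d=5.12) # 5.12 is the conventional Rastrigin search bound
candidates = f.generate_values(20)
best = min(candidates, key=f.eval)
print('best fitness:', f.eval(best))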
| 28.504854 | 104 | 0.526567 | 470 | 2,936 | 3.180851 | 0.121277 | 0.093645 | 0.084281 | 0.10301 | 0.840803 | 0.828763 | 0.762542 | 0.750502 | 0.742475 | 0.723746 | 0 | 0.044542 | 0.304155 | 2,936 | 102 | 105 | 28.784314 | 0.687225 | 0 | 0 | 0.740741 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.259259 | false | 0 | 0.012346 | 0.074074 | 0.530864 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 10 |
2ff39019365765d22a173b515877a79f54ebbabb | 1,365 | py | Python | botcommands/get_members.py | pastorhudson/mtb-pykeybasebot | af977f5823b178c91fb870058369f8a65205f7d6 | [
"BSD-3-Clause"
] | null | null | null | botcommands/get_members.py | pastorhudson/mtb-pykeybasebot | af977f5823b178c91fb870058369f8a65205f7d6 | [
"BSD-3-Clause"
] | null | null | null | botcommands/get_members.py | pastorhudson/mtb-pykeybasebot | af977f5823b178c91fb870058369f8a65205f7d6 | [
"BSD-3-Clause"
] | null | null | null |
def get_members(channel):
members = []
for owners in channel['owners']:
members.append(owners['username'])
for admins in channel['admins']:
if admins['username'] not in members:
members.append(admins['username'])
for writers in channel['writers']:
if writers['username'] not in members:
members.append(writers['username'])
for readers in channel['readers']:
if readers['username'] not in members:
members.append(readers['username'])
for bots in channel['bots']:
if bots['username'] not in members:
members.append(bots['username'])
for restrictedBots in channel['restrictedBots']:
if restrictedBots['username'] not in members:
members.append(restrictedBots['username'])
return members
def get_user(channel):
members = []
for owners in channel['owners']:
members.append(owners['username'])
for admins in channel['admins']:
if admins['username'] not in members:
members.append(admins['username'])
for writers in channel['writers']:
if writers['username'] not in members:
members.append(writers['username'])
for readers in channel['readers']:
if readers['username'] not in members:
members.append(readers['username'])
return members
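# Illustrative sketch: the channel payload below is an assumption inferred
# from the keys read above, not a documented Keybase schema.
if __name__ == '__main__':
channel = {
'owners': [{'username': 'alice'}],
'admins': [{'username': 'bob'}],
'writers': [{'username': 'alice'}],
'readers': [],
'bots': [{'username': 'mtb_bot'}],
'restrictedBots': [],
}
print(get_members(channel))  # ['alice', 'bob', 'mtb_bot']
print(get_user(channel))  # ['alice', 'bob'] -- human roles only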
| 35 | 54 | 0.624908 | 152 | 1,365 | 5.598684 | 0.118421 | 0.105758 | 0.122209 | 0.188014 | 0.80611 | 0.80611 | 0.728555 | 0.728555 | 0.728555 | 0.728555 | 0 | 0 | 0.249084 | 1,365 | 38 | 55 | 35.921053 | 0.830244 | 0 | 0 | 0.764706 | 0 | 0 | 0.157007 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0 | 0 | 0.117647 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2ffedec34c455d1caa9e76435faf4fa95883eb11 | 9,394 | py | Python | py/HW3/option_models/sabr.py | daifengqi/ASP | ba073a3d052e7f880ac663c53a36d1a9a7559aa5 | [
"MIT"
] | null | null | null | py/HW3/option_models/sabr.py | daifengqi/ASP | ba073a3d052e7f880ac663c53a36d1a9a7559aa5 | [
"MIT"
] | null | null | null | py/HW3/option_models/sabr.py | daifengqi/ASP | ba073a3d052e7f880ac663c53a36d1a9a7559aa5 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 10
@author: jaehyuk
"""
import numpy as np
import scipy.stats as ss
import scipy.optimize as sopt
import scipy.integrate as spint
from . import normal
from . import bsm
import pyfeng as pf
'''
MC model class for Beta=1
'''
class ModelBsmMC:
beta = 1.0 # fixed (not used)
vov, rho = 0.0, 0.0
sigma, intr, divr = None, None, None
bsm_model = None
'''
You may define more members for MC: time step, etc
'''
def __init__(self, sigma, vov=0, rho=0.0, beta=1.0, intr=0, divr=0):
self.sigma = sigma
self.vov = vov
self.rho = rho
self.intr = intr
self.divr = divr
self.bsm_model = pf.Bsm(sigma, intr=intr, divr=divr)
def bsm_vol(self, strike, spot, texp=None, sigma=None):
'''
From the price returned by self.price(), compute the implied vol.
This is the opposite of bsm_vol in the ModelHagan class.
Use bsm_model.
'''
impvol = np.ones(len(strike))
price = self.price(strike, spot, texp, sigma)
for i, K in enumerate(strike):
impvol[i] = self.bsm_model._impvol_newton(price[i], K, spot, texp)
return impvol
def price(self, strike, spot, texp=None, sigma=None, cp=1):
'''
Your MC routine goes here
Generate paths for vol and price first. Then get prices (vector) for all strikes
You may fix the random number seed
'''
# set parameters
N = 101
n_path = 10000
dtk = texp/N
rand = np.random.multivariate_normal(
mean=[0, 0], cov=[[1, self.rho], [self.rho, 1]], size=[N, n_path])
W = rand[:, :, 0]
Z = rand[:, :, 1]
# simulate volatility
sigma_path = np.ones([N, n_path])
sigma = sigma if sigma else self.sigma
sigma_path[0, :] = np.ones(n_path) * sigma
for i in range(1, N):
sigma_path[i, :] = sigma_path[i-1, :] * \
np.exp(self.vov*np.sqrt(dtk)*Z[i, :]-0.5*self.vov**2*dtk)
# simulate price
spot_path = np.ones([N, n_path])
spot_path[0, :] = np.ones(n_path) * spot
for i in range(1, N):
spot_path[i, :] = np.exp(np.log(spot_path[i-1, :])+sigma_path[i-1, :]*np.sqrt(
dtk)*W[i, :]-0.5*np.power(sigma_path[i-1, :], 2)*dtk)
spot_T = spot_path[-1, :]
# calculate payoff
price = np.ones(len(strike))
var = np.ones(len(strike))
for i, K in enumerate(strike):
price[i] = np.mean(np.fmax(spot_T - K, 0))
return price
'''
MC model class for Beta=0
'''
class ModelNormalMC:
beta = 0.0 # fixed (not used)
vov, rho = 0.0, 0.0
sigma, intr, divr = None, None, None
normal_model = None
def __init__(self, sigma, vov=0, rho=0.0, beta=0.0, intr=0, divr=0):
self.sigma = sigma
self.vov = vov
self.rho = rho
self.intr = intr
self.divr = divr
self.normal_model = pf.Norm(sigma, intr=intr, divr=divr)
def norm_vol(self, strike, spot, texp=None, sigma=None):
'''
From the price returned by self.price(), compute the implied vol.
This is the opposite of normal_vol in the ModelNormalHagan class.
Use normal_model.
'''
impvol = np.ones(len(strike))
price = self.price(strike, spot, texp, sigma)
for i, K in enumerate(strike):
impvol[i] = self.normal_model._impvol_Choi2009(
price[i], K, spot, texp)
return impvol
def price(self, strike, spot, texp=None, sigma=None, cp=1):
'''
Your MC routine goes here
Generate paths for vol and price first. Then get prices (vector) for all strikes
You may fix the random number seed
'''
# set parameters
N = 101
n_path = 10000
dtk = texp/N
rand = np.random.multivariate_normal(
mean=[0, 0], cov=[[1, self.rho], [self.rho, 1]], size=[N, n_path])
W = rand[:, :, 0]
Z = rand[:, :, 1]
# simulate volatility
sigma_path = np.ones([N, n_path])
sigma = sigma if sigma else self.sigma
sigma_path[0, :] = np.ones(n_path) * sigma
for i in range(1, N):
sigma_path[i, :] = sigma_path[i-1, :] * \
np.exp(self.vov*np.sqrt(dtk)*Z[i, :]-0.5*self.vov**2*dtk)
# simulate price
spot_path = np.ones([N, n_path])
spot_path[0, :] = np.ones(n_path) * spot
for i in range(1, N):
spot_path[i, :] = spot_path[i-1, :] + \
sigma_path[i-1, :]*W[i, :]*np.sqrt(dtk)
spot_T = spot_path[-1, :]
# calculate payoff
price = np.ones(len(strike))
for i, K in enumerate(strike):
price[i] = np.mean(np.fmax(spot_T - K, 0))
return price
'''
Conditional MC model class for Beta=1
'''
class ModelBsmCondMC:
beta = 1.0 # fixed (not used)
vov, rho = 0.0, 0.0
sigma, intr, divr = None, None, None
bsm_model = None
'''
You may define more members for MC: time step, etc
'''
def __init__(self, sigma, vov=0, rho=0.0, beta=1.0, intr=0, divr=0):
self.sigma = sigma
self.vov = vov
self.rho = rho
self.intr = intr
self.divr = divr
self.bsm_model = pf.Bsm(sigma, intr=intr, divr=divr)
def bsm_vol(self, strike, spot, texp=None):
'''
From the price returned by self.price(), compute the implied vol.
This is the opposite of bsm_vol in the ModelHagan class.
Use bsm_model.
Same as the bsm_vol method in ModelBsmMC, copied over without the
sigma argument (which ModelBsmCondMC.price does not take).
'''
impvol = np.ones(len(strike))
price = self.price(strike, spot, texp)
for i, K in enumerate(strike):
impvol[i] = self.bsm_model._impvol_newton(price[i], K, spot, texp)
return impvol
def price(self, strike, spot, texp=None, cp=1):
'''
Your MC routine goes here
Generate paths for vol only. Then compute integrated variance and BSM price.
Then get prices (vector) for all strikes
You may fix the random number seed
'''
n_path = 100000
N = 100
# simulate sigma
m = pf.BsmNdMc(self.vov)
tobs = np.arange(0, N+1) * texp/N
m.simulate(tobs=tobs, n_path=n_path)
sigma_path = np.squeeze(m.path)
sigma_final = sigma_path[-1, :]
int_var = spint.simps(sigma_path**2, dx=1, axis=0) * texp/N
# get S(bsm) and sigma(bsm)
sigma_0 = self.sigma
sigma_T = sigma_final * sigma_0
S_bs = spot*np.exp(self.rho/self.vov*(sigma_T-sigma_0) -
0.5*(self.rho*sigma_0)**2*texp*int_var)
sigma_bs = sigma_0 * np.sqrt((1-self.rho**2)*int_var)
# bsm formula
disc_fac = np.exp(-texp * self.intr)
sigma_std = np.maximum(np.array(sigma_bs) *
np.sqrt(texp), np.finfo(float).eps)
spst = ss # scipy.stats
price = np.ones(len(strike))
for i, K in enumerate(strike):
d1 = np.log(S_bs / K) / sigma_std
d2 = d1 - 0.5*sigma_std
d1 += 0.5*sigma_std
cp = np.array(cp)
price_k = S_bs * \
spst.norm.cdf(cp * d1) - K * spst.norm.cdf(cp * d2)
price_k *= cp * disc_fac
price[i] = np.mean(price_k)
return price
'''
Conditional MC model class for Beta=0
'''
class ModelNormalCondMC:
beta = 0.0 # fixed (not used)
vov, rho = 0.0, 0.0
sigma, intr, divr = None, None, None
normal_model = None
def __init__(self, sigma, vov=0, rho=0.0, beta=0.0, intr=0, divr=0):
self.sigma = sigma
self.vov = vov
self.rho = rho
self.intr = intr
self.divr = divr
self.normal_model = pf.Norm(sigma, intr=intr, divr=divr)
def norm_vol(self, strike, spot, texp=None):
'''
From the price returned by self.price(), compute the implied vol.
This is the opposite of normal_vol in the ModelNormalHagan class.
Use normal_model.
Same as the norm_vol method in ModelNormalMC, copied over without the
sigma argument (which ModelNormalCondMC.price does not take).
'''
impvol = np.ones(len(strike))
price = self.price(strike, spot, texp)
for i, K in enumerate(strike):
impvol[i] = self.normal_model._impvol_Choi2009(price[i], K, spot, texp)
return impvol
def price(self, strike, spot, texp, cp=1):
'''
Your MC routine goes here
Generate paths for vol only. Then compute integrated variance and normal price.
You may fix the random number seed
'''
n_path = 100000
N = 100
# simulate sigma
m = pf.BsmNdMc(self.vov)
tobs = np.arange(0, N+1) * texp/N
m.simulate(tobs=tobs, n_path=n_path)
sigma_path = np.squeeze(m.path)
sigma_final = sigma_path[-1, :]
int_var = spint.simps(sigma_path**2, dx=1, axis=0) * texp/N
# get S(norm) and sigma(norm)
sigma_0 = self.sigma
sigma_T = sigma_final * sigma_0
S_norm = spot + self.rho/self.vov*(sigma_T-sigma_0)
sigma_norm = sigma_0 * np.sqrt((1-self.rho**2)*int_var)
# bachelier formula
df = np.exp(-texp * self.intr)
fwd = S_norm
sigma_std = np.maximum(np.array(sigma_norm) *
np.sqrt(texp), np.finfo(float).eps)
spst = ss
price = np.ones(len(strike))
for i, K in enumerate(strike):
d = (fwd - K) / sigma_std
price_k = df * (cp*(fwd - K)*spst.norm.cdf(cp *
d) + sigma_std * spst.norm.pdf(d))
price[i] = np.mean(price_k)
return price
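# Usage sketch (parameter values are arbitrary assumptions): price the same
# strikes with plain MC and conditional MC; conditioning on the volatility
# path should give close but lower-variance prices.
if __name__ == '__main__':
strike = np.array([90.0, 100.0, 110.0])
spot, texp = 100.0, 1.0
mc = ModelBsmMC(sigma=0.2, vov=0.3, rho=-0.5)
cmc = ModelBsmCondMC(sigma=0.2, vov=0.3, rho=-0.5)
print('plain MC :', mc.price(strike, spot, texp))
print('cond. MC :', cmc.price(strike, spot, texp))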
| 31.0033 | 90 | 0.550351 | 1,391 | 9,394 | 3.616104 | 0.125809 | 0.009145 | 0.027833 | 0.028628 | 0.868191 | 0.845129 | 0.845129 | 0.821272 | 0.779125 | 0.766799 | 0 | 0.028544 | 0.324995 | 9,394 | 302 | 91 | 31.10596 | 0.764706 | 0.176389 | 0 | 0.742857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.068571 | false | 0 | 0.04 | 0 | 0.222857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
640c75f10fbe84d5e8dfa29dffdd3918a61a6e92 | 15,038 | py | Python | sdk/python/pulumi_oci/loadbalancer/path_route_set.py | EladGabay/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2021-08-17T11:14:46.000Z | 2021-12-31T02:07:03.000Z | sdk/python/pulumi_oci/loadbalancer/path_route_set.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-09-06T11:21:29.000Z | 2021-09-06T11:21:29.000Z | sdk/python/pulumi_oci/loadbalancer/path_route_set.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2021-08-24T23:31:30.000Z | 2022-01-02T19:26:54.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['PathRouteSetArgs', 'PathRouteSet']
@pulumi.input_type
class PathRouteSetArgs:
def __init__(__self__, *,
load_balancer_id: pulumi.Input[str],
path_routes: pulumi.Input[Sequence[pulumi.Input['PathRouteSetPathRouteArgs']]],
name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a PathRouteSet resource.
:param pulumi.Input[str] load_balancer_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the load balancer to add the path route set to.
:param pulumi.Input[Sequence[pulumi.Input['PathRouteSetPathRouteArgs']]] path_routes: (Updatable) The set of path route rules.
:param pulumi.Input[str] name: The name for this set of path route rules. It must be unique and it cannot be changed. Avoid entering confidential information. Example: `example_path_route_set`
"""
pulumi.set(__self__, "load_balancer_id", load_balancer_id)
pulumi.set(__self__, "path_routes", path_routes)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="loadBalancerId")
def load_balancer_id(self) -> pulumi.Input[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the load balancer to add the path route set to.
"""
return pulumi.get(self, "load_balancer_id")
@load_balancer_id.setter
def load_balancer_id(self, value: pulumi.Input[str]):
pulumi.set(self, "load_balancer_id", value)
@property
@pulumi.getter(name="pathRoutes")
def path_routes(self) -> pulumi.Input[Sequence[pulumi.Input['PathRouteSetPathRouteArgs']]]:
"""
(Updatable) The set of path route rules.
"""
return pulumi.get(self, "path_routes")
@path_routes.setter
def path_routes(self, value: pulumi.Input[Sequence[pulumi.Input['PathRouteSetPathRouteArgs']]]):
pulumi.set(self, "path_routes", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name for this set of path route rules. It must be unique and it cannot be changed. Avoid entering confidential information. Example: `example_path_route_set`
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class _PathRouteSetState:
def __init__(__self__, *,
load_balancer_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
path_routes: Optional[pulumi.Input[Sequence[pulumi.Input['PathRouteSetPathRouteArgs']]]] = None,
state: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering PathRouteSet resources.
:param pulumi.Input[str] load_balancer_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the load balancer to add the path route set to.
:param pulumi.Input[str] name: The name for this set of path route rules. It must be unique and it cannot be changed. Avoid entering confidential information. Example: `example_path_route_set`
:param pulumi.Input[Sequence[pulumi.Input['PathRouteSetPathRouteArgs']]] path_routes: (Updatable) The set of path route rules.
"""
if load_balancer_id is not None:
pulumi.set(__self__, "load_balancer_id", load_balancer_id)
if name is not None:
pulumi.set(__self__, "name", name)
if path_routes is not None:
pulumi.set(__self__, "path_routes", path_routes)
if state is not None:
pulumi.set(__self__, "state", state)
@property
@pulumi.getter(name="loadBalancerId")
def load_balancer_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the load balancer to add the path route set to.
"""
return pulumi.get(self, "load_balancer_id")
@load_balancer_id.setter
def load_balancer_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancer_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name for this set of path route rules. It must be unique and it cannot be changed. Avoid entering confidential information. Example: `example_path_route_set`
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="pathRoutes")
def path_routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PathRouteSetPathRouteArgs']]]]:
"""
(Updatable) The set of path route rules.
"""
return pulumi.get(self, "path_routes")
@path_routes.setter
def path_routes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PathRouteSetPathRouteArgs']]]]):
pulumi.set(self, "path_routes", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "state")
@state.setter
def state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "state", value)
class PathRouteSet(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
load_balancer_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
path_routes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PathRouteSetPathRouteArgs']]]]] = None,
__props__=None):
"""
This resource provides the Path Route Set resource in Oracle Cloud Infrastructure Load Balancer service.
Adds a path route set to a load balancer. For more information, see
[Managing Request Routing](https://docs.cloud.oracle.com/iaas/Content/Balance/Tasks/managingrequest.htm).
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_path_route_set = oci.loadbalancer.PathRouteSet("testPathRouteSet",
load_balancer_id=oci_load_balancer_load_balancer["test_load_balancer"]["id"],
path_routes=[oci.loadbalancer.PathRouteSetPathRouteArgs(
backend_set_name=oci_load_balancer_backend_set["test_backend_set"]["name"],
path=var["path_route_set_path_routes_path"],
path_match_type=oci.loadbalancer.PathRouteSetPathRoutePathMatchTypeArgs(
match_type=var["path_route_set_path_routes_path_match_type_match_type"],
),
)])
```
## Import
PathRouteSets can be imported using the `id`, e.g.
```sh
$ pulumi import oci:loadbalancer/pathRouteSet:PathRouteSet test_path_route_set "loadBalancers/{loadBalancerId}/pathRouteSets/{pathRouteSetName}"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] load_balancer_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the load balancer to add the path route set to.
:param pulumi.Input[str] name: The name for this set of path route rules. It must be unique and it cannot be changed. Avoid entering confidential information. Example: `example_path_route_set`
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PathRouteSetPathRouteArgs']]]] path_routes: (Updatable) The set of path route rules.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PathRouteSetArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This resource provides the Path Route Set resource in Oracle Cloud Infrastructure Load Balancer service.
Adds a path route set to a load balancer. For more information, see
[Managing Request Routing](https://docs.cloud.oracle.com/iaas/Content/Balance/Tasks/managingrequest.htm).
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_path_route_set = oci.loadbalancer.PathRouteSet("testPathRouteSet",
load_balancer_id=oci_load_balancer_load_balancer["test_load_balancer"]["id"],
path_routes=[oci.loadbalancer.PathRouteSetPathRouteArgs(
backend_set_name=oci_load_balancer_backend_set["test_backend_set"]["name"],
path=var["path_route_set_path_routes_path"],
path_match_type=oci.loadbalancer.PathRouteSetPathRoutePathMatchTypeArgs(
match_type=var["path_route_set_path_routes_path_match_type_match_type"],
),
)])
```
## Import
PathRouteSets can be imported using the `id`, e.g.
```sh
$ pulumi import oci:loadbalancer/pathRouteSet:PathRouteSet test_path_route_set "loadBalancers/{loadBalancerId}/pathRouteSets/{pathRouteSetName}"
```
:param str resource_name: The name of the resource.
:param PathRouteSetArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PathRouteSetArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
load_balancer_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
path_routes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PathRouteSetPathRouteArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PathRouteSetArgs.__new__(PathRouteSetArgs)
if load_balancer_id is None and not opts.urn:
raise TypeError("Missing required property 'load_balancer_id'")
__props__.__dict__["load_balancer_id"] = load_balancer_id
__props__.__dict__["name"] = name
if path_routes is None and not opts.urn:
raise TypeError("Missing required property 'path_routes'")
__props__.__dict__["path_routes"] = path_routes
__props__.__dict__["state"] = None
super(PathRouteSet, __self__).__init__(
'oci:loadbalancer/pathRouteSet:PathRouteSet',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
load_balancer_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
path_routes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PathRouteSetPathRouteArgs']]]]] = None,
state: Optional[pulumi.Input[str]] = None) -> 'PathRouteSet':
"""
Get an existing PathRouteSet resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] load_balancer_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the load balancer to add the path route set to.
:param pulumi.Input[str] name: The name for this set of path route rules. It must be unique and it cannot be changed. Avoid entering confidential information. Example: `example_path_route_set`
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PathRouteSetPathRouteArgs']]]] path_routes: (Updatable) The set of path route rules.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _PathRouteSetState.__new__(_PathRouteSetState)
__props__.__dict__["load_balancer_id"] = load_balancer_id
__props__.__dict__["name"] = name
__props__.__dict__["path_routes"] = path_routes
__props__.__dict__["state"] = state
return PathRouteSet(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="loadBalancerId")
def load_balancer_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the load balancer to add the path route set to.
"""
return pulumi.get(self, "load_balancer_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name for this set of path route rules. It must be unique and it cannot be changed. Avoid entering confidential information. Example: `example_path_route_set`
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pathRoutes")
def path_routes(self) -> pulumi.Output[Sequence['outputs.PathRouteSetPathRoute']]:
"""
(Updatable) The set of path route rules.
"""
return pulumi.get(self, "path_routes")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
return pulumi.get(self, "state")
| 46.701863 | 201 | 0.666312 | 1,787 | 15,038 | 5.362619 | 0.106323 | 0.068872 | 0.052593 | 0.043619 | 0.818742 | 0.795576 | 0.762287 | 0.752374 | 0.73046 | 0.707503 | 0 | 0.000087 | 0.232079 | 15,038 | 321 | 202 | 46.847352 | 0.829754 | 0.41721 | 0 | 0.557576 | 1 | 0 | 0.12062 | 0.037308 | 0 | 0 | 0 | 0 | 0 | 1 | 0.151515 | false | 0.006061 | 0.042424 | 0.012121 | 0.284848 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ff6f8109450e0dc930a3885329e82577b5be4f96 | 49,784 | py | Python | operator-pipeline-images/operatorcert/webhook/marketplace/pc_api_pb2.py | mgreczi/operator-pipelines | 2fd5cfbb702cecce98b80a307decf9d27e337416 | [
"Apache-2.0"
] | 4 | 2021-10-12T20:35:36.000Z | 2022-02-04T11:35:59.000Z | operator-pipeline-images/operatorcert/webhook/marketplace/pc_api_pb2.py | mgreczi/operator-pipelines | 2fd5cfbb702cecce98b80a307decf9d27e337416 | [
"Apache-2.0"
] | 93 | 2021-07-14T13:24:09.000Z | 2022-03-31T20:27:58.000Z | operator-pipeline-images/operatorcert/webhook/marketplace/pc_api_pb2.py | mgreczi/operator-pipelines | 2fd5cfbb702cecce98b80a307decf9d27e337416 | [
"Apache-2.0"
] | 34 | 2021-07-13T14:14:17.000Z | 2022-03-25T05:42:24.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pyxis/pc_api.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name="pyxis/pc_api.proto",
package="pyxis.redhat.com.package.pc_api",
syntax="proto3",
serialized_options=b"Z:github.com/redhat-marketplace/pipeline-mirroring/rpc/pyxis",
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x12pyxis/pc_api.proto\x12\x1fpyxis.redhat.com.package.pc_api\x1a\x1cgoogle/api/annotations.proto\x1a google/protobuf/descriptor.proto\x1a\x1cgoogle/protobuf/struct.proto"\xfb\x01\n\'PyxisOperatorsGetOperatorBundlesRequest\x12\x0f\n\x07include\x18\x01 \x03(\t\x12\x0f\n\x07\x65xclude\x18\x02 \x03(\t\x12\x0f\n\x07sort_by\x18\x03 \x03(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x03\x12\x0c\n\x04page\x18\x05 \x01(\x03\x12\x14\n\x0c\x63hannel_name\x18\x06 \x01(\t\x12\x0f\n\x07package\x18\x07 \x01(\t\x12\x13\n\x0bocp_version\x18\x08 \x01(\t\x12\x14\n\x0corganization\x18\t \x01(\t\x12\x0e\n\x06\x66ilter\x18\n \x01(\t\x12\x1a\n\x12latest_ocp_version\x18\x0b \x01(\x08"\x82\x01\n\x0b\x41lmExamples\x12\x13\n\x0b\x61pi_version\x18\x01 \x01(\t\x12\x0c\n\x04kind\x18\x02 \x01(\t\x12)\n\x08metadata\x18\x03 \x01(\x0b\x32\x17.google.protobuf.Struct\x12%\n\x04spec\x18\x04 \x01(\x0b\x32\x17.google.protobuf.Struct"J\n\x0b\x41nnotations\x12\x1f\n\x17infrastructure_features\x18\x01 \x03(\t\x12\x1a\n\x12valid_subscription\x18\x02 \x03(\t"/\n\x0cInstallModes\x12\x11\n\tsupported\x18\x01 \x01(\x08\x12\x0c\n\x04type\x18\x02 \x01(\t"L\n\x0cProvidedApis\x12\r\n\x05group\x18\x01 \x01(\t\x12\x0c\n\x04kind\x18\x02 \x01(\t\x12\x0e\n\x06plural\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\t"<\n\rRelatedImages\x12\x0e\n\x06\x64igest\x18\x01 \x01(\t\x12\r\n\x05image\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t"\xe3\x06\n\x04\x44\x61ta\x12\x0b\n\x03_id\x18\x01 \x01(\t\x12\x42\n\x0c\x61lm_examples\x18\x02 \x03(\x0b\x32,.pyxis.redhat.com.package.pc_api.AlmExamples\x12\x41\n\x0b\x61nnotations\x18\x03 \x01(\x0b\x32,.pyxis.redhat.com.package.pc_api.Annotations\x12\x15\n\rarchitectures\x18\x04 \x03(\t\x12\x0e\n\x06\x62undle\x18\x05 \x01(\t\x12\x13\n\x0b\x62undle_path\x18\x06 \x01(\t\x12\x1a\n\x12\x62undle_path_digest\x18\x07 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x08 \x03(\t\x12\x14\n\x0c\x63hannel_name\x18\t \x01(\t\x12\x15\n\rcreation_date\x18\n \x01(\t\x12\x17\n\x0f\x63sv_description\x18\x0b \x01(\t\x12\x18\n\x10\x63sv_display_name\x18\x0c \x01(\t\x12 \n\x18\x63sv_metadata_description\x18\r \x01(\t\x12\x10\n\x08\x63sv_name\x18\x0e \x01(\t\x12\x14\n\x0cin_index_img\x18\x0f \x01(\x08\x12\x44\n\rinstall_modes\x18\x10 \x03(\x0b\x32-.pyxis.redhat.com.package.pc_api.InstallModes\x12\x1a\n\x12is_default_channel\x18\x11 \x01(\x08\x12\x18\n\x10last_update_date\x18\x12 \x01(\t\x12\x19\n\x11latest_in_channel\x18\x13 \x01(\x08\x12\x13\n\x0bocp_version\x18\x14 \x01(\t\x12\x14\n\x0corganization\x18\x15 \x01(\t\x12\x0f\n\x07package\x18\x16 \x01(\t\x12\x44\n\rprovided_apis\x18\x17 \x03(\x0b\x32-.pyxis.redhat.com.package.pc_api.ProvidedApis\x12\x46\n\x0erelated_images\x18\x18 \x03(\x0b\x32..pyxis.redhat.com.package.pc_api.RelatedImages\x12#\n\x1bsource_index_container_path\x18\x19 \x01(\t\x12\x0f\n\x07version\x18\x1a \x01(\t\x12\x18\n\x10version_original\x18\x1b \x01(\t"\x89\x01\n"PyxisOperatorsGetOperatorBundlesOK\x12\x0c\n\x04page\x18\x01 \x01(\x03\x12\x11\n\tpage_size\x18\x02 \x01(\x03\x12\r\n\x05total\x18\x03 \x01(\x03\x12\x33\n\x04\x64\x61ta\x18\x04 \x03(\x0b\x32%.pyxis.redhat.com.package.pc_api.Data2\xd8\x01\n\x06Pc_api\x12\xcd\x01\n PyxisOperatorsGetOperatorBundles\x12H.pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest\x1a\x43.pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesOK"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/operators/bundlesB<Z:github.com/redhat-marketplace/pipeline-mirroring/rpc/pyxisb\x06proto3',
dependencies=[
google_dot_api_dot_annotations__pb2.DESCRIPTOR,
google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,
google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,
],
)
_PYXISOPERATORSGETOPERATORBUNDLESREQUEST = _descriptor.Descriptor(
name="PyxisOperatorsGetOperatorBundlesRequest",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="include",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.include",
index=0,
number=1,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="exclude",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.exclude",
index=1,
number=2,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="sort_by",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.sort_by",
index=2,
number=3,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="page_size",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.page_size",
index=3,
number=4,
type=3,
cpp_type=2,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="page",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.page",
index=4,
number=5,
type=3,
cpp_type=2,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="channel_name",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.channel_name",
index=5,
number=6,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="package",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.package",
index=6,
number=7,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="ocp_version",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.ocp_version",
index=7,
number=8,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="organization",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.organization",
index=8,
number=9,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="filter",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.filter",
index=9,
number=10,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="latest_ocp_version",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest.latest_ocp_version",
index=10,
number=11,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=150,
serialized_end=401,
)
_ALMEXAMPLES = _descriptor.Descriptor(
name="AlmExamples",
full_name="pyxis.redhat.com.package.pc_api.AlmExamples",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="api_version",
full_name="pyxis.redhat.com.package.pc_api.AlmExamples.api_version",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="kind",
full_name="pyxis.redhat.com.package.pc_api.AlmExamples.kind",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="metadata",
full_name="pyxis.redhat.com.package.pc_api.AlmExamples.metadata",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="spec",
full_name="pyxis.redhat.com.package.pc_api.AlmExamples.spec",
index=3,
number=4,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=404,
serialized_end=534,
)
_ANNOTATIONS = _descriptor.Descriptor(
name="Annotations",
full_name="pyxis.redhat.com.package.pc_api.Annotations",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="infrastructure_features",
full_name="pyxis.redhat.com.package.pc_api.Annotations.infrastructure_features",
index=0,
number=1,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="valid_subscription",
full_name="pyxis.redhat.com.package.pc_api.Annotations.valid_subscription",
index=1,
number=2,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=536,
serialized_end=610,
)
_INSTALLMODES = _descriptor.Descriptor(
name="InstallModes",
full_name="pyxis.redhat.com.package.pc_api.InstallModes",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="supported",
full_name="pyxis.redhat.com.package.pc_api.InstallModes.supported",
index=0,
number=1,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="type",
full_name="pyxis.redhat.com.package.pc_api.InstallModes.type",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=612,
serialized_end=659,
)
_PROVIDEDAPIS = _descriptor.Descriptor(
name="ProvidedApis",
full_name="pyxis.redhat.com.package.pc_api.ProvidedApis",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="group",
full_name="pyxis.redhat.com.package.pc_api.ProvidedApis.group",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="kind",
full_name="pyxis.redhat.com.package.pc_api.ProvidedApis.kind",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="plural",
full_name="pyxis.redhat.com.package.pc_api.ProvidedApis.plural",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="version",
full_name="pyxis.redhat.com.package.pc_api.ProvidedApis.version",
index=3,
number=4,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=661,
serialized_end=737,
)
_RELATEDIMAGES = _descriptor.Descriptor(
name="RelatedImages",
full_name="pyxis.redhat.com.package.pc_api.RelatedImages",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="digest",
full_name="pyxis.redhat.com.package.pc_api.RelatedImages.digest",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="image",
full_name="pyxis.redhat.com.package.pc_api.RelatedImages.image",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="name",
full_name="pyxis.redhat.com.package.pc_api.RelatedImages.name",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=739,
serialized_end=799,
)
_DATA = _descriptor.Descriptor(
name="Data",
full_name="pyxis.redhat.com.package.pc_api.Data",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="_id",
full_name="pyxis.redhat.com.package.pc_api.Data._id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="alm_examples",
full_name="pyxis.redhat.com.package.pc_api.Data.alm_examples",
index=1,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="annotations",
full_name="pyxis.redhat.com.package.pc_api.Data.annotations",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="architectures",
full_name="pyxis.redhat.com.package.pc_api.Data.architectures",
index=3,
number=4,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="bundle",
full_name="pyxis.redhat.com.package.pc_api.Data.bundle",
index=4,
number=5,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="bundle_path",
full_name="pyxis.redhat.com.package.pc_api.Data.bundle_path",
index=5,
number=6,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="bundle_path_digest",
full_name="pyxis.redhat.com.package.pc_api.Data.bundle_path_digest",
index=6,
number=7,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="capabilities",
full_name="pyxis.redhat.com.package.pc_api.Data.capabilities",
index=7,
number=8,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="channel_name",
full_name="pyxis.redhat.com.package.pc_api.Data.channel_name",
index=8,
number=9,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="creation_date",
full_name="pyxis.redhat.com.package.pc_api.Data.creation_date",
index=9,
number=10,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="csv_description",
full_name="pyxis.redhat.com.package.pc_api.Data.csv_description",
index=10,
number=11,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="csv_display_name",
full_name="pyxis.redhat.com.package.pc_api.Data.csv_display_name",
index=11,
number=12,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="csv_metadata_description",
full_name="pyxis.redhat.com.package.pc_api.Data.csv_metadata_description",
index=12,
number=13,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="csv_name",
full_name="pyxis.redhat.com.package.pc_api.Data.csv_name",
index=13,
number=14,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="in_index_img",
full_name="pyxis.redhat.com.package.pc_api.Data.in_index_img",
index=14,
number=15,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="install_modes",
full_name="pyxis.redhat.com.package.pc_api.Data.install_modes",
index=15,
number=16,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="is_default_channel",
full_name="pyxis.redhat.com.package.pc_api.Data.is_default_channel",
index=16,
number=17,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="last_update_date",
full_name="pyxis.redhat.com.package.pc_api.Data.last_update_date",
index=17,
number=18,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="latest_in_channel",
full_name="pyxis.redhat.com.package.pc_api.Data.latest_in_channel",
index=18,
number=19,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="ocp_version",
full_name="pyxis.redhat.com.package.pc_api.Data.ocp_version",
index=19,
number=20,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="organization",
full_name="pyxis.redhat.com.package.pc_api.Data.organization",
index=20,
number=21,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="package",
full_name="pyxis.redhat.com.package.pc_api.Data.package",
index=21,
number=22,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="provided_apis",
full_name="pyxis.redhat.com.package.pc_api.Data.provided_apis",
index=22,
number=23,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="related_images",
full_name="pyxis.redhat.com.package.pc_api.Data.related_images",
index=23,
number=24,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="source_index_container_path",
full_name="pyxis.redhat.com.package.pc_api.Data.source_index_container_path",
index=24,
number=25,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="version",
full_name="pyxis.redhat.com.package.pc_api.Data.version",
index=25,
number=26,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="version_original",
full_name="pyxis.redhat.com.package.pc_api.Data.version_original",
index=26,
number=27,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=802,
serialized_end=1669,
)
_PYXISOPERATORSGETOPERATORBUNDLESOK = _descriptor.Descriptor(
name="PyxisOperatorsGetOperatorBundlesOK",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesOK",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="page",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesOK.page",
index=0,
number=1,
type=3,
cpp_type=2,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="page_size",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesOK.page_size",
index=1,
number=2,
type=3,
cpp_type=2,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="total",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesOK.total",
index=2,
number=3,
type=3,
cpp_type=2,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="data",
full_name="pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesOK.data",
index=3,
number=4,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1672,
serialized_end=1809,
)
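# The assignments below resolve cross-references that the flat Descriptor
# definitions above left as None: message-typed fields receive their concrete
# message descriptors, and each top-level message is then registered on the
# file descriptor. (Comment added for orientation; not emitted by protoc.)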
_ALMEXAMPLES.fields_by_name[
"metadata"
].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_ALMEXAMPLES.fields_by_name[
"spec"
].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_DATA.fields_by_name["alm_examples"].message_type = _ALMEXAMPLES
_DATA.fields_by_name["annotations"].message_type = _ANNOTATIONS
_DATA.fields_by_name["install_modes"].message_type = _INSTALLMODES
_DATA.fields_by_name["provided_apis"].message_type = _PROVIDEDAPIS
_DATA.fields_by_name["related_images"].message_type = _RELATEDIMAGES
_PYXISOPERATORSGETOPERATORBUNDLESOK.fields_by_name["data"].message_type = _DATA
DESCRIPTOR.message_types_by_name[
"PyxisOperatorsGetOperatorBundlesRequest"
] = _PYXISOPERATORSGETOPERATORBUNDLESREQUEST
DESCRIPTOR.message_types_by_name["AlmExamples"] = _ALMEXAMPLES
DESCRIPTOR.message_types_by_name["Annotations"] = _ANNOTATIONS
DESCRIPTOR.message_types_by_name["InstallModes"] = _INSTALLMODES
DESCRIPTOR.message_types_by_name["ProvidedApis"] = _PROVIDEDAPIS
DESCRIPTOR.message_types_by_name["RelatedImages"] = _RELATEDIMAGES
DESCRIPTOR.message_types_by_name["Data"] = _DATA
DESCRIPTOR.message_types_by_name[
"PyxisOperatorsGetOperatorBundlesOK"
] = _PYXISOPERATORSGETOPERATORBUNDLESOK
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PyxisOperatorsGetOperatorBundlesRequest = _reflection.GeneratedProtocolMessageType(
"PyxisOperatorsGetOperatorBundlesRequest",
(_message.Message,),
{
"DESCRIPTOR": _PYXISOPERATORSGETOPERATORBUNDLESREQUEST,
"__module__": "pyxis.pc_api_pb2"
# @@protoc_insertion_point(class_scope:pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesRequest)
},
)
_sym_db.RegisterMessage(PyxisOperatorsGetOperatorBundlesRequest)
AlmExamples = _reflection.GeneratedProtocolMessageType(
"AlmExamples",
(_message.Message,),
{
"DESCRIPTOR": _ALMEXAMPLES,
"__module__": "pyxis.pc_api_pb2"
# @@protoc_insertion_point(class_scope:pyxis.redhat.com.package.pc_api.AlmExamples)
},
)
_sym_db.RegisterMessage(AlmExamples)
Annotations = _reflection.GeneratedProtocolMessageType(
"Annotations",
(_message.Message,),
{
"DESCRIPTOR": _ANNOTATIONS,
"__module__": "pyxis.pc_api_pb2"
# @@protoc_insertion_point(class_scope:pyxis.redhat.com.package.pc_api.Annotations)
},
)
_sym_db.RegisterMessage(Annotations)
InstallModes = _reflection.GeneratedProtocolMessageType(
"InstallModes",
(_message.Message,),
{
"DESCRIPTOR": _INSTALLMODES,
"__module__": "pyxis.pc_api_pb2"
# @@protoc_insertion_point(class_scope:pyxis.redhat.com.package.pc_api.InstallModes)
},
)
_sym_db.RegisterMessage(InstallModes)
ProvidedApis = _reflection.GeneratedProtocolMessageType(
"ProvidedApis",
(_message.Message,),
{
"DESCRIPTOR": _PROVIDEDAPIS,
"__module__": "pyxis.pc_api_pb2"
# @@protoc_insertion_point(class_scope:pyxis.redhat.com.package.pc_api.ProvidedApis)
},
)
_sym_db.RegisterMessage(ProvidedApis)
RelatedImages = _reflection.GeneratedProtocolMessageType(
"RelatedImages",
(_message.Message,),
{
"DESCRIPTOR": _RELATEDIMAGES,
"__module__": "pyxis.pc_api_pb2"
# @@protoc_insertion_point(class_scope:pyxis.redhat.com.package.pc_api.RelatedImages)
},
)
_sym_db.RegisterMessage(RelatedImages)
Data = _reflection.GeneratedProtocolMessageType(
"Data",
(_message.Message,),
{
"DESCRIPTOR": _DATA,
"__module__": "pyxis.pc_api_pb2"
# @@protoc_insertion_point(class_scope:pyxis.redhat.com.package.pc_api.Data)
},
)
_sym_db.RegisterMessage(Data)
PyxisOperatorsGetOperatorBundlesOK = _reflection.GeneratedProtocolMessageType(
"PyxisOperatorsGetOperatorBundlesOK",
(_message.Message,),
{
"DESCRIPTOR": _PYXISOPERATORSGETOPERATORBUNDLESOK,
"__module__": "pyxis.pc_api_pb2"
# @@protoc_insertion_point(class_scope:pyxis.redhat.com.package.pc_api.PyxisOperatorsGetOperatorBundlesOK)
},
)
_sym_db.RegisterMessage(PyxisOperatorsGetOperatorBundlesOK)
DESCRIPTOR._options = None
_PC_API = _descriptor.ServiceDescriptor(
name="Pc_api",
full_name="pyxis.redhat.com.package.pc_api.Pc_api",
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=1812,
serialized_end=2028,
methods=[
_descriptor.MethodDescriptor(
name="PyxisOperatorsGetOperatorBundles",
full_name="pyxis.redhat.com.package.pc_api.Pc_api.PyxisOperatorsGetOperatorBundles",
index=0,
containing_service=None,
input_type=_PYXISOPERATORSGETOPERATORBUNDLESREQUEST,
output_type=_PYXISOPERATORSGETOPERATORBUNDLESOK,
serialized_options=b"\202\323\344\223\002\024\022\022/operators/bundles",
create_key=_descriptor._internal_create_key,
),
],
)
_sym_db.RegisterServiceDescriptor(_PC_API)
DESCRIPTOR.services_by_name["Pc_api"] = _PC_API
# @@protoc_insertion_point(module_scope)
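# ---------------------------------------------------------------------------
# Illustrative usage sketch (added for clarity; not emitted by protoc). The
# service descriptor above maps PyxisOperatorsGetOperatorBundles to
# HTTP GET /operators/bundles via its google.api.http option. The snippet
# below only exercises the generated message classes with the standard
# protobuf message API (keyword constructor, SerializeToString, FromString);
# the field values are hypothetical.
if __name__ == "__main__":
    request = PyxisOperatorsGetOperatorBundlesRequest(
        package="example-operator",  # hypothetical operator package name
        ocp_version="4.10",          # hypothetical OpenShift version
        page=0,
        page_size=100,
    )
    wire_bytes = request.SerializeToString()  # encode to protobuf wire format
    decoded = PyxisOperatorsGetOperatorBundlesRequest.FromString(wire_bytes)
    assert decoded.package == request.package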
| 34.644398 | 3,486 | 0.594649 | 5,173 | 49,784 | 5.412527 | 0.061086 | 0.051145 | 0.079396 | 0.054645 | 0.789314 | 0.76417 | 0.75117 | 0.723883 | 0.719383 | 0.646452 | 0 | 0.036172 | 0.307529 | 49,784 | 1,436 | 3,487 | 34.668524 | 0.776005 | 0.018379 | 0 | 0.792208 | 1 | 0.003608 | 0.128268 | 0.100489 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.005051 | 0 | 0.005051 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ff74f3d4fd6b7b5ecc4026ec37bc1de696e422f6 | 24,823 | py | Python | tb_rest_client/api/api_ce/tenant_controller_api.py | jernkuan/thingsboard-python-rest-client | 3fb25272507494e6d494b27ca2380d3c543562e5 | [
"Apache-2.0"
] | null | null | null | tb_rest_client/api/api_ce/tenant_controller_api.py | jernkuan/thingsboard-python-rest-client | 3fb25272507494e6d494b27ca2380d3c543562e5 | [
"Apache-2.0"
] | null | null | null | tb_rest_client/api/api_ce/tenant_controller_api.py | jernkuan/thingsboard-python-rest-client | 3fb25272507494e6d494b27ca2380d3c543562e5 | [
"Apache-2.0"
] | 1 | 2021-11-26T11:24:56.000Z | 2021-11-26T11:24:56.000Z | # coding: utf-8
"""
ThingsBoard REST API
    For instructions on how to authorize requests, please visit <a href='http://thingsboard.io/docs/reference/rest-api/'>REST API documentation page</a>. # noqa: E501
OpenAPI spec version: 2.0
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class TenantControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_tenant_using_delete(self, tenant_id, **kwargs): # noqa: E501
"""deleteTenant # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tenant_using_delete(tenant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str tenant_id: tenantId (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_tenant_using_delete_with_http_info(tenant_id, **kwargs) # noqa: E501
else:
(data) = self.delete_tenant_using_delete_with_http_info(tenant_id, **kwargs) # noqa: E501
return data
def delete_tenant_using_delete_with_http_info(self, tenant_id, **kwargs): # noqa: E501
"""deleteTenant # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tenant_using_delete_with_http_info(tenant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str tenant_id: tenantId (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['tenant_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_tenant_using_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'tenant_id' is set
if ('tenant_id' not in params or
params['tenant_id'] is None):
raise ValueError("Missing the required parameter `tenant_id` when calling `delete_tenant_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'tenant_id' in params:
path_params['tenantId'] = params['tenant_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/tenant/{tenantId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tenant_by_id_using_get(self, tenant_id, **kwargs): # noqa: E501
"""getTenantById # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tenant_by_id_using_get(tenant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str tenant_id: tenantId (required)
:return: Tenant
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tenant_by_id_using_get_with_http_info(tenant_id, **kwargs) # noqa: E501
else:
(data) = self.get_tenant_by_id_using_get_with_http_info(tenant_id, **kwargs) # noqa: E501
return data
def get_tenant_by_id_using_get_with_http_info(self, tenant_id, **kwargs): # noqa: E501
"""getTenantById # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tenant_by_id_using_get_with_http_info(tenant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str tenant_id: tenantId (required)
:return: Tenant
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['tenant_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tenant_by_id_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'tenant_id' is set
if ('tenant_id' not in params or
params['tenant_id'] is None):
raise ValueError("Missing the required parameter `tenant_id` when calling `get_tenant_by_id_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'tenant_id' in params:
path_params['tenantId'] = params['tenant_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/tenant/{tenantId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Tenant', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tenant_info_by_id_using_get(self, tenant_id, **kwargs): # noqa: E501
"""getTenantInfoById # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tenant_info_by_id_using_get(tenant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str tenant_id: tenantId (required)
:return: TenantInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tenant_info_by_id_using_get_with_http_info(tenant_id, **kwargs) # noqa: E501
else:
(data) = self.get_tenant_info_by_id_using_get_with_http_info(tenant_id, **kwargs) # noqa: E501
return data
def get_tenant_info_by_id_using_get_with_http_info(self, tenant_id, **kwargs): # noqa: E501
"""getTenantInfoById # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tenant_info_by_id_using_get_with_http_info(tenant_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str tenant_id: tenantId (required)
:return: TenantInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['tenant_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tenant_info_by_id_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'tenant_id' is set
if ('tenant_id' not in params or
params['tenant_id'] is None):
raise ValueError("Missing the required parameter `tenant_id` when calling `get_tenant_info_by_id_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'tenant_id' in params:
path_params['tenantId'] = params['tenant_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/tenant/info/{tenantId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TenantInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tenant_infos_using_get(self, page_size, page, **kwargs): # noqa: E501
"""getTenantInfos # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tenant_infos_using_get(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str page_size: pageSize (required)
:param str page: page (required)
:param str text_search: textSearch
:param str sort_property: sortProperty
:param str sort_order: sortOrder
:return: PageDataTenantInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tenant_infos_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_tenant_infos_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
return data
def get_tenant_infos_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501
"""getTenantInfos # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tenant_infos_using_get_with_http_info(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str page_size: pageSize (required)
:param str page: page (required)
:param str text_search: textSearch
:param str sort_property: sortProperty
:param str sort_order: sortOrder
:return: PageDataTenantInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tenant_infos_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'page_size' is set
if ('page_size' not in params or
params['page_size'] is None):
raise ValueError("Missing the required parameter `page_size` when calling `get_tenant_infos_using_get`") # noqa: E501
# verify the required parameter 'page' is set
if ('page' not in params or
params['page'] is None):
raise ValueError("Missing the required parameter `page` when calling `get_tenant_infos_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'text_search' in params:
query_params.append(('textSearch', params['text_search'])) # noqa: E501
if 'sort_property' in params:
query_params.append(('sortProperty', params['sort_property'])) # noqa: E501
if 'sort_order' in params:
query_params.append(('sortOrder', params['sort_order'])) # noqa: E501
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/tenantInfos{?textSearch,sortProperty,sortOrder,pageSize,page}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDataTenantInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tenants_using_get(self, page_size, page, **kwargs): # noqa: E501
"""getTenants # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tenants_using_get(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str page_size: pageSize (required)
:param str page: page (required)
:param str text_search: textSearch
:param str sort_property: sortProperty
:param str sort_order: sortOrder
:return: PageDataTenant
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tenants_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_tenants_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
return data
def get_tenants_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501
"""getTenants # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tenants_using_get_with_http_info(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str page_size: pageSize (required)
:param str page: page (required)
:param str text_search: textSearch
:param str sort_property: sortProperty
:param str sort_order: sortOrder
:return: PageDataTenant
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tenants_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'page_size' is set
if ('page_size' not in params or
params['page_size'] is None):
raise ValueError("Missing the required parameter `page_size` when calling `get_tenants_using_get`") # noqa: E501
# verify the required parameter 'page' is set
if ('page' not in params or
params['page'] is None):
raise ValueError("Missing the required parameter `page` when calling `get_tenants_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'text_search' in params:
query_params.append(('textSearch', params['text_search'])) # noqa: E501
if 'sort_property' in params:
query_params.append(('sortProperty', params['sort_property'])) # noqa: E501
if 'sort_order' in params:
query_params.append(('sortOrder', params['sort_order'])) # noqa: E501
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/tenants{?textSearch,sortProperty,sortOrder,pageSize,page}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDataTenant', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def save_tenant_using_post(self, body, **kwargs): # noqa: E501
"""saveTenant # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_tenant_using_post(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Tenant body: tenant (required)
:return: Tenant
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.save_tenant_using_post_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.save_tenant_using_post_with_http_info(body, **kwargs) # noqa: E501
return data
def save_tenant_using_post_with_http_info(self, body, **kwargs): # noqa: E501
"""saveTenant # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_tenant_using_post_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Tenant body: tenant (required)
:return: Tenant
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method save_tenant_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `save_tenant_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/tenant', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Tenant', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
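# ---------------------------------------------------------------------------
# Illustrative usage sketch (added; not generated by swagger-codegen). It
# shows the synchronous and asynchronous call patterns that the docstrings
# above describe. In real use the ApiClient would be configured with a host
# URL and an X-Authorization JWT before calling; both are omitted here, so
# treat this as a shape-only example.
if __name__ == "__main__":
    api = TenantControllerApi(ApiClient())
    # Synchronous call: blocks and returns the deserialized PageDataTenant.
    tenants_page = api.get_tenants_using_get(page_size="10", page="0")
    # Asynchronous call: returns a thread; .get() blocks for the result,
    # exactly as the method docstrings above state.
    thread = api.get_tenants_using_get(page_size="10", page="0", async_req=True)
    tenants_page = thread.get()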
| 39.276899 | 163 | 0.612738 | 2,903 | 24,823 | 4.950052 | 0.062694 | 0.046207 | 0.023382 | 0.030063 | 0.940223 | 0.933194 | 0.931385 | 0.921294 | 0.912665 | 0.909186 | 0 | 0.014684 | 0.294928 | 24,823 | 631 | 164 | 39.339144 | 0.806365 | 0.306812 | 0 | 0.795918 | 0 | 0 | 0.198755 | 0.060469 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037901 | false | 0 | 0.011662 | 0 | 0.104956 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
44379ebbb3e6ae1cc1d0fde709101c0993332d79 | 117 | py | Python | simple_diffusion_model/__init__.py | neverix/simple-diffusion-model | 635d106b6581e3d3caf3cdb1131db0571ffb41e9 | [
"BSD-3-Clause"
] | 14 | 2021-07-06T21:12:41.000Z | 2022-02-20T19:42:00.000Z | simple_diffusion_model/__init__.py | crowsonkb/simple-diffusion-model | 3f5c2fd8baa7af7e3df42c4f2568ef0e258b9998 | [
"BSD-3-Clause"
] | 1 | 2021-11-03T09:33:25.000Z | 2021-11-03T09:33:25.000Z | simple_diffusion_model/__init__.py | crowsonkb/simple-diffusion-model | 3f5c2fd8baa7af7e3df42c4f2568ef0e258b9998 | [
"BSD-3-Clause"
] | 3 | 2021-08-25T14:05:36.000Z | 2021-12-25T21:06:09.000Z | from simple_diffusion_model.model import Model
from simple_diffusion_model.diffusion_wrapper import DiffusionWrapper
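# Added note: this __init__ simply re-exports the package's two public
# classes, so downstream code can import them from the top level, e.g.
# `from simple_diffusion_model import Model, DiffusionWrapper`. The
# constructor signatures live in the submodules and are not shown here.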
| 39 | 69 | 0.91453 | 15 | 117 | 6.8 | 0.466667 | 0.196078 | 0.372549 | 0.470588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.068376 | 117 | 2 | 70 | 58.5 | 0.93578 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
4445033743988d13aa14002c2c7baee08c278c19 | 7,218 | py | Python | two_stage_detector/None.py | ThesisSlayers/two_stage_detector | 22541ce1088cb03933ea5a589def4a186f7ad6ca | [
"MIT"
] | null | null | null | two_stage_detector/None.py | ThesisSlayers/two_stage_detector | 22541ce1088cb03933ea5a589def4a186f7ad6ca | [
"MIT"
] | null | null | null | two_stage_detector/None.py | ThesisSlayers/two_stage_detector | 22541ce1088cb03933ea5a589def4a186f7ad6ca | [
"MIT"
] | null | null | null |
# Cell
import tensorflow as tf
import logging
import cv2
from pathlib import Path
import matplotlib.pyplot as plt
import xml.etree.ElementTree as ET
import os
import numpy as np
import json
from IPython.display import clear_output
from tqdm.notebook import tqdm
from utils import *
# from pycocotools.coco import COCO
# from pycocotools.cocoeval import COCOeval
%config Completer.use_jedi = False | 19.99446 | 43 | 0.815461 | 1,098 | 7,218 | 5.327869 | 0.032787 | 0.092308 | 0.061538 | 0.067692 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0.002933 | 0.149626 | 7,218 | 361 | 44 | 19.99446 | 0.950147 | 0.201856 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.923077 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
44587db5fb1a6a22f2ed387998778a28d4aeb788 | 40 | py | Python | nanoscopy/stats/__init__.py | darianSmalley/NanoscoPy | dfb6784f5ad3f439765bfb0fb67d9cde5aec87d5 | [
"MIT"
] | null | null | null | nanoscopy/stats/__init__.py | darianSmalley/NanoscoPy | dfb6784f5ad3f439765bfb0fb67d9cde5aec87d5 | [
"MIT"
] | null | null | null | nanoscopy/stats/__init__.py | darianSmalley/NanoscoPy | dfb6784f5ad3f439765bfb0fb67d9cde5aec87d5 | [
"MIT"
] | null | null | null | from .factorial_doe import factorial_doe | 40 | 40 | 0.9 | 6 | 40 | 5.666667 | 0.666667 | 0.705882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075 | 40 | 1 | 40 | 40 | 0.918919 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
44971acc94aeadc6c7d702a7c1fcc686a7b2214d | 1,998 | py | Python | hearthbreaker/targeting.py | souserge/hearthbreaker | 481dcaa3ae13c7dc16c0e6b7f59f11c36fdb29a7 | [
"MIT"
] | 429 | 2015-01-01T16:07:20.000Z | 2022-03-16T22:30:50.000Z | hearthbreaker/targeting.py | souserge/hearthbreaker | 481dcaa3ae13c7dc16c0e6b7f59f11c36fdb29a7 | [
"MIT"
] | 47 | 2015-01-01T17:07:57.000Z | 2018-05-07T10:49:37.000Z | hearthbreaker/targeting.py | souserge/hearthbreaker | 481dcaa3ae13c7dc16c0e6b7f59f11c36fdb29a7 | [
"MIT"
] | 135 | 2015-01-12T21:52:17.000Z | 2022-02-25T21:18:08.000Z | import copy
def find_spell_target(game, filter_function):
targets = copy.copy(game.other_player.minions)
targets.extend(game.current_player.minions)
targets.append(game.other_player.hero)
targets.append(game.current_player.hero)
targets = [target for target in targets if filter_function(target)]
return targets
def find_enemy_spell_target(game, filter_function):
targets = copy.copy(game.other_player.minions)
targets.append(game.other_player.hero)
targets = [target for target in targets if filter_function(target)]
return targets
def find_friendly_spell_target(game, filter_function):
targets = copy.copy(game.current_player.minions)
targets.append(game.current_player.hero)
targets = [target for target in targets if filter_function(target)]
return targets
def find_minion_spell_target(game, filter_function):
targets = copy.copy(game.other_player.minions)
targets.extend(game.current_player.minions)
targets = [target for target in targets if filter_function(target)]
return targets
def find_enemy_minion_spell_target(game, filter_function):
targets = copy.copy(game.other_player.minions)
targets = [target for target in targets if filter_function(target)]
return targets
def find_friendly_minion_spell_target(game, filter_function):
targets = copy.copy(game.current_player.minions)
targets = [target for target in targets if filter_function(target)]
return targets
def find_enemy_minion_battlecry_target(game, filter_function):
targets = copy.copy(game.other_player.minions)
targets = [target for target in targets if filter_function(target)]
    if not targets:
return None
return targets
def find_friendly_minion_battlecry_target(game, filter_function):
targets = copy.copy(game.current_player.minions)
targets = [target for target in targets if filter_function(target)]
    if not targets:
return None
return targets
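# A minimal usage sketch; the ``game`` object and the ``health`` attribute on
# targets are illustrative assumptions about the caller's objects:
#     injured_friends = find_friendly_minion_spell_target(
#         game, lambda target: target.health > 1)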
| 29.382353 | 71 | 0.75976 | 271 | 1,998 | 5.391144 | 0.107011 | 0.15332 | 0.136893 | 0.131417 | 0.988364 | 0.988364 | 0.978097 | 0.978097 | 0.978097 | 0.950719 | 0 | 0.001195 | 0.162162 | 1,998 | 67 | 72 | 29.820896 | 0.871565 | 0 | 0 | 0.790698 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.186047 | false | 0 | 0.023256 | 0 | 0.44186 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
925e6fe16c6b09ba7f9aedcfc93fd685e68f6db3 | 32,403 | py | Python | appengine/gce-backend/instance_group_managers_test.py | Slayo2008/New2 | 3fa4c520dddd82ed190152709e0a54b35faa3bae | [
"Apache-2.0"
] | 1 | 2017-10-30T15:08:10.000Z | 2017-10-30T15:08:10.000Z | appengine/gce-backend/instance_group_managers_test.py | Slayo2008/New2 | 3fa4c520dddd82ed190152709e0a54b35faa3bae | [
"Apache-2.0"
] | null | null | null | appengine/gce-backend/instance_group_managers_test.py | Slayo2008/New2 | 3fa4c520dddd82ed190152709e0a54b35faa3bae | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright 2016 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Unit tests for instance_group_managers.py."""
import unittest
import test_env
test_env.setup_test_env()
from google.appengine.ext import ndb
from components import datastore_utils
from components import net
from test_support import test_case
import instance_group_managers
import models
class CountInstancesTest(test_case.TestCase):
"""Tests for instance_group_managers.count_instances."""
def test_no_instance_group_managers(self):
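    """Ensures nothing is returned when there are no instance group managers."""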
self.failIf(instance_group_managers.count_instances())
def test_one_instance_group_manager_no_instances(self):
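    """Ensures a zero count when the instance group manager has no instances."""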
models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
expected = {'base-name': 0}
actual = instance_group_managers.count_instances()
self.assertEqual(actual, expected)
def test_one_instance_group_manager(self):
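    """Ensures instances are counted for a single instance group manager."""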
models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
instances=[
ndb.Key(models.Instance, 'fake-key-1'),
ndb.Key(models.Instance, 'fake-key-2'),
],
).put()
expected = {'base-name': 2}
actual = instance_group_managers.count_instances()
self.assertEqual(actual, expected)
  def test_several_instance_group_managers(self):
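    """Ensures counts are aggregated per base name across instance group managers."""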
models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name-1',
'revision',
'zone-a',
),
instances=[
ndb.Key(models.Instance, 'fake-key-1'),
ndb.Key(models.Instance, 'fake-key-2'),
],
).put()
models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name-1',
'revision',
'zone-b',
),
instances=[
ndb.Key(models.Instance, 'fake-key-3'),
],
).put()
models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name-2',
'revision',
'zone',
),
instances=[
ndb.Key(models.Instance, 'fake-key-4'),
],
).put()
models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name-3',
'revision',
'zone',
),
).put()
expected = {'base-name-1': 3, 'base-name-2': 1, 'base-name-3': 0}
actual = instance_group_managers.count_instances()
self.assertEqual(actual, expected)
class CreateTest(test_case.TestCase):
"""Tests for instance_group_managers.create."""
def test_entity_doesnt_exist(self):
"""Ensures nothing happens when the entity doesn't exist."""
key = ndb.Key(models.InstanceGroupManager, 'fake-key')
instance_group_managers.create(key)
self.failIf(key.get())
def test_url_specified(self):
"""Ensures nothing happens when URL is already specified."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
url='url',
).put()
expected_url = 'url'
instance_group_managers.create(key)
self.assertEqual(key.get().url, expected_url)
def test_parent_doesnt_exist(self):
"""Ensures nothing happens when the parent doesn't exist."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
instance_group_managers.create(key)
self.failIf(key.get().url)
def test_parent_project_unspecified(self):
"""Ensures nothing happens when parent doesn't specify project."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
models.InstanceTemplateRevision(key=key.parent(), url='url').put()
instance_group_managers.create(key)
self.failIf(key.get().url)
def test_parent_url_unspecified(self):
"""Ensures nothing happens when parent doesn't specify URL."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
models.InstanceTemplateRevision(key=key.parent(), project='project').put()
instance_group_managers.create(key)
self.failIf(key.get().url)
def test_creates(self):
"""Ensures an instance group manager is created."""
def create_instance_group_manager(*args, **kwargs):
return {'targetLink': 'url'}
self.mock(
instance_group_managers.gce.Project,
'create_instance_group_manager',
create_instance_group_manager,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=2,
maximum_size=2,
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='project',
url='instance-template-url',
).put()
expected_url = 'url'
instance_group_managers.create(key)
self.assertEqual(key.get().url, expected_url)
def test_updates_when_already_created(self):
"""Ensures an instance group manager is updated when already created."""
def create_instance_group_manager(*args, **kwargs):
raise net.Error('', 409, '')
def get_instance_group_manager(*args, **kwargs):
return {'selfLink': 'url'}
self.mock(
instance_group_managers.gce.Project,
'create_instance_group_manager',
create_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=2,
maximum_size=2,
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='project',
url='instance-template-url',
).put()
expected_url = 'url'
instance_group_managers.create(key)
self.assertEqual(key.get().url, expected_url)
def test_doesnt_update_when_creation_fails(self):
"""Ensures an instance group manager is not updated when creation fails."""
def create_instance_group_manager(*args, **kwargs):
raise net.Error('', 400, '')
self.mock(
instance_group_managers.gce.Project,
'create_instance_group_manager',
create_instance_group_manager,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=2,
maximum_size=2,
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='project',
url='instance-template-url',
).put()
self.assertRaises(net.Error, instance_group_managers.create, key)
self.failIf(key.get().url)
class DeleteTest(test_case.TestCase):
"""Tests for instance_group_managers.delete."""
def test_entity_doesnt_exist(self):
"""Ensures nothing happens when the entity doesn't exist."""
key = ndb.Key(models.InstanceGroupManager, 'fake-key')
instance_group_managers.delete(key)
self.failIf(key.get())
def test_deletes(self):
"""Ensures an instance group manager is deleted."""
def json_request(url, *args, **kwargs):
return {'targetLink': url}
self.mock(instance_group_managers.net, 'json_request', json_request)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
url='url',
).put()
instance_group_managers.delete(key)
self.failIf(key.get().url)
def test_target_link_mismatch(self):
"""Ensures nothing happens when targetLink doesn't match."""
def json_request(*args, **kwargs):
return {'targetLink': 'mismatch'}
self.mock(instance_group_managers.net, 'json_request', json_request)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
url='url',
).put()
instance_group_managers.delete(key)
self.assertEqual(key.get().url, 'url')
def test_url_unspecified(self):
"""Ensures nothing happens when URL is unspecified."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
instance_group_managers.delete(key)
self.failIf(key.get().url)
def test_url_not_found(self):
"""Ensures URL is updated when the instance group manager is not found."""
def json_request(url, *args, **kwargs):
raise net.Error('', 404, '')
self.mock(instance_group_managers.net, 'json_request', json_request)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
url='url',
).put()
instance_group_managers.delete(key)
self.failIf(key.get().url)
def test_deletion_fails(self):
"""Ensures nothing happens when instance group manager deletion fails."""
def json_request(url, *args, **kwargs):
raise net.Error('', 400, '')
self.mock(instance_group_managers.net, 'json_request', json_request)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
url='url',
).put()
expected_url = 'url'
self.assertRaises(net.Error, instance_group_managers.delete, key)
self.assertEqual(key.get().url, expected_url)
class GetDrainedInstanceGroupManagersTest(test_case.TestCase):
"""Tests for instance_group_managers.get_drained_instance_group_managers."""
def test_no_entities(self):
"""Ensures nothing is returned when there are no entities."""
self.failIf(instance_group_managers.get_drained_instance_group_managers())
def test_nothing_active_or_drained(self):
"""Ensures nothing is returned when there are no active/drained entities."""
models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
self.failIf(instance_group_managers.get_drained_instance_group_managers())
def test_active_only(self):
"""Ensures nothing is returned when there are only active entities."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
active=[
key,
],
).put()
self.failIf(instance_group_managers.get_drained_instance_group_managers())
def test_drained(self):
"""Ensures drained entities are returned."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
drained=[
key,
],
).put()
expected_keys = [key]
self.assertItemsEqual(
instance_group_managers.get_drained_instance_group_managers(),
expected_keys,
)
def test_implicitly_drained(self):
"""Ensures implicitly drained entities are returned."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
active=[
key,
],
).put()
models.InstanceTemplate(
key=key.parent().parent(),
drained=[
key.parent(),
],
).put()
expected_keys = [
key,
]
self.assertItemsEqual(
instance_group_managers.get_drained_instance_group_managers(),
expected_keys,
)
class GetInstanceGroupManagerToDeleteTest(test_case.TestCase):
"""Tests for instance_group_managers.get_instance_group_manager_to_delete."""
def test_entity_doesnt_exist(self):
"""Ensures no URL when the entity doesn't exist."""
key = ndb.Key(models.InstanceGroupManager, 'fake-key')
self.failIf(
instance_group_managers.get_instance_group_manager_to_delete(key))
def test_instances(self):
"""Ensures no URL when there are active instances."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
instances=[
ndb.Key(models.Instance, 'fake-key'),
],
url='url',
).put()
self.failIf(
instance_group_managers.get_instance_group_manager_to_delete(key))
def test_url_unspecified(self):
"""Ensures no URL when URL is unspecified."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
self.failIf(
instance_group_managers.get_instance_group_manager_to_delete(key))
def test_returns_url(self):
"""Ensures URL is returned."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
url='url',
).put()
expected_url = 'url'
self.assertEqual(
instance_group_managers.get_instance_group_manager_to_delete(key),
expected_url,
)
class ResizeTest(test_case.TestCase):
"""Tests for instance_group_managers.resize."""
def test_entity_doesnt_exist(self):
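    """Ensures nothing happens when the entity doesn't exist."""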
def get_instance_group_manager(*args, **kwargs):
self.fail('get_instance_group_manager called')
def resize_managed_instance_group(*args, **kwargs):
self.fail('resize_managed_instance_group called')
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = ndb.Key(models.InstanceGroupManager, 'fake-key')
instance_group_managers.resize(key)
self.failIf(key.get())
def test_no_url(self):
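    """Ensures nothing happens when the URL is unspecified."""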
def get_instance_group_manager(*args, **kwargs):
self.fail('get_instance_group_manager called')
def resize_managed_instance_group(*args, **kwargs):
self.fail('resize_managed_instance_group called')
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=10,
maximum_size=10,
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='fake-project',
).put()
models.InstanceTemplate(key=key.parent().parent()).put()
instance_group_managers.resize(key)
def test_parent_doesnt_exist(self):
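    """Ensures nothing happens when the parent doesn't exist."""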
def get_instance_group_manager(*args, **kwargs):
self.fail('get_instance_group_manager called')
def resize_managed_instance_group(*args, **kwargs):
self.fail('resize_managed_instance_group called')
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=10,
maximum_size=10,
url='https://example.com',
).put()
models.InstanceTemplate(key=key.parent().parent()).put()
instance_group_managers.resize(key)
def test_no_project(self):
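    """Ensures nothing happens when the parent doesn't specify a project."""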
def get_instance_group_manager(*args, **kwargs):
self.fail('get_instance_group_manager called')
def resize_managed_instance_group(*args, **kwargs):
self.fail('resize_managed_instance_group called')
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=10,
maximum_size=10,
url='https://example.com',
).put()
models.InstanceTemplateRevision(
key=key.parent(),
).put()
models.InstanceTemplate(key=key.parent().parent()).put()
instance_group_managers.resize(key)
def test_resize_no_actions(self):
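    """Ensures no resize occurs when current actions are unavailable."""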
def get_instance_group_manager(*args, **kwargs):
return {
'name': 'name',
}
def resize_managed_instance_group(*args, **kwargs):
self.fail('resize_managed_instance_group called')
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=10,
maximum_size=10,
url='https://example.com',
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='fake-project',
).put()
models.InstanceTemplate(key=key.parent().parent()).put()
instance_group_managers.resize(key)
def test_resize_none_created(self):
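    """Ensures a resize up to the minimum size when no instances exist."""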
def get_instance_group_manager(*args, **kwargs):
return {
'currentActions': {
'none': 0,
},
'name': 'name',
'targetSize': 0,
}
def resize_managed_instance_group(_, name, zone, size):
self.assertEqual(name, 'name')
self.assertEqual(zone, 'zone')
self.assertEqual(size, 10)
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=10,
maximum_size=10,
url='https://example.com',
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='fake-project',
).put()
models.InstanceTemplate(key=key.parent().parent()).put()
instance_group_managers.resize(key)
def test_resize_some_created(self):
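    """Ensures an incremental resize when some instances already exist."""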
def get_instance_group_manager(*args, **kwargs):
return {
'currentActions': {
'none': 3,
},
'name': 'name',
'targetSize': 3,
}
def resize_managed_instance_group(_, name, zone, size):
self.assertEqual(name, 'name')
self.assertEqual(zone, 'zone')
self.assertEqual(size, 103)
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=1000,
maximum_size=1000,
url='https://example.com',
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='fake-project',
).put()
models.InstanceTemplate(key=key.parent().parent()).put()
instance_group_managers.resize(key)
def test_resize_all_created(self):
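    """Ensures no resize occurs when the minimum size is already met."""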
def get_instance_group_manager(*args, **kwargs):
return {
'currentActions': {
'none': 10,
},
'name': 'name',
'targetSize': 10,
}
def resize_managed_instance_group(_, name, zone, size):
      self.fail('resize_managed_instance_group called')
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=10,
maximum_size=10,
url='https://example.com',
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='fake-project',
).put()
models.InstanceTemplate(key=key.parent().parent()).put()
instance_group_managers.resize(key)
def test_resize_excess_created(self):
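    """Ensures no resize occurs when more than the minimum size exists."""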
def get_instance_group_manager(*args, **kwargs):
return {
'currentActions': {
'none': 2,
},
'name': 'name',
'targetSize': 2,
}
def resize_managed_instance_group(_, name, zone, size):
      self.fail('resize_managed_instance_group called')
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
minimum_size=1,
maximum_size=1,
url='https://example.com',
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='fake-project',
).put()
models.InstanceTemplate(key=key.parent().parent()).put()
instance_group_managers.resize(key)
def test_resize_other_revisions_created(self):
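    """Ensures instances of other revisions count toward the minimum size."""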
def get_instance_group_manager(*args, **kwargs):
return {
'currentActions': {
'none': 0,
},
'name': 'name',
'targetSize': 0,
}
def resize_managed_instance_group(_, name, zone, size):
self.assertEqual(name, 'name')
self.assertEqual(zone, 'zone')
self.assertEqual(size, 4)
self.mock(
instance_group_managers.gce.Project,
'get_instance_group_manager',
get_instance_group_manager,
)
self.mock(
instance_group_managers.gce.Project,
'resize_managed_instance_group',
resize_managed_instance_group,
)
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision-1',
'zone',
),
minimum_size=7,
maximum_size=7,
url='https://example.com',
).put()
models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision-2',
'zone',
),
instances=[
ndb.Key(models.Instance, 'instance-name-1'),
ndb.Key(models.Instance, 'instance-name-2'),
ndb.Key(models.Instance, 'instance-name-3'),
],
).put()
models.InstanceTemplateRevision(
key=key.parent(),
project='fake-project',
).put()
models.InstanceTemplate(key=key.parent().parent()).put()
instance_group_managers.resize(key)
class ScheduleCreationTest(test_case.TestCase):
"""Tests for instance_group_managers.schedule_creation."""
def setUp(self, *args, **kwargs):
def enqueue_task(taskqueue, key):
entity = key.get()
entity.url = key.urlsafe()
entity.put()
return True
super(ScheduleCreationTest, self).setUp(*args, **kwargs)
self.mock(instance_group_managers.utilities, 'enqueue_task', enqueue_task)
def test_enqueues_task(self):
"""Ensures a task is enqueued."""
key = instance_group_managers.get_instance_group_manager_key(
'base-name', 'revision', 'zone')
models.InstanceTemplate(
key=key.parent().parent(),
active=key.parent(),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
active=[
key,
],
url='url',
).put()
models.InstanceGroupManager(key=key).put()
expected_url = key.urlsafe()
instance_group_managers.schedule_creation()
self.assertEqual(key.get().url, expected_url)
def test_instance_template_revision_inactive(self):
"""Ensures no task is enqueued for inactive instance template revisions."""
key = instance_group_managers.get_instance_group_manager_key(
'base-name', 'revision', 'zone')
models.InstanceTemplate(
key=key.parent().parent(),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
active=[
key,
],
url='url',
).put()
models.InstanceGroupManager(key=key).put()
instance_group_managers.schedule_creation()
self.failIf(key.get().url)
def test_instance_template_revision_missing(self):
"""Ensures no task is enqueued for missing instance template revisions."""
key = instance_group_managers.get_instance_group_manager_key(
'base-name', 'revision', 'zone')
models.InstanceTemplate(
key=key.parent().parent(),
active=key.parent(),
).put()
models.InstanceGroupManager(key=key).put()
instance_group_managers.schedule_creation()
self.failIf(key.get().url)
def test_instance_template_revision_no_url(self):
"""Ensures no task is enqueued when instance template URL is missing."""
key = instance_group_managers.get_instance_group_manager_key(
'base-name', 'revision', 'zone')
models.InstanceTemplate(
key=key.parent().parent(),
active=key.parent(),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
active=[
key,
],
).put()
models.InstanceGroupManager(key=key).put()
instance_group_managers.schedule_creation()
self.failIf(key.get().url)
def test_instance_group_manager_inactive(self):
"""Ensures no task is enqueued for inactive instance group managers."""
key = instance_group_managers.get_instance_group_manager_key(
'base-name', 'revision', 'zone')
models.InstanceTemplate(
key=key.parent().parent(),
active=key.parent(),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
url='url',
).put()
models.InstanceGroupManager(key=key).put()
instance_group_managers.schedule_creation()
self.failIf(key.get().url)
def test_instance_group_manager_drained(self):
"""Ensures no task is enqueued for drained instance group managers."""
key = instance_group_managers.get_instance_group_manager_key(
'base-name', 'revision', 'zone')
models.InstanceTemplate(
key=key.parent().parent(),
active=key.parent(),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
drained=[
key,
],
url='url',
).put()
models.InstanceGroupManager(key=key).put()
instance_group_managers.schedule_creation()
self.failIf(key.get().url)
def test_instance_group_manager_missing(self):
"""Ensures no task is enqueued for missing instance group managers."""
key = instance_group_managers.get_instance_group_manager_key(
'base-name', 'revision', 'zone')
models.InstanceTemplate(
key=key.parent().parent(),
active=key.parent(),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
active=[
key,
],
url='url',
).put()
key = models.InstanceGroupManager().put()
instance_group_managers.schedule_creation()
self.failIf(key.get().url)
def test_instance_group_manager_already_created(self):
"""Ensures no task is enqueued for existing instance group managers."""
key = instance_group_managers.get_instance_group_manager_key(
'base-name', 'revision', 'zone')
models.InstanceTemplate(
key=key.parent().parent(),
active=key.parent(),
).put()
models.InstanceTemplateRevision(
key=key.parent(),
active=[
key,
],
url='instance-template-url',
).put()
models.InstanceGroupManager(key=key, url='url').put()
expected_url = 'url'
instance_group_managers.schedule_creation()
self.assertEqual(key.get().url, expected_url)
class UpdateURLTest(test_case.TestCase):
"""Tests for instance_group_managers.update_url."""
def test_entity_doesnt_exist(self):
"""Ensures nothing happens when the entity doesn't exist."""
key = ndb.Key(models.InstanceGroupManager, 'fake-key')
instance_group_managers.update_url(key, 'url')
self.failIf(key.get())
def test_url_matches(self):
"""Ensures nothing happens when the URL already matches."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
url='url',
).put()
instance_group_managers.update_url(key, 'url')
self.assertEqual(key.get().url, 'url')
def test_url_mismatch(self):
"""Ensures the URL is updated when it doesn't match."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
url='old-url',
).put()
instance_group_managers.update_url(key, 'new-url')
self.assertEqual(key.get().url, 'new-url')
def test_url_updated(self):
"""Ensures the URL is updated."""
key = models.InstanceGroupManager(
key=instance_group_managers.get_instance_group_manager_key(
'base-name',
'revision',
'zone',
),
).put()
instance_group_managers.update_url(key, 'url')
self.assertEqual(key.get().url, 'url')
if __name__ == '__main__':
unittest.main()
| 28.649867 | 80 | 0.636176 | 3,506 | 32,403 | 5.596406 | 0.055904 | 0.192141 | 0.155191 | 0.103155 | 0.903522 | 0.891698 | 0.867897 | 0.841649 | 0.81163 | 0.789002 | 0 | 0.004136 | 0.246459 | 32,403 | 1,130 | 81 | 28.675221 | 0.799443 | 0.079499 | 0 | 0.824945 | 0 | 0 | 0.099416 | 0.033645 | 0 | 0 | 0 | 0 | 0.02954 | 1 | 0.086433 | false | 0 | 0.008753 | 0.010941 | 0.115974 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
928f732a534552e1f617f13028a2be769854a691 | 9,558 | py | Python | pypeln/task/api/map_task_test.py | quarckster/pypeln | f4160d0f4d4718b67f79a0707d7261d249459a4b | [
"MIT"
] | 1,281 | 2018-09-20T05:35:27.000Z | 2022-03-30T01:29:48.000Z | pypeln/task/api/map_task_test.py | webclinic017/pypeln | 5231806f2cac9d2019dacbbcf913484fd268b8c1 | [
"MIT"
] | 78 | 2018-09-18T20:38:12.000Z | 2022-03-30T20:16:02.000Z | pypeln/task/api/map_task_test.py | webclinic017/pypeln | 5231806f2cac9d2019dacbbcf913484fd268b8c1 | [
"MIT"
] | 88 | 2018-09-24T10:46:14.000Z | 2022-03-28T09:34:50.000Z | import asyncio
import sys
import time
import typing as tp
from unittest import TestCase
import hypothesis as hp
from hypothesis import strategies as st
import pypeln as pl
MAX_EXAMPLES = 10
T = tp.TypeVar("T")
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_map_id(nums: tp.List[int]):
nums_py = nums
nums_pl = pl.task.map(lambda x: x, nums)
nums_pl = list(nums_pl)
assert nums_pl == nums_py
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_map_id_async(nums: tp.List[int]):
nums_py = nums
nums_pl = pl.task.map(lambda x: x, nums)
nums_pl = await nums_pl
assert nums_pl == nums_py
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_map_id_async_iterable(nums: tp.List[int]):
nums_py = nums
nums_pl = pl.task.map(lambda x: x, nums)
nums_pl = [x async for x in nums_pl]
assert nums_pl == nums_py
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_map_id_pipe(nums: tp.List[int]):
nums_pl = nums | pl.task.map(lambda x: x) | list
assert nums_pl == nums
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_map_square(nums: tp.List[int]):
nums_py = map(lambda x: x ** 2, nums)
nums_py = list(nums_py)
nums_pl = pl.task.map(lambda x: x ** 2, nums)
nums_pl = list(nums_pl)
assert nums_pl == nums_py
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_map_square_event_start(nums: tp.List[int]):
nums_py = map(lambda x: x ** 2, nums)
nums_py = list(nums_py)
namespace = pl.task.Namespace()
namespace.x = 0
def on_start():
namespace.x = 1
nums_pl = pl.task.map(lambda x: x ** 2, nums, on_start=on_start)
nums_pl = list(nums_pl)
assert nums_pl == nums_py
assert namespace.x == 1
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_map_square_event_start_async_1(nums: tp.List[int]):
nums_py = map(lambda x: x ** 2, nums)
nums_py = list(nums_py)
namespace = pl.task.Namespace()
namespace.x = 0
async def on_start():
namespace.x = 1
nums_pl = pl.task.map(lambda x: x ** 2, nums, on_start=on_start)
nums_pl = list(nums_pl)
assert nums_pl == nums_py
assert namespace.x == 1
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_map_square_event_start_async_2(nums: tp.List[int]):
nums_py = map(lambda x: x ** 2, nums)
nums_py = list(nums_py)
namespace = pl.task.Namespace()
namespace.x = 0
async def on_start():
namespace.x = 1
nums_pl = pl.task.map(lambda x: x ** 2, nums, on_start=on_start)
nums_pl = await nums_pl
assert nums_pl == nums_py
assert namespace.x == 1
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_map_square_event_start_async_3(nums: tp.List[int]):
nums_py = map(lambda x: x ** 2, nums)
nums_py = list(nums_py)
namespace = pl.task.Namespace()
namespace.x = 0
async def on_start():
await asyncio.sleep(0.01)
namespace.x = 1
nums_pl = pl.task.map(lambda x: x ** 2, nums, on_start=on_start)
nums_pl = await nums_pl
assert nums_pl == nums_py
assert namespace.x == 1
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_map_square_event_start_async(nums: tp.List[int]):
nums_py = map(lambda x: x ** 2, nums)
nums_py = list(nums_py)
namespace = pl.task.Namespace()
namespace.x = 0
def on_start():
namespace.x = 1
nums_pl = pl.task.map(lambda x: x ** 2, nums, on_start=on_start)
nums_pl = await nums_pl
assert nums_pl == nums_py
assert namespace.x == 1
def test_timeout():
nums = list(range(10))
async def f(x):
if x == 2:
while True:
await asyncio.sleep(0.02)
return x
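    # The worker handling x == 2 hangs forever; the 0.01 s timeout drops it,
    # so only 9 of the 10 inputs yield results.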
nums_pl = pl.task.map(f, nums, timeout=0.01)
nums_pl = list(nums_pl)
assert len(nums_pl) == 9
@pl.task.utils.run_test_async
async def test_timeout_async():
nums = list(range(10))
async def f(x):
if x == 2:
while True:
await asyncio.sleep(0.1)
return x
nums_pl = pl.task.map(f, nums, timeout=0.01)
nums_pl = await nums_pl
assert len(nums_pl) == 9
def test_worker_info():
nums = range(100)
n_workers = 4
def on_start(worker_info):
return dict(worker_info=worker_info)
nums_pl = pl.task.map(
lambda x, worker_info: worker_info.index,
nums,
on_start=on_start,
workers=n_workers,
)
nums_pl = set(nums_pl)
assert nums_pl.issubset(set(range(n_workers)))
@pl.task.utils.run_test_async
async def test_worker_info_async():
nums = range(100)
n_workers = 4
def on_start(worker_info):
return dict(worker_info=worker_info)
nums_pl = pl.task.map(
lambda x, worker_info: worker_info.index,
nums,
on_start=on_start,
workers=n_workers,
)
nums_pl = await nums_pl
assert set(nums_pl).issubset(set(range(n_workers)))
def test_kwargs():
nums = range(100)
n_workers = 4
letters = "abc"
namespace = pl.task.Namespace()
namespace.on_done = None
def on_start():
return dict(y=letters)
def on_done(y):
namespace.on_done = y
nums_pl = pl.task.map(
lambda x, y: y,
nums,
on_start=on_start,
on_done=on_done,
workers=n_workers,
)
nums_pl = list(nums_pl)
assert namespace.on_done == letters
assert nums_pl == [letters] * len(nums)
@pl.task.utils.run_test_async
async def test_kwargs_async():
nums = range(100)
n_workers = 4
letters = "abc"
namespace = pl.task.Namespace()
namespace.on_done = None
def on_start():
return dict(y=letters)
def on_done(y):
namespace.on_done = y
nums_pl = pl.task.map(
lambda x, y: y,
nums,
on_start=on_start,
on_done=on_done,
workers=n_workers,
)
nums_pl = await nums_pl
assert namespace.on_done == letters
assert nums_pl == [letters] * len(nums)
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
def test_map_square_event_end(nums: tp.List[int]):
namespace = pl.task.Namespace()
namespace.x = 0
namespace.done = False
namespace.active_workers = -1
def on_start():
namespace.x = 1
def on_done(stage_status):
namespace.x = 2
namespace.active_workers = stage_status.active_workers
namespace.done = stage_status.done
nums_pl = pl.task.map(
lambda x: x ** 2, nums, workers=3, on_start=on_start, on_done=on_done
)
nums_pl = list(nums_pl)
time.sleep(0.1)
assert namespace.x == 2
assert namespace.done == True
assert namespace.active_workers == 0
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_map_square_event_end_async(nums: tp.List[int]):
namespace = pl.task.Namespace()
namespace.x = 0
namespace.done = False
namespace.active_workers = -1
def on_start():
namespace.x = 1
def on_done(stage_status):
namespace.x = 2
namespace.active_workers = stage_status.active_workers
namespace.done = stage_status.done
nums_pl = pl.task.map(
lambda x: x ** 2, nums, workers=3, on_start=on_start, on_done=on_done
)
nums_pl = await nums_pl
await asyncio.sleep(0.1)
assert namespace.x == 2
assert namespace.done == True
assert namespace.active_workers == 0
@hp.given(nums=st.lists(st.integers()))
@hp.settings(max_examples=MAX_EXAMPLES)
@pl.task.utils.run_test_async
async def test_map_square_workers_async(nums: tp.List[int]):
nums_py = map(lambda x: x ** 2, nums)
nums_py = list(nums_py)
nums_pl = pl.task.map(lambda x: x ** 2, nums, workers=2)
nums_pl = await nums_pl
assert sorted(nums_pl) == sorted(nums_py)
class MyError(Exception):
pass
def test_error_handling():
error = None
def raise_error(x):
raise MyError()
stage = pl.task.map(raise_error, range(10))
try:
list(stage)
except MyError as e:
error = e
assert isinstance(error, MyError)
@pl.task.utils.run_test_async
async def test_error_handling_async():
error = None
def raise_error(x):
raise MyError()
stage = pl.task.map(raise_error, range(10))
try:
await stage
except MyError as e:
error = e
assert isinstance(error, MyError)
def test_maxsize():
namespace = pl.task.utils.Namespace(count=0)
def f(x) -> tp.Any:
namespace.count += 1
return x
stage = pl.task.map(f, range(20))
stage = pl.task.to_iterable(stage, maxsize=3)
iterator = iter(stage)
next(iterator)
time.sleep(0.1)
# + 1 element which was yieled on next(...)
# + 3 elements which are on the queue.
# + 1 element which it pending to be put.
# -------------------------------------------
# + 5 total
assert namespace.count == 5
| 21.478652 | 77 | 0.645428 | 1,493 | 9,558 | 3.920295 | 0.081045 | 0.075859 | 0.041005 | 0.037588 | 0.878524 | 0.866906 | 0.856826 | 0.836494 | 0.836494 | 0.808816 | 0 | 0.01484 | 0.231534 | 9,558 | 444 | 78 | 21.527027 | 0.782029 | 0.017995 | 0 | 0.796552 | 0 | 0 | 0.000746 | 0 | 0 | 0 | 0 | 0 | 0.113793 | 1 | 0.089655 | false | 0.003448 | 0.027586 | 0.013793 | 0.144828 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2bc6f97afcf8cc8c80f54c33cca76423d8160c0c | 90 | py | Python | bluecanary/scripts/update_tags/__init__.py | alex-voxy/blue-canary | b12af02ecaf242640b52ecac9355917e58da6427 | [
"MIT"
] | 7 | 2018-02-19T21:53:54.000Z | 2022-03-13T01:02:14.000Z | bluecanary/scripts/update_tags/__init__.py | alex-voxy/blue-canary | b12af02ecaf242640b52ecac9355917e58da6427 | [
"MIT"
] | 1 | 2017-02-23T21:03:36.000Z | 2017-02-23T21:03:36.000Z | bluecanary/scripts/update_tags/__init__.py | alex-voxy/blue-canary | b12af02ecaf242640b52ecac9355917e58da6427 | [
"MIT"
] | 1 | 2016-02-29T00:45:22.000Z | 2016-02-29T00:45:22.000Z | from .update_ec2_tags import update_ec2_tags
from .update_elb_tags import update_elb_tags
| 30 | 44 | 0.888889 | 16 | 90 | 4.5 | 0.375 | 0.277778 | 0.361111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02439 | 0.088889 | 90 | 2 | 45 | 45 | 0.853659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
2bcdff68fc15c1d2be8bbcc54755eeab9bbf71db | 20,881 | py | Python | src/hydrotoolbox/baseflow_sep.py | timcera/hydrotoolbox | 4788f97d34f9148cec7b07df66606cf2d7ea1ef3 | [
"BSD-3-Clause"
] | 3 | 2022-02-15T00:59:39.000Z | 2022-02-15T07:37:05.000Z | src/hydrotoolbox/baseflow_sep.py | timcera/hydrotoolbox | 4788f97d34f9148cec7b07df66606cf2d7ea1ef3 | [
"BSD-3-Clause"
] | null | null | null | src/hydrotoolbox/baseflow_sep.py | timcera/hydrotoolbox | 4788f97d34f9148cec7b07df66606cf2d7ea1ef3 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Tools for hydrology.
hydrotoolbox baseflow_sep --area 59.1 --area_units 'mile**2' linear < daily.csv
hydrotoolbox baseflow_sep sliding < daily.csv
hydrotoolbox baseflow_sep eckhardt,sliding < daily.csv
...
hydrotoolbox recession """
from __future__ import absolute_import, division, print_function
import warnings
import numpy as np
import pandas as pd
from tstoolbox import tsutils
from .baseflow.comparison import strict_baseflow
from .baseflow.separation import separation
warnings.filterwarnings("ignore")
def bfsep(Q, method, print_input, bfi=False, date=None, area=None, ice_period=None):
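    """Apply ``method`` to each column of ``Q`` via ``separation``; ``bfi=False``
    (index 0) selects the baseflow series, and the output columns are labeled
    with the lowercased method name."""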
ntsd = pd.DataFrame()
if print_input is True:
ntsd = Q.copy()
Qb = pd.DataFrame()
for col in Q.columns:
ndf = pd.DataFrame(
separation(
Q.loc[:, col].astype("float64").values,
date=date,
area=area,
ice_period=ice_period,
method=method,
)[bfi],
index=Q.index,
)
ndf.columns = [col]
Qb = Qb.join(ndf, how="outer")
return tsutils.return_input(print_input, ntsd, Qb, suffix=method.lower())
@tsutils.doc(tsutils.docstrings)
def boughton(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""Boughton double-parameter filter (Boughton, 2004)
    Qb[i] = C / (1 + C) * Q[i] + k / (1 + C) * Qb[i-1]
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "Boughton", print_input)
@tsutils.doc(tsutils.docstrings)
def chapman(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""Chapman filter (Chapman, 1991)
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "Chapman", print_input)
@tsutils.doc(tsutils.docstrings)
def cm(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""CM filter (Chapman and Maxwell, 1996)
    Qb[i] = (1 - k) / (2 - k) * Q[i] + k / (2 - k) * Qb[i-1]
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "CM", print_input)
@tsutils.doc(tsutils.docstrings)
def eckhardt(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""Eckhardt filter (Eckhardt, 2005)
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "Eckhardt", print_input)
@tsutils.doc(tsutils.docstrings)
def ewma(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""Exponential Weighted Moving Average (EWMA) filter (Tularam and Ilahee, 2008)
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "EWMA", print_input)
@tsutils.doc(tsutils.docstrings)
def usgs_hysep_fixed(
area=None,
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""USGS HYSEP Fixed interval method.
Sloto, Ronald A., and Michele Y. Crouse. “HYSEP: A Computer Program for
Streamflow Hydrograph Separation and Analysis.” USGS Numbered Series.
Water-Resources Investigations Report. Geological Survey (U.S.), 1996.
http://pubs.er.usgs.gov/publication/wri964040
Parameters
----------
area: float
basin area in mile^2
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "Fixed", print_input, area=area)
@tsutils.doc(tsutils.docstrings)
def furey(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""Furey digital filter (Furey and Gupta, 2001, 2003)
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "Furey", print_input)
@tsutils.doc(tsutils.docstrings)
def lh(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""LH digital filter (Lyne and Hollick, 1979)
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "LH", print_input)
@tsutils.doc(tsutils.docstrings)
def usgs_hysep_local(
area=None,
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""USGS HYSEP Local minimum graphical method (Sloto and Crouse, 1996)
Parameters
----------
area: float
basin area in mile^2
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "Local", print_input, area=area)
@tsutils.doc(tsutils.docstrings)
def ihacres(
k,
C,
a,
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""IHACRES
Parameters
----------
k: float
k
coefficient
C: float
C
coefficient
a: float
a
coefficient
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
ntsd = pd.DataFrame()
if print_input is True:
ntsd = Q.copy()
Qb = Q.copy()
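    # Recursive digital filter applied independently to each column:
    #   Qb[i] = k / (1 + C) * Qb[i-1] + C / (1 + C) * (Q[i] + a * Q[i-1])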
for col in range(len(Q.columns)):
for row in range(1, len(Q.index)):
Qb.iloc[row, col] = k / (1 + C) * Qb.iloc[row - 1, col] + C / (1 + C) * (
Q.iloc[row, col] + a * Q.iloc[row - 1, col]
)
    # Baseflow cannot exceed total streamflow, so cap Qb at Q.
    Qb.mask(Qb > Q, other=Q, inplace=True)
return tsutils.return_input(print_input, ntsd, Qb, suffix="ihacres")
@tsutils.doc(tsutils.docstrings)
def usgs_hysep_slide(
area=None,
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""USGS HYSEP sliding interval method
    The USGS HYSEP sliding interval method described in
    `Sloto and Crouse, 1996`.
    The flow series is filtered with scipy.ndimage.generic_filter1d using
    the numpy.nanmin function over a window of size `size`.
Sloto, Ronald A., and Michele Y. Crouse. “HYSEP: A Computer Program for
Streamflow Hydrograph Separation and Analysis.” USGS Numbered Series.
Water-Resources Investigations Report. Geological Survey (U.S.), 1996.
http://pubs.er.usgs.gov/publication/wri964040.
Parameters
----------
    area: float
        Area of the watershed in square miles.
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "Slide", print_input, area=area)
@tsutils.doc(tsutils.docstrings)
def ukih(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""Graphical method developed by UK Institute of Hydrology (UKIH, 1980)
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "UKIH", print_input)
@tsutils.doc(tsutils.docstrings)
def willems(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""Digital filter (Willems, 2009)
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
return bfsep(Q, "Willems", print_input)
@tsutils.doc(tsutils.docstrings)
def five_day(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""Value kept if less than 90 percent of adjacent 5-day blocks.
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
${print_input}
${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
ntsd = pd.DataFrame()
if print_input is True:
ntsd = Q.copy()
ndf = pd.DataFrame()
for col in Q:
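        # Collapse the column to non-overlapping 5-day block minima; a block
        # minimum survives as a baseflow turning point only when 90% of it
        # does not exceed either neighbouring block's minimum.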
vals = Q[col].groupby(pd.Grouper(freq="5D")).min().astype("float64")
srccol = 0.9 * vals
        nextvals = vals.shift(-1)
        prevvals = vals.shift(1)
        mask = (srccol > nextvals) | (srccol > prevvals)
        # ``== np.nan`` is always False; use isna() to locate the gaps
        nanmask = Q[col].isna()
vals.loc[mask] = None
vals = vals.interpolate(method="linear")
vals.loc[nanmask] = None
vals = vals.reindex(Q.index).ffill()
ltmask = Q[col] < vals
vals.loc[ltmask] = Q.loc[ltmask, col]
ndf = ndf.join(vals, how="outer")
return tsutils.return_input(print_input, ntsd, ndf, suffix="five_day")
@tsutils.doc(tsutils.docstrings)
def strict(
input_ts="-",
columns=None,
source_units=None,
start_date=None,
end_date=None,
dropna="no",
clean=False,
round_index=None,
skiprows=None,
index_type="datetime",
names=None,
target_units=None,
print_input=False,
):
"""Return "strict" baseflow.
Parameters
----------
input_ts
Streamflow
${columns}
${source_units}
${start_date}
${end_date}
${dropna}
${clean}
${round_index}
${skiprows}
${index_type}
${names}
${target_units}
    ${print_input}
    ${tablefmt}
"""
Q = tsutils.common_kwds(
tsutils.read_iso_ts(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
),
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna=dropna,
clean=clean,
source_units=source_units,
target_units=target_units,
)
ntsd = pd.DataFrame()
if print_input is True:
ntsd = Q.copy()
Qb = pd.DataFrame()
for col in Q.columns:
nstrict = strict_baseflow(Q[col].astype("float64").values)
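        # strict_baseflow() flags the samples that are unambiguously
        # baseflow; keep those points and fill the gaps between them with
        # time-weighted interpolation over the original index.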
vals = (
pd.DataFrame(
Q.loc[nstrict, col].astype("float64").values, index=Q.index[nstrict]
)
.reindex(Q.index)
.interpolate(method="time", axis="index")
)
Qb = Qb.join(vals, how="outer")
Qb.columns = Q.columns
    # Cap the interpolated baseflow at the observed streamflow.
    Qb.mask(Qb > Q, other=Q, inplace=True)
return tsutils.return_input(print_input, ntsd, Qb, suffix="strict")
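if __name__ == "__main__":
    # Hedged smoke test of the pure-Python Lyne-Hollick sketch defined
    # above; the flow values are illustrative assumptions, not package data.
    print(_lh_sketch([5.0, 9.0, 14.0, 11.0, 8.0, 6.5, 5.5, 5.0]))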
| 21.796451 | 85 | 0.574637 | 2,351 | 20,881 | 4.879626 | 0.097831 | 0.057531 | 0.043149 | 0.041841 | 0.824093 | 0.807357 | 0.805614 | 0.777807 | 0.777807 | 0.747995 | 0 | 0.007218 | 0.303386 | 20,881 | 957 | 86 | 21.819227 | 0.781452 | 0.24994 | 0 | 0.823427 | 0 | 0 | 0.021869 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027972 | false | 0 | 0.012238 | 0 | 0.068182 | 0.064685 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a6de04fe969b8c52672332ad11d80351ecab777c | 177 | py | Python | datasets/__init__.py | alex-kj-chin/ConstellationNet | 28bb44beb735654381a10c8c25d2cfdedbdb43bc | [
"Apache-2.0"
] | 8 | 2021-09-22T03:50:10.000Z | 2022-02-03T18:55:38.000Z | datasets/__init__.py | alex-kj-chin/ConstellationNet | 28bb44beb735654381a10c8c25d2cfdedbdb43bc | [
"Apache-2.0"
] | null | null | null | datasets/__init__.py | alex-kj-chin/ConstellationNet | 28bb44beb735654381a10c8c25d2cfdedbdb43bc | [
"Apache-2.0"
] | 6 | 2021-11-29T09:05:38.000Z | 2022-03-31T07:45:49.000Z | from .datasets import make
from . import mini_imagenet
from . import tiered_imagenet
from . import image_folder
from . import samplers
from . import cifar_fs
from . import fc100 | 25.285714 | 29 | 0.80791 | 26 | 177 | 5.346154 | 0.5 | 0.431655 | 0.258993 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02 | 0.152542 | 177 | 7 | 30 | 25.285714 | 0.906667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a6f0f6b519bf07092ddbcf788a60ab3d7c12a1b9 | 11,628 | py | Python | netapp_activeiq_api/apis/watchlist_api.py | woutercoppens/netapp-activeiq-api | a8f86355ecdd769953b69e38824b4db07c11c89e | [
"Apache-2.0"
] | 3 | 2021-09-28T23:22:59.000Z | 2021-11-23T14:53:54.000Z | netapp_activeiq_api/apis/watchlist_api.py | woutercoppens/netapp-activeiq-api | a8f86355ecdd769953b69e38824b4db07c11c89e | [
"Apache-2.0"
] | null | null | null | netapp_activeiq_api/apis/watchlist_api.py | woutercoppens/netapp-activeiq-api | a8f86355ecdd769953b69e38824b4db07c11c89e | [
"Apache-2.0"
] | 1 | 2021-04-01T11:22:23.000Z | 2021-04-01T11:22:23.000Z | # coding: utf-8
from .api_client import ApiClient
class WatchlistApi:
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_watchlist(self, request_body, **kwargs): # noqa: E501
"""Creates a watchlist with customers, sites, groups, global parents, domestic parents, districts, regions, serial numbers, partners, and sales representatives. # noqa: E501
Returns the watchlist ID and watchlist name after successful creation of watchlist. # noqa: E501
:param RequestBody2 request_body: (required)
"""
all_params = ["request_body"] # noqa: E501
params = locals()
for key, val in params["kwargs"].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_watchlist" % key
)
params[key] = val
del params["kwargs"]
# verify the required parameter 'request_body' is set
if "request_body" not in params or params["request_body"] is None:
raise ValueError(
"Missing the required parameter `request_body` when calling `create_watchlist`"
) # noqa: E501
path_params = {}
query_params = []
header_params = {}
body_params = None
if "request_body" in params:
body_params = params["request_body"]
return self.api_client.call_api(
"/v1/watchlist/create",
"POST",
path_params,
query_params,
header_params,
body=body_params,
)
def default_watchlist(self, watchlist_id, **kwargs): # noqa: E501
"""Update one of the created watchlist as default, based on the user input. # noqa: E501
Update one of created watchlist as default as per User input. # noqa: E501
:param str authorization_token: Access token to use the API services. (required)
:param str watchlist_id: Unique identifier for the watchlist ID. (required)
"""
all_params = ["watchlist_id"] # noqa: E501
params = locals()
for key, val in params["kwargs"].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method default_watchlist" % key
)
params[key] = val
del params["kwargs"]
# verify the required parameter 'watchlist_id' is set
if "watchlist_id" not in params or params["watchlist_id"] is None:
raise ValueError(
"Missing the required parameter `watchlist_id` when calling `default_watchlist`"
) # noqa: E501
path_params = {}
if "watchlist_id" in params:
path_params["watchlistId"] = params["watchlist_id"] # noqa: E501
query_params = []
header_params = {}
body_params = None
return self.api_client.call_api(
"/v1/watchlist/default/watchlistId/{watchlistId}",
"PUT",
path_params,
query_params,
header_params,
body=body_params,
)
def delete_watchlist(self, watchlist_id, **kwargs): # noqa: E501
"""Specifies the watchlist to be deleted by given Watchlist ID. # noqa: E501
Delete the watchlist for valid Watchlist ID. # noqa: E501
:param str watchlist_id: Unique identifier for the watchlist ID. (required)
"""
all_params = ["watchlist_id"] # noqa: E501
params = locals()
for key, val in params["kwargs"].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_watchlist" % key
)
params[key] = val
del params["kwargs"]
# verify the required parameter 'watchlist_id' is set
if "watchlist_id" not in params or params["watchlist_id"] is None:
raise ValueError(
"Missing the required parameter `watchlist_id` when calling `delete_watchlist`"
) # noqa: E501
path_params = {}
if "watchlist_id" in params:
path_params["watchlistId"] = params["watchlist_id"] # noqa: E501
query_params = []
header_params = {}
body_params = None
return self.api_client.call_api(
"/v1/watchlist/delete/watchlistId/{watchlistId}",
"DELETE",
path_params,
query_params,
header_params,
body=body_params,
)
def list_watchlist(self, **kwargs): # noqa: E501
"""Provide list of watchlist details for the user. # noqa: E501
Returns the list of watchlist details based on User ID. # noqa: E501
"""
all_params = [] # noqa: E501
params = locals()
for key, val in params["kwargs"].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_watchlist" % key
)
params[key] = val
del params["kwargs"]
path_params = {}
query_params = []
header_params = {}
body_params = None
return self.api_client.call_api(
"/v1/watchlist/all",
"GET",
path_params,
query_params,
header_params,
body=body_params,
)
def share_watchlist(self, request_body, **kwargs): # noqa: E501
"""The created watchlist, user can share among internal other users. # noqa: E501
User can share the created watchlist with other internal users. # noqa: E501
:param RequestBody3 request_body: (required)
"""
all_params = ["request_body"] # noqa: E501
params = locals()
for key, val in params["kwargs"].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method share_watchlist" % key
)
params[key] = val
del params["kwargs"]
# verify the required parameter 'request_body' is set
if "request_body" not in params or params["request_body"] is None:
raise ValueError(
"Missing the required parameter `request_body` when calling `share_watchlist`"
) # noqa: E501
path_params = {}
query_params = []
header_params = {}
body_params = None
if "request_body" in params:
body_params = params["request_body"]
return self.api_client.call_api(
"/v1/watchlist/share",
"PUT",
path_params,
query_params,
header_params,
body=body_params,
)
def system_count(self, watchlist_id, **kwargs): # noqa: E501
"""Get the total system count for the given Watchlist ID. # noqa: E501
Get the total system count for the given Watchlist ID. # noqa: E501
:param str watchlist_id: Unique identifier for the Watchlist. (required)
"""
all_params = ["watchlist_id"] # noqa: E501
params = locals()
for key, val in params["kwargs"].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method system_count" % key
)
params[key] = val
del params["kwargs"]
# verify the required parameter 'watchlist_id' is set
if "watchlist_id" not in params or params["watchlist_id"] is None:
raise ValueError(
"Missing the required parameter `watchlist_id` when calling `system_count`"
) # noqa: E501
path_params = {}
if "watchlist_id" in params:
path_params["watchlistId"] = params["watchlist_id"] # noqa: E501
query_params = []
header_params = {}
body_params = None
return self.api_client.call_api(
"/v1/watchlist/count/watchlistId/{watchlistId}",
"GET",
path_params,
query_params,
header_params,
body=body_params,
)
def update_watchlist(self, request_body, **kwargs): # noqa: E501
"""Updates a watchlist with customers, sites, groups, global parents, domestic parents, districts, regions, serial numbers, partners, and sales representatives. # noqa: E501
        Returns the watchlist ID and watchlist name after successful update of the watchlist. # noqa: E501
:param RequestBody4 request_body: (required)
"""
all_params = ["request_body"] # noqa: E501
params = locals()
for key, val in params["kwargs"].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_watchlist" % key
)
params[key] = val
del params["kwargs"]
# verify the required parameter 'request_body' is set
if "request_body" not in params or params["request_body"] is None:
raise ValueError(
"Missing the required parameter `request_body` when calling `update_watchlist`"
) # noqa: E501
path_params = {}
query_params = []
header_params = {}
body_params = None
if "request_body" in params:
body_params = params["request_body"]
return self.api_client.call_api(
"/v1/watchlist/update",
"PUT",
path_params,
query_params,
header_params,
body=body_params,
)
def view_watchlist(self, id, **kwargs): # noqa: E501
"""Provides details about the watchlists that a user created by using customers, sites, groups, global parents, domestic parents, districts, regions, serial numbers, partners, and sales representatives. # noqa: E501
Returns the watchlist details based on user provided watchlist ID. # noqa: E501
        :param str id: Unique identifier for the watchlist. (required)
"""
all_params = ["id"] # noqa: E501
params = locals()
for key, val in params["kwargs"].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view_watchlist" % key
)
params[key] = val
del params["kwargs"]
# verify the required parameter 'id' is set
if "id" not in params or params["id"] is None:
raise ValueError(
"Missing the required parameter `id` when calling `view_watchlist`"
) # noqa: E501
path_params = {}
if "id" in params:
path_params["id"] = params["id"] # noqa: E501
query_params = []
header_params = {}
body_params = None
return self.api_client.call_api(
"/v1/watchlist/id/{id}",
"GET",
path_params,
query_params,
header_params,
body=body_params,
)
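if __name__ == "__main__":
    # Hedged usage sketch: assumes an ApiClient configured elsewhere in this
    # package with the Active IQ base URL and credentials; without them the
    # call below will fail at the HTTP layer.
    api = WatchlistApi()
    print(api.list_watchlist())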
| 33.413793 | 224 | 0.560888 | 1,258 | 11,628 | 5.032591 | 0.108903 | 0.054336 | 0.042963 | 0.058127 | 0.835255 | 0.808877 | 0.793398 | 0.763387 | 0.757226 | 0.747749 | 0 | 0.01874 | 0.352941 | 11,628 | 347 | 225 | 33.510086 | 0.822701 | 0.235552 | 0 | 0.711207 | 0 | 0 | 0.209775 | 0.018458 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038793 | false | 0 | 0.00431 | 0 | 0.081897 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a6ff6d891d9a5891bb39243adf0a5db6144a3141 | 55 | py | Python | dcc/packages/oop_dcc/core/dcc_environment.py | nicholas-silveira/art_pipeline | 44eda0657d97977d3cdb09fb4c3aff62bd1457e6 | [
"BSD-3-Clause"
] | 11 | 2015-04-28T06:18:47.000Z | 2022-02-06T01:53:50.000Z | dcc/packages/oop_dcc/core/dcc_environment.py | nicholas-silveira/art_pipeline | 44eda0657d97977d3cdb09fb4c3aff62bd1457e6 | [
"BSD-3-Clause"
] | null | null | null | dcc/packages/oop_dcc/core/dcc_environment.py | nicholas-silveira/art_pipeline | 44eda0657d97977d3cdb09fb4c3aff62bd1457e6 | [
"BSD-3-Clause"
] | 6 | 2015-12-15T06:22:00.000Z | 2021-11-08T04:54:58.000Z |
import sys
def get_environment():
    """Return the interpreter's module search path (sys.path)."""
    return sys.path
5b3881ffd6b604d24146a55e958f836bd4fe557d | 291,238 | py | Python | dojo/db_migrations/0001_squashed_0090_index_duplicate_finding.py | mtcolman/django-DefectDojo | 76175aca446e077884bdb5e1d8e2a671a0840775 | [
"BSD-3-Clause"
] | 249 | 2016-09-06T21:04:40.000Z | 2018-01-19T15:59:44.000Z | dojo/db_migrations/0001_squashed_0090_index_duplicate_finding.py | mtcolman/django-DefectDojo | 76175aca446e077884bdb5e1d8e2a671a0840775 | [
"BSD-3-Clause"
] | 275 | 2021-02-19T15:16:15.000Z | 2022-03-31T21:09:29.000Z | dojo/db_migrations/0001_squashed_0090_index_duplicate_finding.py | mtcolman/django-DefectDojo | 76175aca446e077884bdb5e1d8e2a671a0840775 | [
"BSD-3-Clause"
] | 152 | 2016-09-06T21:04:54.000Z | 2018-01-18T08:52:24.000Z | # Generated by Django 3.1.13 on 2021-10-09 17:53
from django.conf import settings
import django.contrib.auth.models
import django.core.validators
from django.db import migrations, models
import django.db.migrations.operations.special
import django.db.models.deletion
import django.utils.timezone
import django_extensions.db.fields
import dojo.models
import multiselectfield.db.fields
import tagulous.models.fields
import tagulous.models.models
import logging
logger = logging.getLogger(__name__)
# Functions from the following migrations need manual copying.
# Move them and any dependencies into this file, then update the
# RunPython operations to refer to the local versions:
# dojo.db_migrations.0001_initial
# dojo.db_migrations.0018_sonarqube_api_integration
# dojo.db_migrations.0042_risk_acceptance_improvements
# dojo.db_migrations.0047_jira_minimum_severity_default
# dojo.db_migrations.0049_create_endpoint_status
# dojo.db_migrations.0061_jira_webhook_secret
# dojo.db_migrations.0064_jira_refactor_populate
# dojo.db_migrations.0065_delete_empty_jira_project_configs
# dojo.db_migrations.0066_django_tagulous
# dojo.db_migrations.0069_risk_acceptance
# dojo.db_migrations.0082_last_status_update_populate
# dojo.db_migrations.0090_index_duplicate_finding
# VS 2021-10-09: All RunPython statements above removed, as they are not needed for fresh installs!
def log_me_0001(apps, schema_editor):
logger.debug('0001 started!')
def log_me_0090(apps, schema_editor):
logger.debug('0090 started!')
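# MySQL only: convert the dojo_sonarqube_issue table to utf8mb4 with the
# binary (case-sensitive) utf8mb4_bin collation.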
def update_collation(apps, schema_editor):
print('Database vendor: {}'.format(schema_editor.connection.vendor))
if not schema_editor.connection.vendor.startswith('mysql'):
return
schema_editor.execute('ALTER TABLE dojo_sonarqube_issue CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin')
def rollback_collation(apps, schema_editor):
pass
class Migration(migrations.Migration):
replaces = [('dojo', '0001_initial'), ('dojo', '0002_auto_20190503_1817'), ('dojo', '0003_test_title'), ('dojo', '0004_cve_field'), ('dojo', '0005_repo_field'), ('dojo', '0006_django2_upgrade'), ('dojo', '0007_note_additions'), ('dojo', '0008_finding_mitigation'), ('dojo', '0009_endpoint_remediation'), ('dojo', '0010_jira_conf_configuration_name'), ('dojo', '0011_finding_template_activity'), ('dojo', '0012_jira_finding_age'), ('dojo', '0013_jira_info_level'), ('dojo', '0014_jira_conf_resolution_mappings'), ('dojo', '0015_findingimage_caption'), ('dojo', '0016_increase_filepath_length'), ('dojo', '0017_auto_20190827_1421'), ('dojo', '0018_sonarqube_api_integration'), ('dojo', '0019_notetype_additions'), ('dojo', '0020_system_settings_allow_anonymous_survey_repsonse'), ('dojo', '0021_cve_index'), ('dojo', '0022_google_sheet_sync_additions'), ('dojo', '0023_SAST_track_unique_vulnerabilities'), ('dojo', '0024_cve_fix_1553'), ('dojo', '0025_jira_security_issuetype'), ('dojo', '0026_login_banner'), ('dojo', '0027_jira_issue_type_settings'), ('dojo', '0028_finding_indices'), ('dojo', '0029_cve_regex'), ('dojo', '0030_prod_type_meta'), ('dojo', '0031_finding_component'), ('dojo', '0032_system_settings_enable_auditlog'), ('dojo', '0033_remove_finding_duplicate_list'), ('dojo', '0034_add_github_support'), ('dojo', '0035_push_all_issues_help_text_rename_gh_fields'), ('dojo', '0036_system_settings_email_address'), ('dojo', '0037_email_notification_overhaul'), ('dojo', '0038_timezone_update'), ('dojo', '0039_test_version'), ('dojo', '0040_finding_cwe_index'), ('dojo', '0041_engagement_survey_import'), ('dojo', '0042_risk_acceptance_improvements'), ('dojo', '0043_finding_hash_code_index'), ('dojo', '0044_required_prod_type'), ('dojo', '0045_slack_help_text'), ('dojo', '0046_endpoint_status'), ('dojo', '0047_jira_minimum_severity_default'), ('dojo', '0048_sla_notifications'), ('dojo', '0049_create_endpoint_status'), ('dojo', '0050_deduplication_on_engagement'), ('dojo', '0051_regulation_categories'), ('dojo', '0052_cvssv3_field'), ('dojo', '0053_engagement_notes'), ('dojo', '0054_dojometa_finding'), ('dojo', '0055_notifications_jira_update_verbose_name'), ('dojo', '0056_index_component_name'), ('dojo', '0057_ms_teams'), ('dojo', '0058_document_finding_model'), ('dojo', '0059_product_type_authorized_users'), ('dojo', '0060_false_p_dedupe_indices'), ('dojo', '0061_jira_webhook_secret'), ('dojo', '0062_add_vuln_id_from_tool'), ('dojo', '0063_jira_refactor'), ('dojo', '0064_jira_refactor_populate'), ('dojo', '0065_delete_empty_jira_project_configs'), ('dojo', '0066_django_tagulous'), ('dojo', '0067_max_dupes'), ('dojo', '0068_file_upload'), ('dojo', '0069_risk_acceptance'), ('dojo', '0070_increase_alert_field_size'), ('dojo', '0071_product_type_enhancement'), ('dojo', '0072_composite_index'), ('dojo', '0073_sheets_textfields'), ('dojo', '0074_notifications_close_engagement'), ('dojo', '0075_import_history'), ('dojo', '0076_authorization'), ('dojo', '0077_delete_dupulicates'), ('dojo', '0078_cvssv3_rename_verbose_name'), ('dojo', '0079_system_settings_disclaimer'), ('dojo', '0080_jira_issue_templates'), ('dojo', '0081_last_status_update'), ('dojo', '0082_last_status_update_populate'), ('dojo', '0083_remove_ipscan'), ('dojo', '0084_add_extras_in_tool'), ('dojo', '0085_add_publish_date_cvssv3_score'), ('dojo', '0086_finding_groups'), ('dojo', '0087_protect_jira_issues'), ('dojo', '0088_import_history_cicd'), ('dojo', '0089_unprotect_jira_project'), ('dojo', '0090_index_duplicate_finding')]
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('auth', '0008_alter_user_username_max_length'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.RunPython(log_me_0001),
migrations.CreateModel(
name='Benchmark_Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=300)),
('objective', models.TextField()),
('references', models.TextField(blank=True, null=True)),
('enabled', models.BooleanField(default=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
],
options={
'ordering': ('name',),
},
),
migrations.CreateModel(
name='Benchmark_Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=300)),
('version', models.CharField(max_length=15)),
('benchmark_source', models.CharField(choices=[('PCI', 'PCI'), ('OWASP ASVS', 'OWASP ASVS'), ('OWASP Mobile ASVS', 'OWASP Mobile ASVS')], default='OWASP ASVS', max_length=20, null=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('enabled', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('email', models.EmailField(max_length=254)),
('team', models.CharField(max_length=100)),
('is_admin', models.BooleanField(default=False)),
('is_globally_read_only', models.BooleanField(default=False)),
('updated', models.DateTimeField(editable=False)),
],
),
migrations.CreateModel(
name='CWE',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('url', models.CharField(max_length=1000)),
('description', models.CharField(max_length=2000)),
('number', models.IntegerField()),
],
),
migrations.CreateModel(
name='Development_Environment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Endpoint',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('protocol', models.CharField(blank=True, help_text=b"The communication protocol such as 'http', 'ftp', etc.", max_length=10, null=True)),
                ('host', models.CharField(blank=True, help_text=b"The host name or IP address, you can also include the port number. For example '127.0.0.1', '127.0.0.1:8080', 'localhost', 'yourdomain.com'.", max_length=500, null=True)),
('fqdn', models.CharField(blank=True, max_length=500, null=True)),
('port', models.IntegerField(blank=True, help_text='The network port associated with the endpoint.', null=True)),
                ('path', models.CharField(blank=True, help_text=b"The location of the resource, it should start with a '/'. For example '/endpoint/420/edit'.", max_length=500, null=True)),
                ('query', models.CharField(blank=True, help_text=b"The query string, the question mark should be omitted. For example 'group=4&team=8'.", max_length=1000, null=True)),
('fragment', models.CharField(blank=True, help_text=b"The fragment identifier which follows the hash mark. The hash mark should be omitted. For example 'section-13', 'paragraph-2'.", max_length=500, null=True)),
],
options={
'ordering': ['product', 'protocol', 'host', 'path', 'query', 'fragment'],
},
),
migrations.CreateModel(
name='Endpoint_Params',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('param', models.CharField(max_length=150)),
('value', models.CharField(max_length=150)),
('method', models.CharField(choices=[('GET', 'GET'), ('POST', 'POST')], max_length=20, null=True)),
],
),
migrations.CreateModel(
name='Engagement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=300, null=True)),
('description', models.CharField(blank=True, max_length=2000, null=True)),
('version', models.CharField(blank=True, help_text='Version of the product the engagement tested.', max_length=100, null=True)),
('first_contacted', models.DateField(blank=True, null=True)),
('target_start', models.DateField()),
('target_end', models.DateField()),
('reason', models.CharField(blank=True, max_length=2000, null=True)),
('updated', models.DateTimeField(auto_now=True, null=True)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('active', models.BooleanField(default=True, editable=False)),
('tracker', models.URLField(blank=True, help_text='Link to epic or ticket system with changes to version.', null=True)),
('test_strategy', models.URLField(blank=True, null=True)),
('threat_model', models.BooleanField(default=True)),
('api_test', models.BooleanField(default=True)),
('pen_test', models.BooleanField(default=True)),
('check_list', models.BooleanField(default=True)),
('status', models.CharField(choices=[('Not Started', 'Not Started'), ('Blocked', 'Blocked'), ('Cancelled', 'Cancelled'), ('Completed', 'Completed'), ('In Progress', 'In Progress'), ('On Hold', 'On Hold'), ('Waiting for Resource', 'Waiting for Resource')], default='', max_length=2000, null=True)),
('progress', models.CharField(default='threat_model', editable=False, max_length=100)),
('tmodel_path', models.CharField(blank=True, default='none', editable=False, max_length=1000, null=True)),
('risk_path', models.CharField(blank=True, default='none', editable=False, max_length=1000, null=True)),
('done_testing', models.BooleanField(default=False, editable=False)),
('engagement_type', models.CharField(choices=[('Interactive', 'Interactive'), ('CI/CD', 'CI/CD')], default='Interactive', max_length=30, null=True)),
('build_id', models.CharField(blank=True, help_text='Build ID of the product the engagement tested.', max_length=150, null=True, verbose_name='Build ID')),
('commit_hash', models.CharField(blank=True, help_text='Commit hash from repo', max_length=150, null=True, verbose_name='Commit Hash')),
('branch_tag', models.CharField(blank=True, help_text='Tag or branch of the product the engagement tested.', max_length=150, null=True, verbose_name='Branch/Tag')),
('source_code_management_uri', models.CharField(blank=True, help_text='Resource link to source code', max_length=600, null=True, verbose_name='Repo')),
('deduplication_on_engagement', models.BooleanField(default=False)),
],
options={
'ordering': ['-target_start'],
},
),
migrations.CreateModel(
name='Engagement_Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='FieldRule',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('field', models.CharField(max_length=200)),
('update_type', models.CharField(choices=[('Append', 'Append'), ('Replace', 'Replace')], max_length=30)),
('text', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Finding',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.TextField(max_length=1000)),
('date', models.DateField(default=dojo.models.get_current_date)),
('cwe', models.IntegerField(blank=True, default=0, null=True)),
('url', models.TextField(blank=True, editable=False, null=True)),
('severity', models.CharField(help_text='The severity level of this flaw (Critical, High, Medium, Low, Informational)', max_length=200)),
('description', models.TextField()),
('mitigation', models.TextField()),
('impact', models.TextField()),
('steps_to_reproduce', models.TextField(blank=True, null=True)),
('severity_justification', models.TextField(blank=True, null=True)),
('references', models.TextField(blank=True, db_column='refs', null=True)),
('is_template', models.BooleanField(default=False)),
('active', models.BooleanField(default=True)),
('verified', models.BooleanField(default=True)),
('false_p', models.BooleanField(default=False, verbose_name='False Positive')),
('duplicate', models.BooleanField(default=False)),
('out_of_scope', models.BooleanField(default=False)),
('under_review', models.BooleanField(default=False)),
('under_defect_review', models.BooleanField(default=False)),
('thread_id', models.IntegerField(default=0, editable=False)),
('mitigated', models.DateTimeField(blank=True, editable=False, null=True)),
('numerical_severity', models.CharField(max_length=4)),
('last_reviewed', models.DateTimeField(editable=False, null=True)),
('line_number', models.CharField(blank=True, editable=False, max_length=200, null=True)),
('sourcefilepath', models.TextField(blank=True, editable=False, null=True)),
('sourcefile', models.TextField(blank=True, editable=False, null=True)),
('param', models.TextField(blank=True, editable=False, null=True)),
('payload', models.TextField(blank=True, editable=False, null=True)),
('hash_code', models.TextField(blank=True, null=True)),
('line', models.IntegerField(blank=True, null=True, verbose_name='Line number')),
('file_path', models.CharField(blank=True, max_length=1000, null=True)),
('static_finding', models.BooleanField(default=False)),
('dynamic_finding', models.BooleanField(default=False)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('scanner_confidence', models.IntegerField(blank=True, default=None, editable=False, help_text='Confidence level of vulnerability which is supplied by the scannner.', null=True)),
],
options={
'ordering': ('numerical_severity', '-date', 'title'),
},
),
migrations.CreateModel(
name='FindingImage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to=dojo.models.UniqueUploadNameProvider('finding_images'))),
('caption', models.CharField(blank=True, max_length=500)),
],
),
migrations.CreateModel(
name='JIRA_Clone',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('jira_id', models.CharField(max_length=200)),
('jira_clone_id', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='JIRA_Conf',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('url', models.URLField(help_text='For more information how to configure Jira, read the DefectDojo documentation.', max_length=2000, verbose_name='JIRA URL')),
('username', models.CharField(max_length=2000)),
('password', models.CharField(max_length=2000)),
('default_issue_type', models.CharField(choices=[('Task', 'Task'), ('Story', 'Story'), ('Epic', 'Epic'), ('Spike', 'Spike'), ('Bug', 'Bug')], default='Bug', max_length=9)),
('epic_name_id', models.IntegerField(help_text="To obtain the 'Epic name id' visit https://<YOUR JIRA URL>/rest/api/2/field and search for Epic Name. Copy the number out of cf[number] and paste it here.")),
('open_status_key', models.IntegerField(help_text='Transition ID to Re-Open JIRA issues, visit https://<YOUR JIRA URL>/rest/api/latest/issue/<ANY VALID ISSUE KEY>/transitions?expand=transitions.fields to find the ID for your JIRA instance', verbose_name='Reopen Transition ID')),
('close_status_key', models.IntegerField(help_text='Transition ID to Close JIRA issues, visit https://<YOUR JIRA URL>/rest/api/latest/issue/<ANY VALID ISSUE KEY>/transitions?expand=transitions.fields to find the ID for your JIRA instance', verbose_name='Close Transition ID')),
('low_mapping_severity', models.CharField(help_text="Maps to the 'Priority' field in Jira. For example: Low", max_length=200)),
('medium_mapping_severity', models.CharField(help_text="Maps to the 'Priority' field in Jira. For example: Medium", max_length=200)),
('high_mapping_severity', models.CharField(help_text="Maps to the 'Priority' field in Jira. For example: High", max_length=200)),
('critical_mapping_severity', models.CharField(help_text="Maps to the 'Priority' field in Jira. For example: Critical", max_length=200)),
('finding_text', models.TextField(blank=True, help_text='Additional text that will be added to the finding in Jira. For example including how the finding was created or who to contact for more information.', null=True)),
('configuration_name', models.CharField(default='', help_text='Enter a name to give to this configuration', max_length=2000)),
('info_mapping_severity', models.CharField(help_text="Maps to the 'Priority' field in Jira. For example: Info", max_length=200)),
('accepted_mapping_resolution', models.CharField(blank=True, help_text='JIRA resolution names (comma-separated values) that maps to an Accepted Finding', max_length=300, null=True)),
('false_positive_mapping_resolution', models.CharField(blank=True, help_text='JIRA resolution names (comma-separated values) that maps to a False Positive Finding', max_length=300, null=True)),
],
),
migrations.CreateModel(
name='JIRA_Details_Cache',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('jira_id', models.CharField(max_length=200)),
('jira_key', models.CharField(max_length=200)),
('jira_status', models.CharField(max_length=200)),
('jira_resolution', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Language_Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('language', models.CharField(max_length=100)),
('color', models.CharField(max_length=7, null=True, verbose_name='HTML color')),
],
),
migrations.CreateModel(
name='Network_Locations',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('location', models.CharField(help_text='Location of network testing: Examples: VPN, Internet or Internal.', max_length=500)),
],
),
migrations.CreateModel(
name='Notes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('entry', models.TextField()),
('date', models.DateTimeField(default=dojo.models.get_current_datetime, editable=False)),
('author', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='editor_notes_set', to=settings.AUTH_USER_MODEL)),
('edit_time', models.DateTimeField(default=dojo.models.get_current_datetime, editable=False, null=True)),
('edited', models.BooleanField(default=False)),
('editor', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='author_notes_set', to=settings.AUTH_USER_MODEL)),
('private', models.BooleanField(default=False)),
],
options={
'ordering': ['-date'],
},
),
migrations.CreateModel(
name='Objects',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=100, null=True)),
('path', models.CharField(blank=True, max_length=600, null=True, verbose_name='Full file path')),
('folder', models.CharField(blank=True, max_length=400, null=True, verbose_name='Folder')),
('artifact', models.CharField(blank=True, max_length=400, null=True, verbose_name='Artifact')),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
],
),
migrations.CreateModel(
name='Objects_Engagement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('build_id', models.CharField(max_length=150, null=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('full_url', models.URLField(blank=True, max_length=400, null=True)),
('type', models.CharField(max_length=30, null=True)),
('percentUnchanged', models.CharField(max_length=10, null=True)),
('engagement', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.engagement')),
('object_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.objects')),
],
),
migrations.CreateModel(
name='Objects_Review',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, null=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
],
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('description', models.CharField(max_length=4000)),
('prod_manager', models.CharField(blank=True, default=0, max_length=200, null=True)),
('tech_contact', models.CharField(blank=True, default=0, max_length=200, null=True)),
('manager', models.CharField(blank=True, default=0, max_length=200, null=True)),
('created', models.DateTimeField(blank=True, editable=False, null=True)),
('updated', models.DateTimeField(blank=True, editable=False, null=True)),
('tid', models.IntegerField(default=0, editable=False)),
('prod_numeric_grade', models.IntegerField(blank=True, null=True)),
('business_criticality', models.CharField(blank=True, choices=[('very high', 'Very High'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low'), ('very low', 'Very Low'), ('none', 'None')], max_length=9, null=True)),
('platform', models.CharField(blank=True, choices=[('web service', 'API'), ('desktop', 'Desktop'), ('iot', 'Internet of Things'), ('mobile', 'Mobile'), ('we', 'We')], max_length=11, null=True)),
('lifecycle', models.CharField(blank=True, choices=[('construction', 'Construction'), ('production', 'Production'), ('retirement', 'Retirement')], max_length=12, null=True)),
('origin', models.CharField(blank=True, choices=[('third party library', 'Third Party Library'), ('purchased', 'Purchased'), ('contractor', 'Contractor Developed'), ('internal', 'Internally Developed'), ('open source', 'Open Source'), ('outsourced', 'Outsourced')], max_length=19, null=True)),
('user_records', models.PositiveIntegerField(blank=True, help_text='Estimate the number of user records within the application.', null=True)),
('revenue', models.DecimalField(blank=True, decimal_places=2, help_text=b"Estimate the application's revenue.", max_digits=15, null=True)),
('external_audience', models.BooleanField(default=False, help_text='Specify if the application is used by people outside the organization.')),
('internet_accessible', models.BooleanField(default=False, help_text='Specify if the application is accessible from the public internet.')),
('authorized_users', models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('name',),
},
),
migrations.CreateModel(
name='Product_Line',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=300)),
('description', models.CharField(max_length=2000)),
],
),
migrations.CreateModel(
name='Product_Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('critical_product', models.BooleanField(default=False)),
('key_product', models.BooleanField(default=False)),
('updated', models.DateTimeField(auto_now=True, null=True)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
],
),
migrations.CreateModel(
name='Regulation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(help_text='The name of the legislation.', max_length=128)),
('acronym', models.CharField(help_text='A shortened representation of the name.', max_length=20, unique=True)),
('category', models.CharField(choices=[('privacy', 'Privacy'), ('finance', 'Finance'), ('education', 'Education'), ('medical', 'Medical'), ('other', 'Other')], help_text='The subject of the regulation.', max_length=9)),
('jurisdiction', models.CharField(help_text='The territory over which the regulation applies.', max_length=64)),
('description', models.TextField(blank=True, help_text="Information about the regulation's purpose.")),
('reference', models.URLField(blank=True, help_text='An external URL for more information.')),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Report_Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='Rule',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('enabled', models.BooleanField(default=True)),
('text', models.TextField()),
('operator', models.CharField(choices=[('Matches', 'Matches'), ('Contains', 'Contains')], max_length=30)),
('model_object', models.CharField(choices=[('Finding', 'Finding')], max_length=30)),
('match_field', models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue')], max_length=200)),
('match_text', models.TextField()),
('application', models.CharField(choices=[('Append', 'Append'), ('Replace', 'Replace')], max_length=200)),
('applies_to', models.CharField(choices=[('Finding', 'Finding')], max_length=30)),
('applied_field', models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue')], max_length=200)),
('child_rules', models.ManyToManyField(editable=False, related_name='_rule_child_rules_+', to='dojo.Rule')),
('parent_rule', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.rule')),
],
),
migrations.CreateModel(
name='Test',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.TextField(blank=True, null=True)),
('target_start', models.DateTimeField()),
('target_end', models.DateTimeField()),
('estimated_time', models.TimeField(blank=True, editable=False, null=True)),
('actual_time', models.TimeField(blank=True, editable=False, null=True)),
('percent_complete', models.IntegerField(blank=True, null=True)),
('updated', models.DateTimeField(auto_now=True, null=True)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('engagement', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.engagement')),
('environment', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.development_environment')),
('lead', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('notes', models.ManyToManyField(blank=True, editable=False, to='dojo.Notes')),
],
),
migrations.CreateModel(
name='Test_Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200, unique=True)),
('static_tool', models.BooleanField(default=False)),
('dynamic_tool', models.BooleanField(default=False)),
],
options={
'ordering': ('name',),
},
),
migrations.CreateModel(
name='Testing_Guide_Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=300)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
],
options={
'ordering': ('name',),
},
),
migrations.CreateModel(
name='Tool_Configuration',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('description', models.CharField(blank=True, max_length=2000, null=True)),
('url', models.CharField(max_length=2000, null=True)),
('authentication_type', models.CharField(blank=True, choices=[('API', 'API Key'), ('Password', 'Username/Password'), ('SSH', 'SSH')], max_length=15, null=True)),
('username', models.CharField(blank=True, max_length=200, null=True)),
('password', models.CharField(blank=True, max_length=600, null=True)),
('auth_title', models.CharField(blank=True, max_length=200, null=True, verbose_name='Title for SSH/API Key')),
('ssh', models.CharField(blank=True, max_length=6000, null=True)),
('api_key', models.CharField(blank=True, max_length=600, null=True, verbose_name='API Key')),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Tool_Product_Settings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('description', models.CharField(blank=True, max_length=2000, null=True)),
('url', models.CharField(blank=True, max_length=2000, null=True)),
('tool_project_id', models.CharField(blank=True, max_length=200, null=True)),
('notes', models.ManyToManyField(blank=True, editable=False, to='dojo.Notes')),
('product', models.ForeignKey(default=1, editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
('tool_configuration', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tool_configuration', to='dojo.tool_configuration')),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Tool_Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('description', models.CharField(max_length=2000, null=True)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Dojo_User',
fields=[
],
options={
'proxy': True,
'indexes': [],
'ordering': ['first_name'],
},
bases=('auth.user',),
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='VA',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('address', models.TextField(default='none', editable=False)),
('status', models.BooleanField(default=False, editable=False)),
('start', models.CharField(max_length=100)),
('result', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.test')),
('user', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Tool_Product_History',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('last_scan', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('succesfull', models.BooleanField(default=True, verbose_name='Succesfully')),
('configuration_details', models.CharField(blank=True, max_length=2000, null=True)),
('product', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.tool_product_settings')),
],
),
migrations.AddField(
model_name='tool_configuration',
name='tool_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tool_type', to='dojo.tool_type'),
),
migrations.CreateModel(
name='Testing_Guide',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('identifier', models.CharField(blank=True, help_text='Test Unique Identifier', max_length=20, null=True)),
('name', models.CharField(help_text='Name of the test', max_length=400)),
('summary', models.CharField(help_text='Summary of the test', max_length=800)),
('objective', models.CharField(help_text='Objective of the test', max_length=800)),
('how_to_test', models.TextField(default=None, help_text='How to test the objective')),
('results_expected', models.CharField(help_text='What the results look like for a test', max_length=800)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('testing_guide_category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.testing_guide_category')),
],
),
migrations.AddField(
model_name='test',
name='test_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.test_type'),
),
migrations.CreateModel(
name='ScanSettings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('addresses', models.TextField(default='none')),
('date', models.DateTimeField(blank=True, default=dojo.models.get_current_datetime, editable=False)),
('frequency', models.CharField(blank=True, max_length=10000, null=True)),
('email', models.CharField(max_length=512)),
('protocol', models.CharField(default='TCP', max_length=10)),
('product', models.ForeignKey(default=1, editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
('user', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Scan',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(blank=True, default=dojo.models.get_current_datetime, editable=False)),
('protocol', models.CharField(default='TCP', max_length=10)),
('status', models.CharField(default='Pending', editable=False, max_length=10)),
('baseline', models.BooleanField(default=False, verbose_name='Current Baseline')),
('scan_settings', models.ForeignKey(default=1, editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.scansettings')),
],
),
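# Risk_Acceptance captures sign-off on findings a team chooses not to fix:
# the uploaded acceptance file, who accepted it, an optional expiry date,
# and the accepted findings themselves (many-to-many to Finding).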
migrations.CreateModel(
name='Risk_Acceptance',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('path', models.FileField(editable=False, upload_to='risk/%Y/%m/%d', verbose_name='Risk Acceptance File')),
('expiration_date', models.DateTimeField(blank=True, default=None, null=True)),
('accepted_by', models.CharField(blank=True, default=None, help_text='The entity or person that accepts the risk.', max_length=200, null=True, verbose_name='Accepted By')),
('compensating_control', models.TextField(blank=True, default=None, help_text='If a compensating control exists to mitigate the finding or reduce risk, then list the compensating control(s).', null=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('accepted_findings', models.ManyToManyField(to='dojo.Finding')),
('notes', models.ManyToManyField(editable=False, to='dojo.Notes')),
('reporter', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Report',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('type', models.CharField(default='Finding', max_length=100)),
('format', models.CharField(default='AsciiDoc', max_length=15)),
('task_id', models.CharField(max_length=50)),
('file', models.FileField(null=True, upload_to='reports/%Y/%m/%d', verbose_name='Report File')),
('status', models.CharField(default='requested', max_length=10)),
('options', models.TextField()),
('datetime', models.DateTimeField(auto_now_add=True)),
('done_datetime', models.DateTimeField(null=True)),
('requester', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-datetime'],
},
),
migrations.AddField(
model_name='product',
name='prod_type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='prod_type', to='dojo.product_type'),
),
migrations.AddField(
model_name='product',
name='product_manager',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='product_manager', to='dojo.dojo_user'),
),
migrations.AddField(
model_name='product',
name='regulations',
field=models.ManyToManyField(blank=True, to='dojo.Regulation'),
),
migrations.AddField(
model_name='product',
name='team_manager',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='team_manager', to='dojo.dojo_user'),
),
migrations.AddField(
model_name='product',
name='technical_contact',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='technical_contact', to='dojo.dojo_user'),
),
migrations.AddField(
model_name='objects',
name='product',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product'),
),
migrations.AddField(
model_name='objects',
name='review_status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.objects_review'),
),
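# Notifications stores per-user delivery preferences. Each event type is a
# MultiSelectField holding a comma-separated subset of the four channels;
# max_length=24 is exactly len('slack,hipchat,mail,alert'), i.e. all four
# channels selected at once.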
migrations.CreateModel(
name='Notifications',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('product_added', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('engagement_added', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('test_added', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('results_added', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('report_created', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('jira_update', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('upcoming_engagement', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('stale_engagement', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('auto_close_engagement', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('user_mentioned', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('code_review', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('review_requested', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('other', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24)),
('user', models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='IPScan',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('address', models.TextField(default='none', editable=False)),
('services', models.CharField(max_length=800, null=True)),
('scan', models.ForeignKey(default=1, editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.scan')),
],
),
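# FindingImageAccessToken ties a user to a token and a requested image size,
# presumably so finding images can be served through tokenized URLs rather
# than direct file paths (token generation lives in dojo.models, not here).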
migrations.CreateModel(
name='FindingImageAccessToken',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('token', models.CharField(max_length=255)),
('size', models.CharField(choices=[('small', 'Small'), ('medium', 'Medium'), ('large', 'Large'), ('thumbnail', 'Thumbnail'), ('original', 'Original')], default='medium', max_length=9)),
('image', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.findingimage')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='finding',
name='defect_review_requested_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='defect_review_requested_by', to='dojo.dojo_user'),
),
migrations.AddField(
model_name='finding',
name='duplicate_finding',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='original_finding', to='dojo.finding'),
),
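# With duplicate_finding in place, the duplicates of a finding are reachable
# through the reverse accessor, e.g. (a sketch, using standard Django ORM
# semantics): original.original_finding.all() lists the findings whose
# duplicate_finding points at 'original'.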
migrations.AddField(
model_name='finding',
name='duplicate_list',
field=models.ManyToManyField(blank=True, editable=False, related_name='_finding_duplicate_list_+', to='dojo.Finding'),
),
migrations.AddField(
model_name='finding',
name='endpoints',
field=models.ManyToManyField(blank=True, to='dojo.Endpoint'),
),
migrations.AddField(
model_name='finding',
name='found_by',
field=models.ManyToManyField(editable=False, to='dojo.Test_Type'),
),
migrations.AddField(
model_name='finding',
name='images',
field=models.ManyToManyField(blank=True, to='dojo.FindingImage'),
),
migrations.AddField(
model_name='finding',
name='last_reviewed_by',
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_reviewed_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='finding',
name='mitigated_by',
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='mitigated_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='finding',
name='notes',
field=models.ManyToManyField(blank=True, editable=False, to='dojo.Notes'),
),
migrations.AddField(
model_name='finding',
name='reporter',
field=models.ForeignKey(default=1, editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='reporter', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='finding',
name='review_requested_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='review_requested_by', to='dojo.dojo_user'),
),
migrations.AddField(
model_name='finding',
name='reviewers',
field=models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='finding',
name='test',
field=models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.test'),
),
migrations.CreateModel(
name='Engagement_Presets',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(default=None, help_text='Brief description of preset.', max_length=500)),
('notes', models.CharField(blank=True, help_text='Description of what needs to be tested or how to set up the environment for testing', max_length=2000, null=True)),
('scope', models.CharField(blank=True, default=None, help_text="Scope of Engagement testing (IPs/Resources/URLs)", max_length=800)),
('created', models.DateTimeField(auto_now_add=True)),
('network_locations', models.ManyToManyField(blank=True, default=None, to='dojo.Network_Locations')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
('test_type', models.ManyToManyField(blank=True, default=None, to='dojo.Test_Type')),
],
options={
'ordering': ['title'],
},
),
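# The following AddField operations wire Engagement to Tool_Configuration
# records used by the CI/CD integration: a build server, an orchestration
# engine, and a source code management server, per each field's help_text.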
migrations.AddField(
model_name='engagement',
name='build_server',
field=models.ForeignKey(blank=True, help_text='Build server responsible for CI/CD test', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='build_server', to='dojo.tool_configuration', verbose_name='Build Server'),
),
migrations.AddField(
model_name='engagement',
name='eng_type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.engagement_type'),
),
migrations.AddField(
model_name='engagement',
name='lead',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='engagement',
name='orchestration_engine',
field=models.ForeignKey(blank=True, help_text='Orchestration service responsible for CI/CD test', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='orchestration', to='dojo.tool_configuration', verbose_name='Orchestration Engine'),
),
migrations.AddField(
model_name='engagement',
name='preset',
field=models.ForeignKey(blank=True, help_text='Settings and notes for performing this engagement.', null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.engagement_presets'),
),
migrations.AddField(
model_name='engagement',
name='product',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product'),
),
migrations.AddField(
model_name='engagement',
name='report_type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.report_type'),
),
migrations.AddField(
model_name='engagement',
name='requester',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.contact'),
),
migrations.AddField(
model_name='engagement',
name='risk_acceptance',
field=models.ManyToManyField(blank=True, default=None, editable=False, to='dojo.Risk_Acceptance'),
),
migrations.AddField(
model_name='engagement',
name='source_code_management_server',
field=models.ForeignKey(blank=True, help_text='Source code server for CI/CD test', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='source_code_management_server', to='dojo.tool_configuration', verbose_name='SCM Server'),
),
migrations.AddField(
model_name='endpoint',
name='endpoint_params',
field=models.ManyToManyField(blank=True, editable=False, to='dojo.Endpoint_Params'),
),
migrations.AddField(
model_name='endpoint',
name='product',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.product'),
),
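# Cred_User holds credentials for authenticated scanning (form or SSO login,
# optional HTTP auth, and login/logout regexes to verify session state).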
migrations.CreateModel(
name='Cred_User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('username', models.CharField(max_length=200)),
('password', models.CharField(max_length=600)),
('role', models.CharField(max_length=200)),
('authentication', models.CharField(choices=[('Form', 'Form Authentication'), ('SSO', 'SSO Redirect')], default='Form', max_length=15)),
('http_authentication', models.CharField(blank=True, choices=[('Basic', 'Basic'), ('NTLM', 'NTLM')], max_length=15, null=True)),
('description', models.CharField(blank=True, max_length=2000, null=True)),
('url', models.URLField(max_length=2000)),
('login_regex', models.CharField(blank=True, max_length=200, null=True)),
('logout_regex', models.CharField(blank=True, max_length=200, null=True)),
('is_valid', models.BooleanField(default=True, verbose_name='Login is valid')),
('environment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.development_environment')),
('notes', models.ManyToManyField(blank=True, editable=False, to='dojo.Notes')),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Cred_Mapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_authn_provider', models.BooleanField(default=False, verbose_name='Authentication Provider')),
('url', models.URLField(blank=True, max_length=2000, null=True)),
('cred_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cred_user', to='dojo.cred_user', verbose_name='Credential')),
('engagement', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='engagement', to='dojo.engagement')),
('finding', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='finding', to='dojo.finding')),
('product', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='product', to='dojo.product')),
('test', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='test', to='dojo.test')),
],
),
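# Cred_Mapping (above) attaches a stored credential to a product, engagement,
# test, or finding; Check_List (below) is a per-engagement security checklist
# where each category keeps a status string plus the findings backing it.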
migrations.CreateModel(
name='Check_List',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('session_management', models.CharField(default='none', max_length=50)),
('encryption_crypto', models.CharField(default='none', max_length=50)),
('configuration_management', models.CharField(default='', max_length=50)),
('authentication', models.CharField(default='none', max_length=50)),
('authorization_and_access_control', models.CharField(default='none', max_length=50)),
('data_input_sanitization_validation', models.CharField(default='none', max_length=50)),
('sensitive_data', models.CharField(default='none', max_length=50)),
('other', models.CharField(default='none', max_length=50)),
('auth_issues', models.ManyToManyField(blank=True, related_name='auth_issues', to='dojo.Finding')),
('author_issues', models.ManyToManyField(blank=True, related_name='author_issues', to='dojo.Finding')),
('config_issues', models.ManyToManyField(blank=True, related_name='config_issues', to='dojo.Finding')),
('crypto_issues', models.ManyToManyField(blank=True, related_name='crypto_issues', to='dojo.Finding')),
('data_issues', models.ManyToManyField(blank=True, related_name='data_issues', to='dojo.Finding')),
('engagement', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='eng_for_check', to='dojo.engagement')),
('other_issues', models.ManyToManyField(blank=True, related_name='other_issues', to='dojo.Finding')),
('sensitive_issues', models.ManyToManyField(blank=True, related_name='sensitive_issues', to='dojo.Finding')),
('session_issues', models.ManyToManyField(blank=True, related_name='session_issues', to='dojo.Finding')),
],
),
migrations.CreateModel(
name='BurpRawRequestResponse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('burpRequestBase64', models.BinaryField()),
('burpResponseBase64', models.BinaryField()),
('finding', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.finding')),
],
),
migrations.CreateModel(
name='Benchmark_Requirement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('objective_number', models.CharField(max_length=15, null=True)),
('objective', models.TextField()),
('references', models.TextField(blank=True, null=True)),
('level_1', models.BooleanField(default=False)),
('level_2', models.BooleanField(default=False)),
('level_3', models.BooleanField(default=False)),
('enabled', models.BooleanField(default=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.benchmark_category')),
('cwe_mapping', models.ManyToManyField(blank=True, to='dojo.CWE')),
('testing_guide', models.ManyToManyField(blank=True, to='dojo.Testing_Guide')),
],
),
migrations.AddField(
model_name='benchmark_category',
name='type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.benchmark_type', verbose_name='Benchmark Type'),
),
migrations.CreateModel(
name='App_Analysis',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('confidence', models.IntegerField(blank=True, null=True, verbose_name='Confidence level')),
('version', models.CharField(blank=True, max_length=200, null=True, verbose_name='Version Number')),
('icon', models.CharField(blank=True, max_length=200, null=True)),
('website', models.URLField(blank=True, max_length=400, null=True)),
('website_found', models.URLField(blank=True, max_length=400, null=True)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Alerts',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(default='', max_length=100)),
('description', models.CharField(max_length=2000, null=True)),
('url', models.URLField(max_length=2000, null=True)),
('source', models.CharField(default='Generic', max_length=100)),
('icon', models.CharField(default='icon-user-check', max_length=25)),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('user_id', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-created'],
},
),
migrations.CreateModel(
name='Languages',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('files', models.IntegerField(blank=True, null=True, verbose_name='Number of files')),
('blank', models.IntegerField(blank=True, null=True, verbose_name='Number of blank lines')),
('comment', models.IntegerField(blank=True, null=True, verbose_name='Number of comment lines')),
('code', models.IntegerField(blank=True, null=True, verbose_name='Number of code lines')),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('language', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.language_type')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'unique_together': {('language', 'product')},
},
),
migrations.CreateModel(
name='DojoMeta',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=120)),
('value', models.CharField(max_length=300)),
('endpoint', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='endpoint_meta', to='dojo.endpoint')),
('product', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='product_meta', to='dojo.product')),
],
options={
'unique_together': {('product', 'name'), ('endpoint', 'name')},
},
),
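# Benchmark_Product_Summary tracks per-product progress against a benchmark
# type (e.g. OWASP ASVS levels 1-3), storing the number of active benchmark
# controls and a score for each level.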
migrations.CreateModel(
name='Benchmark_Product_Summary',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('desired_level', models.CharField(choices=[('Level 1', 'Level 1'), ('Level 2', 'Level 2'), ('Level 3', 'Level 3')], default='Level 1', max_length=15)),
('current_level', models.CharField(blank=True, choices=[('Level 1', 'Level 1'), ('Level 2', 'Level 2'), ('Level 3', 'Level 3')], default='None', max_length=15, null=True)),
('asvs_level_1_benchmark', models.IntegerField(default=0, help_text='Total number of active benchmarks for this application.')),
('asvs_level_1_score', models.IntegerField(default=0, help_text='ASVS Level 1 Score')),
('asvs_level_2_benchmark', models.IntegerField(default=0, help_text='Total number of active benchmarks for this application.')),
('asvs_level_2_score', models.IntegerField(default=0, help_text='ASVS Level 2 Score')),
('asvs_level_3_benchmark', models.IntegerField(default=0, help_text='Total number of active benchmarks for this application.')),
('asvs_level_3_score', models.IntegerField(default=0, help_text='ASVS Level 3 Score')),
('publish', models.BooleanField(default=False, help_text='Publish score to Product.')),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('benchmark_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.benchmark_type')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
],
options={
'unique_together': {('product', 'benchmark_type')},
},
),
migrations.CreateModel(
name='Benchmark_Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pass_fail', models.BooleanField(default=False, help_text='Does the product meet the requirement?', verbose_name='Pass')),
('enabled', models.BooleanField(default=True, help_text='Applicable for this specific product.')),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('control', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.benchmark_requirement')),
('notes', models.ManyToManyField(blank=True, editable=False, to='dojo.Notes')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
],
options={
'unique_together': {('product', 'control')},
},
),
migrations.AlterField(
model_name='finding',
name='hash_code',
field=models.TextField(blank=True, editable=False, null=True),
),
migrations.AddField(
model_name='test',
name='title',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='engagement',
name='source_code_management_uri',
field=models.URLField(blank=True, help_text='Resource link to source code', max_length=600, null=True, verbose_name='Repo'),
),
migrations.AlterField(
model_name='endpoint',
name='fragment',
field=models.CharField(blank=True, help_text="The fragment identifier which follows the hash mark. The hash mark should be omitted. For example 'section-13', 'paragraph-2'.", max_length=500, null=True),
),
migrations.AlterField(
model_name='endpoint',
name='host',
field=models.CharField(blank=True, help_text="The host name or IP address, you can also include the port number. For example'127.0.0.1', '127.0.0.1:8080', 'localhost', 'yourdomain.com'.", max_length=500, null=True),
),
migrations.AlterField(
model_name='endpoint',
name='path',
field=models.CharField(blank=True, help_text="The location of the resource, it should start with a '/'. For example/endpoint/420/edit", max_length=500, null=True),
),
migrations.AlterField(
model_name='endpoint',
name='protocol',
field=models.CharField(blank=True, help_text="The communication protocol such as 'http', 'ftp', etc.", max_length=10, null=True),
),
migrations.AlterField(
model_name='endpoint',
name='query',
field=models.CharField(blank=True, help_text="The query string, the question mark should be omitted.For example 'group=4&team=8'", max_length=1000, null=True),
),
migrations.AlterField(
model_name='engagement',
name='source_code_management_uri',
field=models.URLField(blank=True, help_text='Resource link to source code', max_length=600, null=True, verbose_name='Repo'),
),
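# The CVE field is validated against the official identifier format: 'CVE-',
# a four-digit year, then a 4-7 digit sequence number, hence the regex
# '^CVE-\d{4}-\d{4,7}$'.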
migrations.AddField(
model_name='finding',
name='cve',
field=models.TextField(max_length=20, null=True, validators=[django.core.validators.RegexValidator(message="CVE must be entered in the format: 'CVE-9999-9999'. ", regex='^CVE-\\d{4}-\\d{4,7}$')]),
),
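# JIRA_PKey maps a product to a JIRA project key and component, with
# per-product toggles for pushing all issues, engagement-to-epic mapping,
# and note syncing.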
migrations.CreateModel(
name='JIRA_PKey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('project_key', models.CharField(blank=True, max_length=200)),
('component', models.CharField(blank=True, max_length=200)),
('push_all_issues', models.BooleanField(blank=True, default=False)),
('enable_engagement_epic_mapping', models.BooleanField(blank=True, default=False)),
('push_notes', models.BooleanField(blank=True, default=False)),
('conf', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.jira_conf', verbose_name='JIRA Configuration')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
],
),
migrations.AlterField(
model_name='product',
name='platform',
field=models.CharField(blank=True, choices=[('web service', 'API'), ('desktop', 'Desktop'), ('iot', 'Internet of Things'), ('mobile', 'Mobile'), ('web', 'Web')], max_length=11, null=True),
),
migrations.AlterField(
model_name='product',
name='revenue',
field=models.DecimalField(blank=True, decimal_places=2, help_text="Estimate the application's revenue.", max_digits=15, null=True),
),
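# Stub_Finding is a minimal placeholder (title, date, severity, description)
# that DefectDojo's quick-add workflow can later promote to a full Finding.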
migrations.CreateModel(
name='Stub_Finding',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.TextField(max_length=1000)),
('date', models.DateField(default=dojo.models.get_current_date)),
('severity', models.CharField(blank=True, max_length=200, null=True)),
('description', models.TextField(blank=True, null=True)),
('reporter', models.ForeignKey(default=1, editable=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('test', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.test')),
],
options={
'ordering': ('-date', 'title'),
},
),
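# UserContactInfo extends the auth user via OneToOneField with contact
# handles; phone numbers must match '^\+?1?\d{9,15}$', i.e. an optional '+'
# and optional leading '1' followed by 9-15 digits.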
migrations.CreateModel(
name='UserContactInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(blank=True, max_length=150, null=True)),
('phone_number', models.CharField(blank=True, help_text="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", max_length=15, validators=[django.core.validators.RegexValidator(message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", regex='^\\+?1?\\d{9,15}$')])),
('cell_number', models.CharField(blank=True, help_text="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", max_length=15, validators=[django.core.validators.RegexValidator(message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", regex='^\\+?1?\\d{9,15}$')])),
('twitter_username', models.CharField(blank=True, max_length=150, null=True)),
('github_username', models.CharField(blank=True, max_length=150, null=True)),
('slack_username', models.CharField(blank=True, help_text='Email address associated with your slack account', max_length=150, null=True, verbose_name='Slack Email Address')),
('slack_user_id', models.CharField(blank=True, max_length=25, null=True)),
('hipchat_username', models.CharField(blank=True, max_length=150, null=True)),
('block_execution', models.BooleanField(default=False, help_text="Instead of deduplicating a finding asynchronously, findings will be deduplicated synchronously and will 'block' the user until completion.")),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='NoteHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data', models.TextField()),
('time', models.DateTimeField(default=dojo.models.get_current_datetime, editable=False, null=True)),
('current_editor', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='notes',
name='history',
field=models.ManyToManyField(blank=True, editable=False, to='dojo.NoteHistory'),
),
migrations.AddField(
model_name='finding',
name='is_Mitigated',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='endpoint',
name='remediated',
field=models.BooleanField(blank=True, default=False),
),
migrations.AlterField(
model_name='finding',
name='dynamic_finding',
field=models.BooleanField(default=True),
),
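# Finding_Template is the reusable counterpart of Finding: when
# template_match is enabled, its remediation text can be applied to active,
# verified findings with the same CWE (optionally also matching on title).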
migrations.CreateModel(
name='Finding_Template',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.TextField(max_length=1000)),
('cwe', models.IntegerField(blank=True, default=None, null=True)),
('severity', models.CharField(blank=True, max_length=200, null=True)),
('description', models.TextField(blank=True, null=True)),
('mitigation', models.TextField(blank=True, null=True)),
('impact', models.TextField(blank=True, null=True)),
('references', models.TextField(blank=True, db_column='refs', null=True)),
('numerical_severity', models.CharField(blank=True, editable=False, max_length=4, null=True)),
('template_match', models.BooleanField(default=False, help_text='Enables this template for matching remediation advice. Match will be applied to all active, verified findings by CWE.', verbose_name='Template Match Enabled')),
('template_match_title', models.BooleanField(default=False, help_text='Matches by title text (contains search) and CWE.', verbose_name='Match Template by Title and CWE')),
('cve', models.TextField(max_length=20, null=True, validators=[django.core.validators.RegexValidator(message="CVE must be entered in the format: 'CVE-9999-9999'. ", regex='^CVE-\\d{4}-\\d{4,7}$')])),
('last_used', models.DateTimeField(editable=False, null=True)),
],
options={
'ordering': ['-cwe'],
},
),
migrations.AddField(
model_name='finding',
name='jira_change',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='finding',
name='jira_creation',
field=models.DateTimeField(null=True),
),
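# System_Settings is the instance-wide configuration model (deduplication,
# JIRA, Slack/HipChat/mail notifications, grading, benchmarks). In practice
# it is treated as a single row, though nothing in this schema enforces that.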
migrations.CreateModel(
name='System_Settings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('enable_deduplication', models.BooleanField(default=False, help_text='With this setting turned on, Dojo deduplicates findings by comparing endpoints, CWE fields, and titles. If two findings share a URL and have the same CWE or title, Dojo marks the less recent finding as a duplicate. When deduplication is enabled, a list of deduplicated findings is added to the engagement view.', verbose_name='Deduplicate findings')),
('delete_dupulicates', models.BooleanField(default=False)),
('max_dupes', models.IntegerField(blank=True, help_text='When enabled, if a single issue reaches the maximum number of duplicates, the oldest will be deleted.', null=True, verbose_name='Max Duplicates')),
('enable_jira', models.BooleanField(default=False, verbose_name='Enable JIRA integration')),
('enable_jira_web_hook', models.BooleanField(default=False, verbose_name='Enable JIRA web hook. Please note: It is strongly recommended to whitelist the Jira server using a proxy such as Nginx.')),
('jira_minimum_severity', models.CharField(blank=True, choices=[('Critical', 'Critical'), ('High', 'High'), ('Medium', 'Medium'), ('Low', 'Low'), ('Info', 'Info')], default='None', max_length=20, null=True)),
('jira_labels', models.CharField(blank=True, help_text='JIRA issue labels, space separated', max_length=200, null=True)),
('enable_slack_notifications', models.BooleanField(default=False, verbose_name='Enable Slack notifications')),
('slack_channel', models.CharField(blank=True, default='', max_length=100)),
('slack_token', models.CharField(blank=True, default='', help_text='Token required for interacting with Slack. Get one at https://api.slack.com/tokens', max_length=100)),
('slack_username', models.CharField(blank=True, default='', max_length=100)),
('enable_hipchat_notifications', models.BooleanField(default=False, verbose_name='Enable HipChat notifications')),
('hipchat_site', models.CharField(blank=True, default='', help_text='The full FQDN of your HipChat site, e.g. "yoursite.hipchat.com"', max_length=100)),
('hipchat_channel', models.CharField(blank=True, default='', max_length=100)),
('hipchat_token', models.CharField(blank=True, default='', help_text='Token required for interacting with HipChat. Get one at https://patriktest.hipchat.com/addons/', max_length=100)),
('enable_mail_notifications', models.BooleanField(default=False)),
('mail_notifications_from', models.CharField(blank=True, default='from@example.com', max_length=200)),
('mail_notifications_to', models.CharField(blank=True, default='', max_length=200)),
('s_finding_severity_naming', models.BooleanField(default=False, help_text='With this setting turned on, Dojo will display S0, S1, S2, etc in most places, whereas if turned off Critical, High, Medium, etc will be displayed.')),
('false_positive_history', models.BooleanField(default=False, help_text='DefectDojo will automatically mark the finding as a false positive if the finding has been previously marked as a false positive.')),
('url_prefix', models.CharField(blank=True, default='', help_text='URL prefix if DefectDojo is installed in its own virtual subdirectory.', max_length=300)),
('team_name', models.CharField(blank=True, default='', max_length=100)),
('time_zone', models.CharField(choices=[('Africa/Abidjan', 'Africa/Abidjan'), ('Africa/Accra', 'Africa/Accra'), ('Africa/Addis_Ababa', 'Africa/Addis_Ababa'), ('Africa/Algiers', 'Africa/Algiers'), ('Africa/Asmara', 'Africa/Asmara'), ('Africa/Asmera', 'Africa/Asmera'), ('Africa/Bamako', 'Africa/Bamako'), ('Africa/Bangui', 'Africa/Bangui'), ('Africa/Banjul', 'Africa/Banjul'), ('Africa/Bissau', 'Africa/Bissau'), ('Africa/Blantyre', 'Africa/Blantyre'), ('Africa/Brazzaville', 'Africa/Brazzaville'), ('Africa/Bujumbura', 'Africa/Bujumbura'), ('Africa/Cairo', 'Africa/Cairo'), ('Africa/Casablanca', 'Africa/Casablanca'), ('Africa/Ceuta', 'Africa/Ceuta'), ('Africa/Conakry', 'Africa/Conakry'), ('Africa/Dakar', 'Africa/Dakar'), ('Africa/Dar_es_Salaam', 'Africa/Dar_es_Salaam'), ('Africa/Djibouti', 'Africa/Djibouti'), ('Africa/Douala', 'Africa/Douala'), ('Africa/El_Aaiun', 'Africa/El_Aaiun'), ('Africa/Freetown', 'Africa/Freetown'), ('Africa/Gaborone', 'Africa/Gaborone'), ('Africa/Harare', 'Africa/Harare'), ('Africa/Johannesburg', 'Africa/Johannesburg'), ('Africa/Juba', 'Africa/Juba'), ('Africa/Kampala', 'Africa/Kampala'), ('Africa/Khartoum', 'Africa/Khartoum'), ('Africa/Kigali', 'Africa/Kigali'), ('Africa/Kinshasa', 'Africa/Kinshasa'), ('Africa/Lagos', 'Africa/Lagos'), ('Africa/Libreville', 'Africa/Libreville'), ('Africa/Lome', 'Africa/Lome'), ('Africa/Luanda', 'Africa/Luanda'), ('Africa/Lubumbashi', 'Africa/Lubumbashi'), ('Africa/Lusaka', 'Africa/Lusaka'), ('Africa/Malabo', 'Africa/Malabo'), ('Africa/Maputo', 'Africa/Maputo'), ('Africa/Maseru', 'Africa/Maseru'), ('Africa/Mbabane', 'Africa/Mbabane'), ('Africa/Mogadishu', 'Africa/Mogadishu'), ('Africa/Monrovia', 'Africa/Monrovia'), ('Africa/Nairobi', 'Africa/Nairobi'), ('Africa/Ndjamena', 'Africa/Ndjamena'), ('Africa/Niamey', 'Africa/Niamey'), ('Africa/Nouakchott', 'Africa/Nouakchott'), ('Africa/Ouagadougou', 'Africa/Ouagadougou'), ('Africa/Porto-Novo', 'Africa/Porto-Novo'), ('Africa/Sao_Tome', 'Africa/Sao_Tome'), ('Africa/Timbuktu', 'Africa/Timbuktu'), ('Africa/Tripoli', 'Africa/Tripoli'), ('Africa/Tunis', 'Africa/Tunis'), ('Africa/Windhoek', 'Africa/Windhoek'), ('America/Adak', 'America/Adak'), ('America/Anchorage', 'America/Anchorage'), ('America/Anguilla', 'America/Anguilla'), ('America/Antigua', 'America/Antigua'), ('America/Araguaina', 'America/Araguaina'), ('America/Argentina/Buenos_Aires', 'America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', 'America/Argentina/Catamarca'), ('America/Argentina/ComodRivadavia', 'America/Argentina/ComodRivadavia'), ('America/Argentina/Cordoba', 'America/Argentina/Cordoba'), ('America/Argentina/Jujuy', 'America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', 'America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', 'America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', 'America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', 'America/Argentina/Salta'), ('America/Argentina/San_Juan', 'America/Argentina/San_Juan'), ('America/Argentina/San_Luis', 'America/Argentina/San_Luis'), ('America/Argentina/Tucuman', 'America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', 'America/Argentina/Ushuaia'), ('America/Aruba', 'America/Aruba'), ('America/Asuncion', 'America/Asuncion'), ('America/Atikokan', 'America/Atikokan'), ('America/Atka', 'America/Atka'), ('America/Bahia', 'America/Bahia'), ('America/Bahia_Banderas', 'America/Bahia_Banderas'), ('America/Barbados', 'America/Barbados'), ('America/Belem', 'America/Belem'), ('America/Belize', 'America/Belize'), ('America/Blanc-Sablon', 
'America/Blanc-Sablon'), ('America/Boa_Vista', 'America/Boa_Vista'), ('America/Bogota', 'America/Bogota'), ('America/Boise', 'America/Boise'), ('America/Buenos_Aires', 'America/Buenos_Aires'), ('America/Cambridge_Bay', 'America/Cambridge_Bay'), ('America/Campo_Grande', 'America/Campo_Grande'), ('America/Cancun', 'America/Cancun'), ('America/Caracas', 'America/Caracas'), ('America/Catamarca', 'America/Catamarca'), ('America/Cayenne', 'America/Cayenne'), ('America/Cayman', 'America/Cayman'), ('America/Chicago', 'America/Chicago'), ('America/Chihuahua', 'America/Chihuahua'), ('America/Coral_Harbour', 'America/Coral_Harbour'), ('America/Cordoba', 'America/Cordoba'), ('America/Costa_Rica', 'America/Costa_Rica'), ('America/Creston', 'America/Creston'), ('America/Cuiaba', 'America/Cuiaba'), ('America/Curacao', 'America/Curacao'), ('America/Danmarkshavn', 'America/Danmarkshavn'), ('America/Dawson', 'America/Dawson'), ('America/Dawson_Creek', 'America/Dawson_Creek'), ('America/Denver', 'America/Denver'), ('America/Detroit', 'America/Detroit'), ('America/Dominica', 'America/Dominica'), ('America/Edmonton', 'America/Edmonton'), ('America/Eirunepe', 'America/Eirunepe'), ('America/El_Salvador', 'America/El_Salvador'), ('America/Ensenada', 'America/Ensenada'), ('America/Fort_Nelson', 'America/Fort_Nelson'), ('America/Fort_Wayne', 'America/Fort_Wayne'), ('America/Fortaleza', 'America/Fortaleza'), ('America/Glace_Bay', 'America/Glace_Bay'), ('America/Godthab', 'America/Godthab'), ('America/Goose_Bay', 'America/Goose_Bay'), ('America/Grand_Turk', 'America/Grand_Turk'), ('America/Grenada', 'America/Grenada'), ('America/Guadeloupe', 'America/Guadeloupe'), ('America/Guatemala', 'America/Guatemala'), ('America/Guayaquil', 'America/Guayaquil'), ('America/Guyana', 'America/Guyana'), ('America/Halifax', 'America/Halifax'), ('America/Havana', 'America/Havana'), ('America/Hermosillo', 'America/Hermosillo'), ('America/Indiana/Indianapolis', 'America/Indiana/Indianapolis'), ('America/Indiana/Knox', 'America/Indiana/Knox'), ('America/Indiana/Marengo', 'America/Indiana/Marengo'), ('America/Indiana/Petersburg', 'America/Indiana/Petersburg'), ('America/Indiana/Tell_City', 'America/Indiana/Tell_City'), ('America/Indiana/Vevay', 'America/Indiana/Vevay'), ('America/Indiana/Vincennes', 'America/Indiana/Vincennes'), ('America/Indiana/Winamac', 'America/Indiana/Winamac'), ('America/Indianapolis', 'America/Indianapolis'), ('America/Inuvik', 'America/Inuvik'), ('America/Iqaluit', 'America/Iqaluit'), ('America/Jamaica', 'America/Jamaica'), ('America/Jujuy', 'America/Jujuy'), ('America/Juneau', 'America/Juneau'), ('America/Kentucky/Louisville', 'America/Kentucky/Louisville'), ('America/Kentucky/Monticello', 'America/Kentucky/Monticello'), ('America/Knox_IN', 'America/Knox_IN'), ('America/Kralendijk', 'America/Kralendijk'), ('America/La_Paz', 'America/La_Paz'), ('America/Lima', 'America/Lima'), ('America/Los_Angeles', 'America/Los_Angeles'), ('America/Louisville', 'America/Louisville'), ('America/Lower_Princes', 'America/Lower_Princes'), ('America/Maceio', 'America/Maceio'), ('America/Managua', 'America/Managua'), ('America/Manaus', 'America/Manaus'), ('America/Marigot', 'America/Marigot'), ('America/Martinique', 'America/Martinique'), ('America/Matamoros', 'America/Matamoros'), ('America/Mazatlan', 'America/Mazatlan'), ('America/Mendoza', 'America/Mendoza'), ('America/Menominee', 'America/Menominee'), ('America/Merida', 'America/Merida'), ('America/Metlakatla', 'America/Metlakatla'), ('America/Mexico_City', 'America/Mexico_City'), 
('America/Miquelon', 'America/Miquelon'), ('America/Moncton', 'America/Moncton'), ('America/Monterrey', 'America/Monterrey'), ('America/Montevideo', 'America/Montevideo'), ('America/Montreal', 'America/Montreal'), ('America/Montserrat', 'America/Montserrat'), ('America/Nassau', 'America/Nassau'), ('America/New_York', 'America/New_York'), ('America/Nipigon', 'America/Nipigon'), ('America/Nome', 'America/Nome'), ('America/Noronha', 'America/Noronha'), ('America/North_Dakota/Beulah', 'America/North_Dakota/Beulah'), ('America/North_Dakota/Center', 'America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', 'America/North_Dakota/New_Salem'), ('America/Ojinaga', 'America/Ojinaga'), ('America/Panama', 'America/Panama'), ('America/Pangnirtung', 'America/Pangnirtung'), ('America/Paramaribo', 'America/Paramaribo'), ('America/Phoenix', 'America/Phoenix'), ('America/Port-au-Prince', 'America/Port-au-Prince'), ('America/Port_of_Spain', 'America/Port_of_Spain'), ('America/Porto_Acre', 'America/Porto_Acre'), ('America/Porto_Velho', 'America/Porto_Velho'), ('America/Puerto_Rico', 'America/Puerto_Rico'), ('America/Punta_Arenas', 'America/Punta_Arenas'), ('America/Rainy_River', 'America/Rainy_River'), ('America/Rankin_Inlet', 'America/Rankin_Inlet'), ('America/Recife', 'America/Recife'), ('America/Regina', 'America/Regina'), ('America/Resolute', 'America/Resolute'), ('America/Rio_Branco', 'America/Rio_Branco'), ('America/Rosario', 'America/Rosario'), ('America/Santa_Isabel', 'America/Santa_Isabel'), ('America/Santarem', 'America/Santarem'), ('America/Santiago', 'America/Santiago'), ('America/Santo_Domingo', 'America/Santo_Domingo'), ('America/Sao_Paulo', 'America/Sao_Paulo'), ('America/Scoresbysund', 'America/Scoresbysund'), ('America/Shiprock', 'America/Shiprock'), ('America/Sitka', 'America/Sitka'), ('America/St_Barthelemy', 'America/St_Barthelemy'), ('America/St_Johns', 'America/St_Johns'), ('America/St_Kitts', 'America/St_Kitts'), ('America/St_Lucia', 'America/St_Lucia'), ('America/St_Thomas', 'America/St_Thomas'), ('America/St_Vincent', 'America/St_Vincent'), ('America/Swift_Current', 'America/Swift_Current'), ('America/Tegucigalpa', 'America/Tegucigalpa'), ('America/Thule', 'America/Thule'), ('America/Thunder_Bay', 'America/Thunder_Bay'), ('America/Tijuana', 'America/Tijuana'), ('America/Toronto', 'America/Toronto'), ('America/Tortola', 'America/Tortola'), ('America/Vancouver', 'America/Vancouver'), ('America/Virgin', 'America/Virgin'), ('America/Whitehorse', 'America/Whitehorse'), ('America/Winnipeg', 'America/Winnipeg'), ('America/Yakutat', 'America/Yakutat'), ('America/Yellowknife', 'America/Yellowknife'), ('Antarctica/Casey', 'Antarctica/Casey'), ('Antarctica/Davis', 'Antarctica/Davis'), ('Antarctica/DumontDUrville', 'Antarctica/DumontDUrville'), ('Antarctica/Macquarie', 'Antarctica/Macquarie'), ('Antarctica/Mawson', 'Antarctica/Mawson'), ('Antarctica/McMurdo', 'Antarctica/McMurdo'), ('Antarctica/Palmer', 'Antarctica/Palmer'), ('Antarctica/Rothera', 'Antarctica/Rothera'), ('Antarctica/South_Pole', 'Antarctica/South_Pole'), ('Antarctica/Syowa', 'Antarctica/Syowa'), ('Antarctica/Troll', 'Antarctica/Troll'), ('Antarctica/Vostok', 'Antarctica/Vostok'), ('Arctic/Longyearbyen', 'Arctic/Longyearbyen'), ('Asia/Aden', 'Asia/Aden'), ('Asia/Almaty', 'Asia/Almaty'), ('Asia/Amman', 'Asia/Amman'), ('Asia/Anadyr', 'Asia/Anadyr'), ('Asia/Aqtau', 'Asia/Aqtau'), ('Asia/Aqtobe', 'Asia/Aqtobe'), ('Asia/Ashgabat', 'Asia/Ashgabat'), ('Asia/Ashkhabad', 'Asia/Ashkhabad'), ('Asia/Atyrau', 'Asia/Atyrau'), 
('Asia/Baghdad', 'Asia/Baghdad'), ('Asia/Bahrain', 'Asia/Bahrain'), ('Asia/Baku', 'Asia/Baku'), ('Asia/Bangkok', 'Asia/Bangkok'), ('Asia/Barnaul', 'Asia/Barnaul'), ('Asia/Beirut', 'Asia/Beirut'), ('Asia/Bishkek', 'Asia/Bishkek'), ('Asia/Brunei', 'Asia/Brunei'), ('Asia/Calcutta', 'Asia/Calcutta'), ('Asia/Chita', 'Asia/Chita'), ('Asia/Choibalsan', 'Asia/Choibalsan'), ('Asia/Chongqing', 'Asia/Chongqing'), ('Asia/Chungking', 'Asia/Chungking'), ('Asia/Colombo', 'Asia/Colombo'), ('Asia/Dacca', 'Asia/Dacca'), ('Asia/Damascus', 'Asia/Damascus'), ('Asia/Dhaka', 'Asia/Dhaka'), ('Asia/Dili', 'Asia/Dili'), ('Asia/Dubai', 'Asia/Dubai'), ('Asia/Dushanbe', 'Asia/Dushanbe'), ('Asia/Famagusta', 'Asia/Famagusta'), ('Asia/Gaza', 'Asia/Gaza'), ('Asia/Harbin', 'Asia/Harbin'), ('Asia/Hebron', 'Asia/Hebron'), ('Asia/Ho_Chi_Minh', 'Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', 'Asia/Hong_Kong'), ('Asia/Hovd', 'Asia/Hovd'), ('Asia/Irkutsk', 'Asia/Irkutsk'), ('Asia/Istanbul', 'Asia/Istanbul'), ('Asia/Jakarta', 'Asia/Jakarta'), ('Asia/Jayapura', 'Asia/Jayapura'), ('Asia/Jerusalem', 'Asia/Jerusalem'), ('Asia/Kabul', 'Asia/Kabul'), ('Asia/Kamchatka', 'Asia/Kamchatka'), ('Asia/Karachi', 'Asia/Karachi'), ('Asia/Kashgar', 'Asia/Kashgar'), ('Asia/Kathmandu', 'Asia/Kathmandu'), ('Asia/Katmandu', 'Asia/Katmandu'), ('Asia/Khandyga', 'Asia/Khandyga'), ('Asia/Kolkata', 'Asia/Kolkata'), ('Asia/Krasnoyarsk', 'Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', 'Asia/Kuala_Lumpur'), ('Asia/Kuching', 'Asia/Kuching'), ('Asia/Kuwait', 'Asia/Kuwait'), ('Asia/Macao', 'Asia/Macao'), ('Asia/Macau', 'Asia/Macau'), ('Asia/Magadan', 'Asia/Magadan'), ('Asia/Makassar', 'Asia/Makassar'), ('Asia/Manila', 'Asia/Manila'), ('Asia/Muscat', 'Asia/Muscat'), ('Asia/Nicosia', 'Asia/Nicosia'), ('Asia/Novokuznetsk', 'Asia/Novokuznetsk'), ('Asia/Novosibirsk', 'Asia/Novosibirsk'), ('Asia/Omsk', 'Asia/Omsk'), ('Asia/Oral', 'Asia/Oral'), ('Asia/Phnom_Penh', 'Asia/Phnom_Penh'), ('Asia/Pontianak', 'Asia/Pontianak'), ('Asia/Pyongyang', 'Asia/Pyongyang'), ('Asia/Qatar', 'Asia/Qatar'), ('Asia/Qostanay', 'Asia/Qostanay'), ('Asia/Qyzylorda', 'Asia/Qyzylorda'), ('Asia/Rangoon', 'Asia/Rangoon'), ('Asia/Riyadh', 'Asia/Riyadh'), ('Asia/Saigon', 'Asia/Saigon'), ('Asia/Sakhalin', 'Asia/Sakhalin'), ('Asia/Samarkand', 'Asia/Samarkand'), ('Asia/Seoul', 'Asia/Seoul'), ('Asia/Shanghai', 'Asia/Shanghai'), ('Asia/Singapore', 'Asia/Singapore'), ('Asia/Srednekolymsk', 'Asia/Srednekolymsk'), ('Asia/Taipei', 'Asia/Taipei'), ('Asia/Tashkent', 'Asia/Tashkent'), ('Asia/Tbilisi', 'Asia/Tbilisi'), ('Asia/Tehran', 'Asia/Tehran'), ('Asia/Tel_Aviv', 'Asia/Tel_Aviv'), ('Asia/Thimbu', 'Asia/Thimbu'), ('Asia/Thimphu', 'Asia/Thimphu'), ('Asia/Tokyo', 'Asia/Tokyo'), ('Asia/Tomsk', 'Asia/Tomsk'), ('Asia/Ujung_Pandang', 'Asia/Ujung_Pandang'), ('Asia/Ulaanbaatar', 'Asia/Ulaanbaatar'), ('Asia/Ulan_Bator', 'Asia/Ulan_Bator'), ('Asia/Urumqi', 'Asia/Urumqi'), ('Asia/Ust-Nera', 'Asia/Ust-Nera'), ('Asia/Vientiane', 'Asia/Vientiane'), ('Asia/Vladivostok', 'Asia/Vladivostok'), ('Asia/Yakutsk', 'Asia/Yakutsk'), ('Asia/Yangon', 'Asia/Yangon'), ('Asia/Yekaterinburg', 'Asia/Yekaterinburg'), ('Asia/Yerevan', 'Asia/Yerevan'), ('Atlantic/Azores', 'Atlantic/Azores'), ('Atlantic/Bermuda', 'Atlantic/Bermuda'), ('Atlantic/Canary', 'Atlantic/Canary'), ('Atlantic/Cape_Verde', 'Atlantic/Cape_Verde'), ('Atlantic/Faeroe', 'Atlantic/Faeroe'), ('Atlantic/Faroe', 'Atlantic/Faroe'), ('Atlantic/Jan_Mayen', 'Atlantic/Jan_Mayen'), ('Atlantic/Madeira', 'Atlantic/Madeira'), ('Atlantic/Reykjavik', 'Atlantic/Reykjavik'), ('Atlantic/South_Georgia', 
'Atlantic/South_Georgia'), ('Atlantic/St_Helena', 'Atlantic/St_Helena'), ('Atlantic/Stanley', 'Atlantic/Stanley'), ('Australia/ACT', 'Australia/ACT'), ('Australia/Adelaide', 'Australia/Adelaide'), ('Australia/Brisbane', 'Australia/Brisbane'), ('Australia/Broken_Hill', 'Australia/Broken_Hill'), ('Australia/Canberra', 'Australia/Canberra'), ('Australia/Currie', 'Australia/Currie'), ('Australia/Darwin', 'Australia/Darwin'), ('Australia/Eucla', 'Australia/Eucla'), ('Australia/Hobart', 'Australia/Hobart'), ('Australia/LHI', 'Australia/LHI'), ('Australia/Lindeman', 'Australia/Lindeman'), ('Australia/Lord_Howe', 'Australia/Lord_Howe'), ('Australia/Melbourne', 'Australia/Melbourne'), ('Australia/NSW', 'Australia/NSW'), ('Australia/North', 'Australia/North'), ('Australia/Perth', 'Australia/Perth'), ('Australia/Queensland', 'Australia/Queensland'), ('Australia/South', 'Australia/South'), ('Australia/Sydney', 'Australia/Sydney'), ('Australia/Tasmania', 'Australia/Tasmania'), ('Australia/Victoria', 'Australia/Victoria'), ('Australia/West', 'Australia/West'), ('Australia/Yancowinna', 'Australia/Yancowinna'), ('Brazil/Acre', 'Brazil/Acre'), ('Brazil/DeNoronha', 'Brazil/DeNoronha'), ('Brazil/East', 'Brazil/East'), ('Brazil/West', 'Brazil/West'), ('CET', 'CET'), ('CST6CDT', 'CST6CDT'), ('Canada/Atlantic', 'Canada/Atlantic'), ('Canada/Central', 'Canada/Central'), ('Canada/Eastern', 'Canada/Eastern'), ('Canada/Mountain', 'Canada/Mountain'), ('Canada/Newfoundland', 'Canada/Newfoundland'), ('Canada/Pacific', 'Canada/Pacific'), ('Canada/Saskatchewan', 'Canada/Saskatchewan'), ('Canada/Yukon', 'Canada/Yukon'), ('Chile/Continental', 'Chile/Continental'), ('Chile/EasterIsland', 'Chile/EasterIsland'), ('Cuba', 'Cuba'), ('EET', 'EET'), ('EST', 'EST'), ('EST5EDT', 'EST5EDT'), ('Egypt', 'Egypt'), ('Eire', 'Eire'), ('Etc/GMT', 'Etc/GMT'), ('Etc/GMT+0', 'Etc/GMT+0'), ('Etc/GMT+1', 'Etc/GMT+1'), ('Etc/GMT+10', 'Etc/GMT+10'), ('Etc/GMT+11', 'Etc/GMT+11'), ('Etc/GMT+12', 'Etc/GMT+12'), ('Etc/GMT+2', 'Etc/GMT+2'), ('Etc/GMT+3', 'Etc/GMT+3'), ('Etc/GMT+4', 'Etc/GMT+4'), ('Etc/GMT+5', 'Etc/GMT+5'), ('Etc/GMT+6', 'Etc/GMT+6'), ('Etc/GMT+7', 'Etc/GMT+7'), ('Etc/GMT+8', 'Etc/GMT+8'), ('Etc/GMT+9', 'Etc/GMT+9'), ('Etc/GMT-0', 'Etc/GMT-0'), ('Etc/GMT-1', 'Etc/GMT-1'), ('Etc/GMT-10', 'Etc/GMT-10'), ('Etc/GMT-11', 'Etc/GMT-11'), ('Etc/GMT-12', 'Etc/GMT-12'), ('Etc/GMT-13', 'Etc/GMT-13'), ('Etc/GMT-14', 'Etc/GMT-14'), ('Etc/GMT-2', 'Etc/GMT-2'), ('Etc/GMT-3', 'Etc/GMT-3'), ('Etc/GMT-4', 'Etc/GMT-4'), ('Etc/GMT-5', 'Etc/GMT-5'), ('Etc/GMT-6', 'Etc/GMT-6'), ('Etc/GMT-7', 'Etc/GMT-7'), ('Etc/GMT-8', 'Etc/GMT-8'), ('Etc/GMT-9', 'Etc/GMT-9'), ('Etc/GMT0', 'Etc/GMT0'), ('Etc/Greenwich', 'Etc/Greenwich'), ('Etc/UCT', 'Etc/UCT'), ('Etc/UTC', 'Etc/UTC'), ('Etc/Universal', 'Etc/Universal'), ('Etc/Zulu', 'Etc/Zulu'), ('Europe/Amsterdam', 'Europe/Amsterdam'), ('Europe/Andorra', 'Europe/Andorra'), ('Europe/Astrakhan', 'Europe/Astrakhan'), ('Europe/Athens', 'Europe/Athens'), ('Europe/Belfast', 'Europe/Belfast'), ('Europe/Belgrade', 'Europe/Belgrade'), ('Europe/Berlin', 'Europe/Berlin'), ('Europe/Bratislava', 'Europe/Bratislava'), ('Europe/Brussels', 'Europe/Brussels'), ('Europe/Bucharest', 'Europe/Bucharest'), ('Europe/Budapest', 'Europe/Budapest'), ('Europe/Busingen', 'Europe/Busingen'), ('Europe/Chisinau', 'Europe/Chisinau'), ('Europe/Copenhagen', 'Europe/Copenhagen'), ('Europe/Dublin', 'Europe/Dublin'), ('Europe/Gibraltar', 'Europe/Gibraltar'), ('Europe/Guernsey', 'Europe/Guernsey'), ('Europe/Helsinki', 'Europe/Helsinki'), 
('Europe/Isle_of_Man', 'Europe/Isle_of_Man'), ('Europe/Istanbul', 'Europe/Istanbul'), ('Europe/Jersey', 'Europe/Jersey'), ('Europe/Kaliningrad', 'Europe/Kaliningrad'), ('Europe/Kiev', 'Europe/Kiev'), ('Europe/Kirov', 'Europe/Kirov'), ('Europe/Lisbon', 'Europe/Lisbon'), ('Europe/Ljubljana', 'Europe/Ljubljana'), ('Europe/London', 'Europe/London'), ('Europe/Luxembourg', 'Europe/Luxembourg'), ('Europe/Madrid', 'Europe/Madrid'), ('Europe/Malta', 'Europe/Malta'), ('Europe/Mariehamn', 'Europe/Mariehamn'), ('Europe/Minsk', 'Europe/Minsk'), ('Europe/Monaco', 'Europe/Monaco'), ('Europe/Moscow', 'Europe/Moscow'), ('Europe/Nicosia', 'Europe/Nicosia'), ('Europe/Oslo', 'Europe/Oslo'), ('Europe/Paris', 'Europe/Paris'), ('Europe/Podgorica', 'Europe/Podgorica'), ('Europe/Prague', 'Europe/Prague'), ('Europe/Riga', 'Europe/Riga'), ('Europe/Rome', 'Europe/Rome'), ('Europe/Samara', 'Europe/Samara'), ('Europe/San_Marino', 'Europe/San_Marino'), ('Europe/Sarajevo', 'Europe/Sarajevo'), ('Europe/Saratov', 'Europe/Saratov'), ('Europe/Simferopol', 'Europe/Simferopol'), ('Europe/Skopje', 'Europe/Skopje'), ('Europe/Sofia', 'Europe/Sofia'), ('Europe/Stockholm', 'Europe/Stockholm'), ('Europe/Tallinn', 'Europe/Tallinn'), ('Europe/Tirane', 'Europe/Tirane'), ('Europe/Tiraspol', 'Europe/Tiraspol'), ('Europe/Ulyanovsk', 'Europe/Ulyanovsk'), ('Europe/Uzhgorod', 'Europe/Uzhgorod'), ('Europe/Vaduz', 'Europe/Vaduz'), ('Europe/Vatican', 'Europe/Vatican'), ('Europe/Vienna', 'Europe/Vienna'), ('Europe/Vilnius', 'Europe/Vilnius'), ('Europe/Volgograd', 'Europe/Volgograd'), ('Europe/Warsaw', 'Europe/Warsaw'), ('Europe/Zagreb', 'Europe/Zagreb'), ('Europe/Zaporozhye', 'Europe/Zaporozhye'), ('Europe/Zurich', 'Europe/Zurich'), ('GB', 'GB'), ('GB-Eire', 'GB-Eire'), ('GMT', 'GMT'), ('GMT+0', 'GMT+0'), ('GMT-0', 'GMT-0'), ('GMT0', 'GMT0'), ('Greenwich', 'Greenwich'), ('HST', 'HST'), ('Hongkong', 'Hongkong'), ('Iceland', 'Iceland'), ('Indian/Antananarivo', 'Indian/Antananarivo'), ('Indian/Chagos', 'Indian/Chagos'), ('Indian/Christmas', 'Indian/Christmas'), ('Indian/Cocos', 'Indian/Cocos'), ('Indian/Comoro', 'Indian/Comoro'), ('Indian/Kerguelen', 'Indian/Kerguelen'), ('Indian/Mahe', 'Indian/Mahe'), ('Indian/Maldives', 'Indian/Maldives'), ('Indian/Mauritius', 'Indian/Mauritius'), ('Indian/Mayotte', 'Indian/Mayotte'), ('Indian/Reunion', 'Indian/Reunion'), ('Iran', 'Iran'), ('Israel', 'Israel'), ('Jamaica', 'Jamaica'), ('Japan', 'Japan'), ('Kwajalein', 'Kwajalein'), ('Libya', 'Libya'), ('MET', 'MET'), ('MST', 'MST'), ('MST7MDT', 'MST7MDT'), ('Mexico/BajaNorte', 'Mexico/BajaNorte'), ('Mexico/BajaSur', 'Mexico/BajaSur'), ('Mexico/General', 'Mexico/General'), ('NZ', 'NZ'), ('NZ-CHAT', 'NZ-CHAT'), ('Navajo', 'Navajo'), ('PRC', 'PRC'), ('PST8PDT', 'PST8PDT'), ('Pacific/Apia', 'Pacific/Apia'), ('Pacific/Auckland', 'Pacific/Auckland'), ('Pacific/Bougainville', 'Pacific/Bougainville'), ('Pacific/Chatham', 'Pacific/Chatham'), ('Pacific/Chuuk', 'Pacific/Chuuk'), ('Pacific/Easter', 'Pacific/Easter'), ('Pacific/Efate', 'Pacific/Efate'), ('Pacific/Enderbury', 'Pacific/Enderbury'), ('Pacific/Fakaofo', 'Pacific/Fakaofo'), ('Pacific/Fiji', 'Pacific/Fiji'), ('Pacific/Funafuti', 'Pacific/Funafuti'), ('Pacific/Galapagos', 'Pacific/Galapagos'), ('Pacific/Gambier', 'Pacific/Gambier'), ('Pacific/Guadalcanal', 'Pacific/Guadalcanal'), ('Pacific/Guam', 'Pacific/Guam'), ('Pacific/Honolulu', 'Pacific/Honolulu'), ('Pacific/Johnston', 'Pacific/Johnston'), ('Pacific/Kiritimati', 'Pacific/Kiritimati'), ('Pacific/Kosrae', 'Pacific/Kosrae'), ('Pacific/Kwajalein', 
'Pacific/Kwajalein'), ('Pacific/Majuro', 'Pacific/Majuro'), ('Pacific/Marquesas', 'Pacific/Marquesas'), ('Pacific/Midway', 'Pacific/Midway'), ('Pacific/Nauru', 'Pacific/Nauru'), ('Pacific/Niue', 'Pacific/Niue'), ('Pacific/Norfolk', 'Pacific/Norfolk'), ('Pacific/Noumea', 'Pacific/Noumea'), ('Pacific/Pago_Pago', 'Pacific/Pago_Pago'), ('Pacific/Palau', 'Pacific/Palau'), ('Pacific/Pitcairn', 'Pacific/Pitcairn'), ('Pacific/Pohnpei', 'Pacific/Pohnpei'), ('Pacific/Ponape', 'Pacific/Ponape'), ('Pacific/Port_Moresby', 'Pacific/Port_Moresby'), ('Pacific/Rarotonga', 'Pacific/Rarotonga'), ('Pacific/Saipan', 'Pacific/Saipan'), ('Pacific/Samoa', 'Pacific/Samoa'), ('Pacific/Tahiti', 'Pacific/Tahiti'), ('Pacific/Tarawa', 'Pacific/Tarawa'), ('Pacific/Tongatapu', 'Pacific/Tongatapu'), ('Pacific/Truk', 'Pacific/Truk'), ('Pacific/Wake', 'Pacific/Wake'), ('Pacific/Wallis', 'Pacific/Wallis'), ('Pacific/Yap', 'Pacific/Yap'), ('Poland', 'Poland'), ('Portugal', 'Portugal'), ('ROC', 'ROC'), ('ROK', 'ROK'), ('Singapore', 'Singapore'), ('Turkey', 'Turkey'), ('UCT', 'UCT'), ('US/Alaska', 'US/Alaska'), ('US/Aleutian', 'US/Aleutian'), ('US/Arizona', 'US/Arizona'), ('US/Central', 'US/Central'), ('US/East-Indiana', 'US/East-Indiana'), ('US/Eastern', 'US/Eastern'), ('US/Hawaii', 'US/Hawaii'), ('US/Indiana-Starke', 'US/Indiana-Starke'), ('US/Michigan', 'US/Michigan'), ('US/Mountain', 'US/Mountain'), ('US/Pacific', 'US/Pacific'), ('US/Samoa', 'US/Samoa'), ('UTC', 'UTC'), ('Universal', 'Universal'), ('W-SU', 'W-SU'), ('WET', 'WET'), ('Zulu', 'Zulu')], default='UTC', max_length=50)),
('display_endpoint_uri', models.BooleanField(default=False, help_text='Displays the full endpoint URI in the endpoint view.', verbose_name='Display Endpoint Full URI')),
('enable_product_grade', models.BooleanField(default=False, help_text='Displays a grade letter next to a product to show the overall health.', verbose_name='Enable Product Grading')),
('product_grade', models.CharField(blank=True, max_length=800)),
('product_grade_a', models.IntegerField(default=90, help_text="Percentage score for an 'A' >=", verbose_name='Grade A')),
('product_grade_b', models.IntegerField(default=80, help_text="Percentage score for a 'B' >=", verbose_name='Grade B')),
('product_grade_c', models.IntegerField(default=70, help_text="Percentage score for a 'C' >=", verbose_name='Grade C')),
('product_grade_d', models.IntegerField(default=60, help_text="Percentage score for a 'D' >=", verbose_name='Grade D')),
('product_grade_f', models.IntegerField(default=59, help_text="Percentage score for an 'F' <=", verbose_name='Grade F')),
('enable_benchmark', models.BooleanField(default=True, help_text='Enables Benchmarks such as the OWASP ASVS (Application Security Verification Standard)', verbose_name='Enable Benchmarks')),
('enable_template_match', models.BooleanField(default=False, help_text='Enables global remediation advice matched on CWE and title. The matched template text replaces the mitigation, impact and references on a finding. Useful for providing consistent impact and remediation advice regardless of the scanner.', verbose_name='Enable Remediation Advice')),
('engagement_auto_close', models.BooleanField(default=False, help_text='Closes an engagement after 3 days (default) past its due date, taking the last update into account.', verbose_name='Enable Engagement Auto-Close')),
('engagement_auto_close_days', models.IntegerField(default=3, help_text='Closes an engagement after the specified number of days past its due date, taking the last update into account.', verbose_name='Engagement Auto-Close Days')),
('enable_finding_sla', models.BooleanField(default=True, help_text='Enables Finding SLAs for time to remediate.', verbose_name='Enable Finding SLAs')),
('sla_critical', models.IntegerField(default=7, help_text='# of days to remediate a critical finding.', verbose_name='Critical Finding SLA Days')),
('sla_high', models.IntegerField(default=30, help_text='# of days to remediate a high finding.', verbose_name='High Finding SLA Days')),
('sla_medium', models.IntegerField(default=90, help_text='# of days to remediate a medium finding.', verbose_name='Medium Finding SLA Days')),
('sla_low', models.IntegerField(default=120, help_text='# of days to remediate a low finding.', verbose_name='Low Finding SLA Days')),
],
),
migrations.AlterField(
model_name='finding',
name='file_path',
field=models.CharField(blank=True, max_length=4000, null=True),
),
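# Links a finding or an engagement one-to-one to its JIRA ticket; both links are optional (blank/null).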
migrations.CreateModel(
name='JIRA_Issue',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('jira_id', models.CharField(max_length=200)),
('jira_key', models.CharField(max_length=200)),
('engagement', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.engagement')),
('finding', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.finding')),
],
),
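# Mirrors the remote SonarQube issue: its unique key plus the last known status and type.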
migrations.CreateModel(
name='Sonarqube_Issue',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(help_text='SonarQube issue key', max_length=30, unique=True)),
('status', models.CharField(help_text='SonarQube issue status', max_length=20)),
('type', models.CharField(help_text='SonarQube issue type', max_length=15)),
],
),
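# Child rules attach extra match conditions (Matches/Contains on a Finding field) to a parent Rule;
# the FK cascades, so deleting the parent removes its children.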
migrations.CreateModel(
name='Child_Rule',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('operator', models.CharField(choices=[('Matches', 'Matches'), ('Contains', 'Contains')], max_length=30)),
('model_object', models.CharField(choices=[('Finding', 'Finding')], max_length=30)),
('match_field', models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue')], max_length=200)),
('match_text', models.TextField()),
('parent_rule', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.rule')),
],
),
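# Maps a DefectDojo product to a SonarQube project key and the Tool_Configuration used to reach that server.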
migrations.CreateModel(
name='Sonarqube_Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sonarqube_project_key', models.CharField(blank=True, max_length=200, null=True, verbose_name='SonarQube Project Key')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
('sonarqube_tool_config', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.tool_configuration', verbose_name='SonarQube Configuration')),
],
),
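# Audit trail of status transitions between a finding and its SonarQube issue, newest first (ordering is '-created').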
migrations.CreateModel(
name='Sonarqube_Issue_Transition',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('finding_status', models.CharField(max_length=100)),
('sonarqube_status', models.CharField(max_length=50)),
('transitions', models.CharField(max_length=100)),
('sonarqube_issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.sonarqube_issue')),
],
options={
'ordering': ('-created',),
},
),
migrations.AddField(
model_name='finding',
name='sonarqube_issue',
field=models.ForeignKey(blank=True, help_text='SonarQube issue', null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.sonarqube_issue'),
),
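# Data migration: update_collation/rollback_collation are defined elsewhere in this module.
# atomic=False runs the operation outside a transaction wrapper, presumably because collation
# changes on large tables cannot (or should not) run atomically.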
migrations.RunPython(
code=update_collation,
reverse_code=rollback_collation,
atomic=False,
),
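# Categorizes notes; the is_single/is_active/is_mandatory flags are enforced by application code, not the schema.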
migrations.CreateModel(
name='Note_Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, unique=True)),
('description', models.CharField(max_length=200)),
('is_single', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
('is_mandatory', models.BooleanField(default=True)),
],
),
migrations.AddField(
model_name='notehistory',
name='note_type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.note_type'),
),
migrations.AddField(
model_name='notes',
name='note_type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='note_type', to='dojo.note_type'),
),
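# NB: 'repsonse' in the next field name appears to mirror a misspelling in the model itself,
# so it is preserved verbatim here; renaming it only in the migration would desync the schema.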
migrations.AddField(
model_name='system_settings',
name='allow_anonymous_survey_repsonse',
field=models.BooleanField(default=False, help_text='Allow anyone with a link to the survey to answer it', verbose_name='Allow Anonymous Survey Responses'),
),
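# Strict CVE-only pattern at first; later operations below relax it to generic vulnerability IDs
# ('ABC-9999-9999', then '^[A-Z]{1,10}(-\d+)+$').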
migrations.AlterField(
model_name='finding',
name='cve',
field=models.CharField(max_length=20, null=True, validators=[django.core.validators.RegexValidator(message="CVE must be entered in the format: 'CVE-9999-9999'. ", regex='^CVE-\\d{4}-\\d{4,7}$')]),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['cve'], name='dojo_findin_cve_dccd4b_idx'),
),
migrations.AddField(
model_name='system_settings',
name='credentials',
field=models.CharField(blank=True, max_length=3000),
),
migrations.AddField(
model_name='system_settings',
name='column_widths',
field=models.CharField(blank=True, max_length=1500),
),
migrations.AddField(
model_name='system_settings',
name='drive_folder_ID',
field=models.CharField(blank=True, max_length=100),
),
migrations.AddField(
model_name='system_settings',
name='enable_google_sheets',
field=models.BooleanField(blank=True, default=False, null=True),
),
migrations.AddField(
model_name='finding',
name='nb_occurences',
field=models.IntegerField(blank=True, help_text='Number of occurrences in the source tool when several vulnerabilities were found and aggregated by the scanner', null=True, verbose_name='Number of occurrences'),
),
migrations.AddField(
model_name='finding',
name='sast_sink_object',
field=models.CharField(blank=True, help_text='Sink object (variable, function...) of the attack vector', max_length=500, null=True),
),
migrations.AddField(
model_name='finding',
name='sast_source_file_path',
field=models.CharField(blank=True, help_text='Source filepath of the attack vector', max_length=4000, null=True),
),
migrations.AddField(
model_name='finding',
name='sast_source_line',
field=models.IntegerField(blank=True, help_text='Source line number of the attack vector', null=True, verbose_name='Line number'),
),
migrations.AddField(
model_name='finding',
name='sast_source_object',
field=models.CharField(blank=True, help_text='Source object (variable, function...) of the attack vector', max_length=500, null=True),
),
migrations.AddField(
model_name='finding',
name='unique_id_from_tool',
field=models.CharField(blank=True, help_text='Technical id of the vulnerability in the source tool. Allows unique vulnerabilities to be tracked', max_length=500, null=True),
),
migrations.AlterField(
model_name='child_rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='finding',
name='file_path',
field=models.CharField(blank=True, help_text='File name with path. For SAST, when source (start of the attack vector) and sink (end of the attack vector) information are available, put sink information here', max_length=4000, null=True),
),
migrations.AlterField(
model_name='finding',
name='line',
field=models.IntegerField(blank=True, help_text='Line number. For SAST, when source (start of the attack vector) and sink (end of the attack vector) information are available, put sink information here', null=True, verbose_name='Line number'),
),
migrations.AlterField(
model_name='rule',
name='applied_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='finding',
name='cve',
field=models.CharField(max_length=28, null=True, validators=[django.core.validators.RegexValidator(message="Vulnerability ID must be entered in the format: 'ABC-9999-9999'. ", regex='^[A-Z]{1,10}-\\d{4}-\\d{4,12}$')]),
),
migrations.AlterField(
model_name='finding_template',
name='cve',
field=models.CharField(max_length=28, null=True, validators=[django.core.validators.RegexValidator(message="Vulnerability ID must be entered in the format: 'ABC-9999-9999'. ", regex='^[A-Z]{1,10}-\\d{4}-\\d{4,12}$')]),
),
migrations.AlterField(
model_name='jira_conf',
name='default_issue_type',
field=models.CharField(choices=[('Task', 'Task'), ('Story', 'Story'), ('Epic', 'Epic'), ('Spike', 'Spike'), ('Bug', 'Bug'), ('Security', 'Security')], default='Bug', max_length=9),
),
migrations.CreateModel(
name='BannerConf',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('banner_enable', models.BooleanField(blank=True, default=False, null=True)),
('banner_message', models.CharField(default='', help_text="This message will be displayed on the login page. It can contain basic HTML tags, for example <a href='https://example.com' style='color: #337ab7;' target='_blank'>https://example.com</a>", max_length=500)),
],
),
migrations.AlterField(
model_name='jira_conf',
name='default_issue_type',
field=models.CharField(choices=[('Task', 'Task'), ('Story', 'Story'), ('Epic', 'Epic'), ('Spike', 'Spike'), ('Bug', 'Bug'), ('Security', 'Security')], default='Bug', help_text='You can define extra issue types in settings.py', max_length=15),
),
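# Batch of single-column indexes on Finding fields that list views commonly filter on.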
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['false_p'], name='dojo_findin_false_p_aac0c7_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['verified'], name='dojo_findin_verifie_beb0fc_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['mitigated'], name='dojo_findin_mitigat_946a13_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['active'], name='dojo_findin_active_d51077_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['date'], name='dojo_findin_date_8e9143_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['out_of_scope'], name='dojo_findin_out_of__26856e_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['numerical_severity'], name='dojo_findin_numeric_83d93b_idx'),
),
migrations.AlterField(
model_name='finding',
name='title',
field=models.CharField(max_length=511),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['title'], name='dojo_findin_title_78f900_idx'),
),
migrations.AlterField(
model_name='finding',
name='cve',
field=models.CharField(help_text='CVE or other vulnerability identifier', max_length=28, null=True, validators=[django.core.validators.RegexValidator(message="Vulnerability ID must be entered in the format: 'ABC-9999-9999'.", regex='^[A-Z]{1,10}(-\\d+)+$')]),
),
migrations.AlterField(
model_name='finding_template',
name='cve',
field=models.CharField(max_length=28, null=True, validators=[django.core.validators.RegexValidator(message="Vulnerability ID must be entered in the format: 'ABC-9999-9999'.", regex='^[A-Z]{1,10}(-\\d+)+$')]),
),
migrations.AlterModelOptions(
name='product_type',
options={'ordering': ('name',)},
),
migrations.AddField(
model_name='finding',
name='component_name',
field=models.CharField(blank=True, help_text='Name of the component containing the finding. ', max_length=200, null=True),
),
migrations.AddField(
model_name='finding',
name='component_version',
field=models.CharField(blank=True, help_text='Version of the component.', max_length=100, null=True),
),
migrations.AlterField(
model_name='child_rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='applied_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AddField(
model_name='system_settings',
name='enable_auditlog',
field=models.BooleanField(default=True, help_text="With this setting turned on, Dojo maintains an audit log of changes made to entities (Findings, Tests, Engagements, Products, ...). If you run a big import you may want to disable this, because of the way django-auditlog currently works there's a big performance hit, especially during (re-)imports.", verbose_name='Enable audit logging'),
),
migrations.RemoveField(
model_name='finding',
name='duplicate_list',
),
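# GitHub integration models, structured like the JIRA ones above: API configuration (GITHUB_Conf),
# cached issue details, per-product repository mapping (GITHUB_PKey), and issue links.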
migrations.CreateModel(
name='GITHUB_Clone',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('github_id', models.CharField(max_length=200)),
('github_clone_id', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='GITHUB_Conf',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('configuration_name', models.CharField(default='', help_text='Enter a name to give to this configuration', max_length=2000)),
('api_key', models.CharField(default='', help_text='Enter your GitHub API key', max_length=2000)),
],
),
migrations.CreateModel(
name='GITHUB_Details_Cache',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('github_id', models.CharField(max_length=200)),
('github_key', models.CharField(max_length=200)),
('github_status', models.CharField(max_length=200)),
('github_resolution', models.CharField(max_length=200)),
],
),
migrations.AddField(
model_name='system_settings',
name='enable_github',
field=models.BooleanField(default=False, verbose_name='Enable GitHub integration'),
),
migrations.CreateModel(
name='GITHUB_Issue',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('issue_id', models.CharField(max_length=200)),
('issue_url', models.URLField(max_length=2000, verbose_name='GitHub issue URL')),
('finding', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.finding')),
],
),
migrations.AlterField(
model_name='jira_pkey',
name='push_all_issues',
field=models.BooleanField(blank=True, default=False, help_text='Automatically maintain parity with JIRA. Always create and update JIRA tickets for findings in this Product.'),
),
migrations.CreateModel(
name='GITHUB_PKey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('git_project', models.CharField(blank=True, help_text='Specify your project location. (:user/:repo)', max_length=200, verbose_name='GitHub project')),
('git_push_notes', models.BooleanField(blank=True, default=False, help_text='Notes added to findings will be automatically added to the corresponding GitHub issue')),
('git_conf', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.github_conf', verbose_name='GitHub Configuration')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
],
),
migrations.AddField(
model_name='system_settings',
name='email_address',
field=models.EmailField(blank=True, max_length=100),
),
migrations.RenameField(
model_name='notifications',
old_name='results_added',
new_name='scan_added',
),
migrations.AlterField(
model_name='notifications',
name='user',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user'),
),
migrations.AddField(
model_name='notifications',
name='product',
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.product'),
),
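# Enforce at most one Notifications row per (user, product) pair at the database level.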
migrations.AddConstraint(
model_name='notifications',
constraint=models.UniqueConstraint(fields=('user', 'product'), name='notifications_user_product'),
),
migrations.AlterField(
model_name='notifications',
name='scan_added',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', help_text='Triggered whenever a (re-)import has been done that created/updated/closed findings.', max_length=24),
),
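# Re-declares the time_zone choices; the list was presumably regenerated from a newer tz database release.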
migrations.AlterField(
model_name='system_settings',
name='time_zone',
field=models.CharField(choices=[('Africa/Abidjan', 'Africa/Abidjan'), ('Africa/Accra', 'Africa/Accra'), ('Africa/Addis_Ababa', 'Africa/Addis_Ababa'), ('Africa/Algiers', 'Africa/Algiers'), ('Africa/Asmara', 'Africa/Asmara'), ('Africa/Asmera', 'Africa/Asmera'), ('Africa/Bamako', 'Africa/Bamako'), ('Africa/Bangui', 'Africa/Bangui'), ('Africa/Banjul', 'Africa/Banjul'), ('Africa/Bissau', 'Africa/Bissau'), ('Africa/Blantyre', 'Africa/Blantyre'), ('Africa/Brazzaville', 'Africa/Brazzaville'), ('Africa/Bujumbura', 'Africa/Bujumbura'), ('Africa/Cairo', 'Africa/Cairo'), ('Africa/Casablanca', 'Africa/Casablanca'), ('Africa/Ceuta', 'Africa/Ceuta'), ('Africa/Conakry', 'Africa/Conakry'), ('Africa/Dakar', 'Africa/Dakar'), ('Africa/Dar_es_Salaam', 'Africa/Dar_es_Salaam'), ('Africa/Djibouti', 'Africa/Djibouti'), ('Africa/Douala', 'Africa/Douala'), ('Africa/El_Aaiun', 'Africa/El_Aaiun'), ('Africa/Freetown', 'Africa/Freetown'), ('Africa/Gaborone', 'Africa/Gaborone'), ('Africa/Harare', 'Africa/Harare'), ('Africa/Johannesburg', 'Africa/Johannesburg'), ('Africa/Juba', 'Africa/Juba'), ('Africa/Kampala', 'Africa/Kampala'), ('Africa/Khartoum', 'Africa/Khartoum'), ('Africa/Kigali', 'Africa/Kigali'), ('Africa/Kinshasa', 'Africa/Kinshasa'), ('Africa/Lagos', 'Africa/Lagos'), ('Africa/Libreville', 'Africa/Libreville'), ('Africa/Lome', 'Africa/Lome'), ('Africa/Luanda', 'Africa/Luanda'), ('Africa/Lubumbashi', 'Africa/Lubumbashi'), ('Africa/Lusaka', 'Africa/Lusaka'), ('Africa/Malabo', 'Africa/Malabo'), ('Africa/Maputo', 'Africa/Maputo'), ('Africa/Maseru', 'Africa/Maseru'), ('Africa/Mbabane', 'Africa/Mbabane'), ('Africa/Mogadishu', 'Africa/Mogadishu'), ('Africa/Monrovia', 'Africa/Monrovia'), ('Africa/Nairobi', 'Africa/Nairobi'), ('Africa/Ndjamena', 'Africa/Ndjamena'), ('Africa/Niamey', 'Africa/Niamey'), ('Africa/Nouakchott', 'Africa/Nouakchott'), ('Africa/Ouagadougou', 'Africa/Ouagadougou'), ('Africa/Porto-Novo', 'Africa/Porto-Novo'), ('Africa/Sao_Tome', 'Africa/Sao_Tome'), ('Africa/Timbuktu', 'Africa/Timbuktu'), ('Africa/Tripoli', 'Africa/Tripoli'), ('Africa/Tunis', 'Africa/Tunis'), ('Africa/Windhoek', 'Africa/Windhoek'), ('America/Adak', 'America/Adak'), ('America/Anchorage', 'America/Anchorage'), ('America/Anguilla', 'America/Anguilla'), ('America/Antigua', 'America/Antigua'), ('America/Araguaina', 'America/Araguaina'), ('America/Argentina/Buenos_Aires', 'America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', 'America/Argentina/Catamarca'), ('America/Argentina/ComodRivadavia', 'America/Argentina/ComodRivadavia'), ('America/Argentina/Cordoba', 'America/Argentina/Cordoba'), ('America/Argentina/Jujuy', 'America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', 'America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', 'America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', 'America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', 'America/Argentina/Salta'), ('America/Argentina/San_Juan', 'America/Argentina/San_Juan'), ('America/Argentina/San_Luis', 'America/Argentina/San_Luis'), ('America/Argentina/Tucuman', 'America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', 'America/Argentina/Ushuaia'), ('America/Aruba', 'America/Aruba'), ('America/Asuncion', 'America/Asuncion'), ('America/Atikokan', 'America/Atikokan'), ('America/Atka', 'America/Atka'), ('America/Bahia', 'America/Bahia'), ('America/Bahia_Banderas', 'America/Bahia_Banderas'), ('America/Barbados', 'America/Barbados'), ('America/Belem', 'America/Belem'), ('America/Belize', 'America/Belize'), ('America/Blanc-Sablon', 
'America/Blanc-Sablon'), ('America/Boa_Vista', 'America/Boa_Vista'), ('America/Bogota', 'America/Bogota'), ('America/Boise', 'America/Boise'), ('America/Buenos_Aires', 'America/Buenos_Aires'), ('America/Cambridge_Bay', 'America/Cambridge_Bay'), ('America/Campo_Grande', 'America/Campo_Grande'), ('America/Cancun', 'America/Cancun'), ('America/Caracas', 'America/Caracas'), ('America/Catamarca', 'America/Catamarca'), ('America/Cayenne', 'America/Cayenne'), ('America/Cayman', 'America/Cayman'), ('America/Chicago', 'America/Chicago'), ('America/Chihuahua', 'America/Chihuahua'), ('America/Coral_Harbour', 'America/Coral_Harbour'), ('America/Cordoba', 'America/Cordoba'), ('America/Costa_Rica', 'America/Costa_Rica'), ('America/Creston', 'America/Creston'), ('America/Cuiaba', 'America/Cuiaba'), ('America/Curacao', 'America/Curacao'), ('America/Danmarkshavn', 'America/Danmarkshavn'), ('America/Dawson', 'America/Dawson'), ('America/Dawson_Creek', 'America/Dawson_Creek'), ('America/Denver', 'America/Denver'), ('America/Detroit', 'America/Detroit'), ('America/Dominica', 'America/Dominica'), ('America/Edmonton', 'America/Edmonton'), ('America/Eirunepe', 'America/Eirunepe'), ('America/El_Salvador', 'America/El_Salvador'), ('America/Ensenada', 'America/Ensenada'), ('America/Fort_Nelson', 'America/Fort_Nelson'), ('America/Fort_Wayne', 'America/Fort_Wayne'), ('America/Fortaleza', 'America/Fortaleza'), ('America/Glace_Bay', 'America/Glace_Bay'), ('America/Godthab', 'America/Godthab'), ('America/Goose_Bay', 'America/Goose_Bay'), ('America/Grand_Turk', 'America/Grand_Turk'), ('America/Grenada', 'America/Grenada'), ('America/Guadeloupe', 'America/Guadeloupe'), ('America/Guatemala', 'America/Guatemala'), ('America/Guayaquil', 'America/Guayaquil'), ('America/Guyana', 'America/Guyana'), ('America/Halifax', 'America/Halifax'), ('America/Havana', 'America/Havana'), ('America/Hermosillo', 'America/Hermosillo'), ('America/Indiana/Indianapolis', 'America/Indiana/Indianapolis'), ('America/Indiana/Knox', 'America/Indiana/Knox'), ('America/Indiana/Marengo', 'America/Indiana/Marengo'), ('America/Indiana/Petersburg', 'America/Indiana/Petersburg'), ('America/Indiana/Tell_City', 'America/Indiana/Tell_City'), ('America/Indiana/Vevay', 'America/Indiana/Vevay'), ('America/Indiana/Vincennes', 'America/Indiana/Vincennes'), ('America/Indiana/Winamac', 'America/Indiana/Winamac'), ('America/Indianapolis', 'America/Indianapolis'), ('America/Inuvik', 'America/Inuvik'), ('America/Iqaluit', 'America/Iqaluit'), ('America/Jamaica', 'America/Jamaica'), ('America/Jujuy', 'America/Jujuy'), ('America/Juneau', 'America/Juneau'), ('America/Kentucky/Louisville', 'America/Kentucky/Louisville'), ('America/Kentucky/Monticello', 'America/Kentucky/Monticello'), ('America/Knox_IN', 'America/Knox_IN'), ('America/Kralendijk', 'America/Kralendijk'), ('America/La_Paz', 'America/La_Paz'), ('America/Lima', 'America/Lima'), ('America/Los_Angeles', 'America/Los_Angeles'), ('America/Louisville', 'America/Louisville'), ('America/Lower_Princes', 'America/Lower_Princes'), ('America/Maceio', 'America/Maceio'), ('America/Managua', 'America/Managua'), ('America/Manaus', 'America/Manaus'), ('America/Marigot', 'America/Marigot'), ('America/Martinique', 'America/Martinique'), ('America/Matamoros', 'America/Matamoros'), ('America/Mazatlan', 'America/Mazatlan'), ('America/Mendoza', 'America/Mendoza'), ('America/Menominee', 'America/Menominee'), ('America/Merida', 'America/Merida'), ('America/Metlakatla', 'America/Metlakatla'), ('America/Mexico_City', 'America/Mexico_City'), 
('America/Miquelon', 'America/Miquelon'), ('America/Moncton', 'America/Moncton'), ('America/Monterrey', 'America/Monterrey'), ('America/Montevideo', 'America/Montevideo'), ('America/Montreal', 'America/Montreal'), ('America/Montserrat', 'America/Montserrat'), ('America/Nassau', 'America/Nassau'), ('America/New_York', 'America/New_York'), ('America/Nipigon', 'America/Nipigon'), ('America/Nome', 'America/Nome'), ('America/Noronha', 'America/Noronha'), ('America/North_Dakota/Beulah', 'America/North_Dakota/Beulah'), ('America/North_Dakota/Center', 'America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', 'America/North_Dakota/New_Salem'), ('America/Nuuk', 'America/Nuuk'), ('America/Ojinaga', 'America/Ojinaga'), ('America/Panama', 'America/Panama'), ('America/Pangnirtung', 'America/Pangnirtung'), ('America/Paramaribo', 'America/Paramaribo'), ('America/Phoenix', 'America/Phoenix'), ('America/Port-au-Prince', 'America/Port-au-Prince'), ('America/Port_of_Spain', 'America/Port_of_Spain'), ('America/Porto_Acre', 'America/Porto_Acre'), ('America/Porto_Velho', 'America/Porto_Velho'), ('America/Puerto_Rico', 'America/Puerto_Rico'), ('America/Punta_Arenas', 'America/Punta_Arenas'), ('America/Rainy_River', 'America/Rainy_River'), ('America/Rankin_Inlet', 'America/Rankin_Inlet'), ('America/Recife', 'America/Recife'), ('America/Regina', 'America/Regina'), ('America/Resolute', 'America/Resolute'), ('America/Rio_Branco', 'America/Rio_Branco'), ('America/Rosario', 'America/Rosario'), ('America/Santa_Isabel', 'America/Santa_Isabel'), ('America/Santarem', 'America/Santarem'), ('America/Santiago', 'America/Santiago'), ('America/Santo_Domingo', 'America/Santo_Domingo'), ('America/Sao_Paulo', 'America/Sao_Paulo'), ('America/Scoresbysund', 'America/Scoresbysund'), ('America/Shiprock', 'America/Shiprock'), ('America/Sitka', 'America/Sitka'), ('America/St_Barthelemy', 'America/St_Barthelemy'), ('America/St_Johns', 'America/St_Johns'), ('America/St_Kitts', 'America/St_Kitts'), ('America/St_Lucia', 'America/St_Lucia'), ('America/St_Thomas', 'America/St_Thomas'), ('America/St_Vincent', 'America/St_Vincent'), ('America/Swift_Current', 'America/Swift_Current'), ('America/Tegucigalpa', 'America/Tegucigalpa'), ('America/Thule', 'America/Thule'), ('America/Thunder_Bay', 'America/Thunder_Bay'), ('America/Tijuana', 'America/Tijuana'), ('America/Toronto', 'America/Toronto'), ('America/Tortola', 'America/Tortola'), ('America/Vancouver', 'America/Vancouver'), ('America/Virgin', 'America/Virgin'), ('America/Whitehorse', 'America/Whitehorse'), ('America/Winnipeg', 'America/Winnipeg'), ('America/Yakutat', 'America/Yakutat'), ('America/Yellowknife', 'America/Yellowknife'), ('Antarctica/Casey', 'Antarctica/Casey'), ('Antarctica/Davis', 'Antarctica/Davis'), ('Antarctica/DumontDUrville', 'Antarctica/DumontDUrville'), ('Antarctica/Macquarie', 'Antarctica/Macquarie'), ('Antarctica/Mawson', 'Antarctica/Mawson'), ('Antarctica/McMurdo', 'Antarctica/McMurdo'), ('Antarctica/Palmer', 'Antarctica/Palmer'), ('Antarctica/Rothera', 'Antarctica/Rothera'), ('Antarctica/South_Pole', 'Antarctica/South_Pole'), ('Antarctica/Syowa', 'Antarctica/Syowa'), ('Antarctica/Troll', 'Antarctica/Troll'), ('Antarctica/Vostok', 'Antarctica/Vostok'), ('Arctic/Longyearbyen', 'Arctic/Longyearbyen'), ('Asia/Aden', 'Asia/Aden'), ('Asia/Almaty', 'Asia/Almaty'), ('Asia/Amman', 'Asia/Amman'), ('Asia/Anadyr', 'Asia/Anadyr'), ('Asia/Aqtau', 'Asia/Aqtau'), ('Asia/Aqtobe', 'Asia/Aqtobe'), ('Asia/Ashgabat', 'Asia/Ashgabat'), ('Asia/Ashkhabad', 'Asia/Ashkhabad'), 
('Asia/Atyrau', 'Asia/Atyrau'), ('Asia/Baghdad', 'Asia/Baghdad'), ('Asia/Bahrain', 'Asia/Bahrain'), ('Asia/Baku', 'Asia/Baku'), ('Asia/Bangkok', 'Asia/Bangkok'), ('Asia/Barnaul', 'Asia/Barnaul'), ('Asia/Beirut', 'Asia/Beirut'), ('Asia/Bishkek', 'Asia/Bishkek'), ('Asia/Brunei', 'Asia/Brunei'), ('Asia/Calcutta', 'Asia/Calcutta'), ('Asia/Chita', 'Asia/Chita'), ('Asia/Choibalsan', 'Asia/Choibalsan'), ('Asia/Chongqing', 'Asia/Chongqing'), ('Asia/Chungking', 'Asia/Chungking'), ('Asia/Colombo', 'Asia/Colombo'), ('Asia/Dacca', 'Asia/Dacca'), ('Asia/Damascus', 'Asia/Damascus'), ('Asia/Dhaka', 'Asia/Dhaka'), ('Asia/Dili', 'Asia/Dili'), ('Asia/Dubai', 'Asia/Dubai'), ('Asia/Dushanbe', 'Asia/Dushanbe'), ('Asia/Famagusta', 'Asia/Famagusta'), ('Asia/Gaza', 'Asia/Gaza'), ('Asia/Harbin', 'Asia/Harbin'), ('Asia/Hebron', 'Asia/Hebron'), ('Asia/Ho_Chi_Minh', 'Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', 'Asia/Hong_Kong'), ('Asia/Hovd', 'Asia/Hovd'), ('Asia/Irkutsk', 'Asia/Irkutsk'), ('Asia/Istanbul', 'Asia/Istanbul'), ('Asia/Jakarta', 'Asia/Jakarta'), ('Asia/Jayapura', 'Asia/Jayapura'), ('Asia/Jerusalem', 'Asia/Jerusalem'), ('Asia/Kabul', 'Asia/Kabul'), ('Asia/Kamchatka', 'Asia/Kamchatka'), ('Asia/Karachi', 'Asia/Karachi'), ('Asia/Kashgar', 'Asia/Kashgar'), ('Asia/Kathmandu', 'Asia/Kathmandu'), ('Asia/Katmandu', 'Asia/Katmandu'), ('Asia/Khandyga', 'Asia/Khandyga'), ('Asia/Kolkata', 'Asia/Kolkata'), ('Asia/Krasnoyarsk', 'Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', 'Asia/Kuala_Lumpur'), ('Asia/Kuching', 'Asia/Kuching'), ('Asia/Kuwait', 'Asia/Kuwait'), ('Asia/Macao', 'Asia/Macao'), ('Asia/Macau', 'Asia/Macau'), ('Asia/Magadan', 'Asia/Magadan'), ('Asia/Makassar', 'Asia/Makassar'), ('Asia/Manila', 'Asia/Manila'), ('Asia/Muscat', 'Asia/Muscat'), ('Asia/Nicosia', 'Asia/Nicosia'), ('Asia/Novokuznetsk', 'Asia/Novokuznetsk'), ('Asia/Novosibirsk', 'Asia/Novosibirsk'), ('Asia/Omsk', 'Asia/Omsk'), ('Asia/Oral', 'Asia/Oral'), ('Asia/Phnom_Penh', 'Asia/Phnom_Penh'), ('Asia/Pontianak', 'Asia/Pontianak'), ('Asia/Pyongyang', 'Asia/Pyongyang'), ('Asia/Qatar', 'Asia/Qatar'), ('Asia/Qostanay', 'Asia/Qostanay'), ('Asia/Qyzylorda', 'Asia/Qyzylorda'), ('Asia/Rangoon', 'Asia/Rangoon'), ('Asia/Riyadh', 'Asia/Riyadh'), ('Asia/Saigon', 'Asia/Saigon'), ('Asia/Sakhalin', 'Asia/Sakhalin'), ('Asia/Samarkand', 'Asia/Samarkand'), ('Asia/Seoul', 'Asia/Seoul'), ('Asia/Shanghai', 'Asia/Shanghai'), ('Asia/Singapore', 'Asia/Singapore'), ('Asia/Srednekolymsk', 'Asia/Srednekolymsk'), ('Asia/Taipei', 'Asia/Taipei'), ('Asia/Tashkent', 'Asia/Tashkent'), ('Asia/Tbilisi', 'Asia/Tbilisi'), ('Asia/Tehran', 'Asia/Tehran'), ('Asia/Tel_Aviv', 'Asia/Tel_Aviv'), ('Asia/Thimbu', 'Asia/Thimbu'), ('Asia/Thimphu', 'Asia/Thimphu'), ('Asia/Tokyo', 'Asia/Tokyo'), ('Asia/Tomsk', 'Asia/Tomsk'), ('Asia/Ujung_Pandang', 'Asia/Ujung_Pandang'), ('Asia/Ulaanbaatar', 'Asia/Ulaanbaatar'), ('Asia/Ulan_Bator', 'Asia/Ulan_Bator'), ('Asia/Urumqi', 'Asia/Urumqi'), ('Asia/Ust-Nera', 'Asia/Ust-Nera'), ('Asia/Vientiane', 'Asia/Vientiane'), ('Asia/Vladivostok', 'Asia/Vladivostok'), ('Asia/Yakutsk', 'Asia/Yakutsk'), ('Asia/Yangon', 'Asia/Yangon'), ('Asia/Yekaterinburg', 'Asia/Yekaterinburg'), ('Asia/Yerevan', 'Asia/Yerevan'), ('Atlantic/Azores', 'Atlantic/Azores'), ('Atlantic/Bermuda', 'Atlantic/Bermuda'), ('Atlantic/Canary', 'Atlantic/Canary'), ('Atlantic/Cape_Verde', 'Atlantic/Cape_Verde'), ('Atlantic/Faeroe', 'Atlantic/Faeroe'), ('Atlantic/Faroe', 'Atlantic/Faroe'), ('Atlantic/Jan_Mayen', 'Atlantic/Jan_Mayen'), ('Atlantic/Madeira', 'Atlantic/Madeira'), ('Atlantic/Reykjavik', 
'Atlantic/Reykjavik'), ('Atlantic/South_Georgia', 'Atlantic/South_Georgia'), ('Atlantic/St_Helena', 'Atlantic/St_Helena'), ('Atlantic/Stanley', 'Atlantic/Stanley'), ('Australia/ACT', 'Australia/ACT'), ('Australia/Adelaide', 'Australia/Adelaide'), ('Australia/Brisbane', 'Australia/Brisbane'), ('Australia/Broken_Hill', 'Australia/Broken_Hill'), ('Australia/Canberra', 'Australia/Canberra'), ('Australia/Currie', 'Australia/Currie'), ('Australia/Darwin', 'Australia/Darwin'), ('Australia/Eucla', 'Australia/Eucla'), ('Australia/Hobart', 'Australia/Hobart'), ('Australia/LHI', 'Australia/LHI'), ('Australia/Lindeman', 'Australia/Lindeman'), ('Australia/Lord_Howe', 'Australia/Lord_Howe'), ('Australia/Melbourne', 'Australia/Melbourne'), ('Australia/NSW', 'Australia/NSW'), ('Australia/North', 'Australia/North'), ('Australia/Perth', 'Australia/Perth'), ('Australia/Queensland', 'Australia/Queensland'), ('Australia/South', 'Australia/South'), ('Australia/Sydney', 'Australia/Sydney'), ('Australia/Tasmania', 'Australia/Tasmania'), ('Australia/Victoria', 'Australia/Victoria'), ('Australia/West', 'Australia/West'), ('Australia/Yancowinna', 'Australia/Yancowinna'), ('Brazil/Acre', 'Brazil/Acre'), ('Brazil/DeNoronha', 'Brazil/DeNoronha'), ('Brazil/East', 'Brazil/East'), ('Brazil/West', 'Brazil/West'), ('CET', 'CET'), ('CST6CDT', 'CST6CDT'), ('Canada/Atlantic', 'Canada/Atlantic'), ('Canada/Central', 'Canada/Central'), ('Canada/Eastern', 'Canada/Eastern'), ('Canada/Mountain', 'Canada/Mountain'), ('Canada/Newfoundland', 'Canada/Newfoundland'), ('Canada/Pacific', 'Canada/Pacific'), ('Canada/Saskatchewan', 'Canada/Saskatchewan'), ('Canada/Yukon', 'Canada/Yukon'), ('Chile/Continental', 'Chile/Continental'), ('Chile/EasterIsland', 'Chile/EasterIsland'), ('Cuba', 'Cuba'), ('EET', 'EET'), ('EST', 'EST'), ('EST5EDT', 'EST5EDT'), ('Egypt', 'Egypt'), ('Eire', 'Eire'), ('Etc/GMT', 'Etc/GMT'), ('Etc/GMT+0', 'Etc/GMT+0'), ('Etc/GMT+1', 'Etc/GMT+1'), ('Etc/GMT+10', 'Etc/GMT+10'), ('Etc/GMT+11', 'Etc/GMT+11'), ('Etc/GMT+12', 'Etc/GMT+12'), ('Etc/GMT+2', 'Etc/GMT+2'), ('Etc/GMT+3', 'Etc/GMT+3'), ('Etc/GMT+4', 'Etc/GMT+4'), ('Etc/GMT+5', 'Etc/GMT+5'), ('Etc/GMT+6', 'Etc/GMT+6'), ('Etc/GMT+7', 'Etc/GMT+7'), ('Etc/GMT+8', 'Etc/GMT+8'), ('Etc/GMT+9', 'Etc/GMT+9'), ('Etc/GMT-0', 'Etc/GMT-0'), ('Etc/GMT-1', 'Etc/GMT-1'), ('Etc/GMT-10', 'Etc/GMT-10'), ('Etc/GMT-11', 'Etc/GMT-11'), ('Etc/GMT-12', 'Etc/GMT-12'), ('Etc/GMT-13', 'Etc/GMT-13'), ('Etc/GMT-14', 'Etc/GMT-14'), ('Etc/GMT-2', 'Etc/GMT-2'), ('Etc/GMT-3', 'Etc/GMT-3'), ('Etc/GMT-4', 'Etc/GMT-4'), ('Etc/GMT-5', 'Etc/GMT-5'), ('Etc/GMT-6', 'Etc/GMT-6'), ('Etc/GMT-7', 'Etc/GMT-7'), ('Etc/GMT-8', 'Etc/GMT-8'), ('Etc/GMT-9', 'Etc/GMT-9'), ('Etc/GMT0', 'Etc/GMT0'), ('Etc/Greenwich', 'Etc/Greenwich'), ('Etc/UCT', 'Etc/UCT'), ('Etc/UTC', 'Etc/UTC'), ('Etc/Universal', 'Etc/Universal'), ('Etc/Zulu', 'Etc/Zulu'), ('Europe/Amsterdam', 'Europe/Amsterdam'), ('Europe/Andorra', 'Europe/Andorra'), ('Europe/Astrakhan', 'Europe/Astrakhan'), ('Europe/Athens', 'Europe/Athens'), ('Europe/Belfast', 'Europe/Belfast'), ('Europe/Belgrade', 'Europe/Belgrade'), ('Europe/Berlin', 'Europe/Berlin'), ('Europe/Bratislava', 'Europe/Bratislava'), ('Europe/Brussels', 'Europe/Brussels'), ('Europe/Bucharest', 'Europe/Bucharest'), ('Europe/Budapest', 'Europe/Budapest'), ('Europe/Busingen', 'Europe/Busingen'), ('Europe/Chisinau', 'Europe/Chisinau'), ('Europe/Copenhagen', 'Europe/Copenhagen'), ('Europe/Dublin', 'Europe/Dublin'), ('Europe/Gibraltar', 'Europe/Gibraltar'), ('Europe/Guernsey', 'Europe/Guernsey'), 
('Europe/Helsinki', 'Europe/Helsinki'), ('Europe/Isle_of_Man', 'Europe/Isle_of_Man'), ('Europe/Istanbul', 'Europe/Istanbul'), ('Europe/Jersey', 'Europe/Jersey'), ('Europe/Kaliningrad', 'Europe/Kaliningrad'), ('Europe/Kiev', 'Europe/Kiev'), ('Europe/Kirov', 'Europe/Kirov'), ('Europe/Lisbon', 'Europe/Lisbon'), ('Europe/Ljubljana', 'Europe/Ljubljana'), ('Europe/London', 'Europe/London'), ('Europe/Luxembourg', 'Europe/Luxembourg'), ('Europe/Madrid', 'Europe/Madrid'), ('Europe/Malta', 'Europe/Malta'), ('Europe/Mariehamn', 'Europe/Mariehamn'), ('Europe/Minsk', 'Europe/Minsk'), ('Europe/Monaco', 'Europe/Monaco'), ('Europe/Moscow', 'Europe/Moscow'), ('Europe/Nicosia', 'Europe/Nicosia'), ('Europe/Oslo', 'Europe/Oslo'), ('Europe/Paris', 'Europe/Paris'), ('Europe/Podgorica', 'Europe/Podgorica'), ('Europe/Prague', 'Europe/Prague'), ('Europe/Riga', 'Europe/Riga'), ('Europe/Rome', 'Europe/Rome'), ('Europe/Samara', 'Europe/Samara'), ('Europe/San_Marino', 'Europe/San_Marino'), ('Europe/Sarajevo', 'Europe/Sarajevo'), ('Europe/Saratov', 'Europe/Saratov'), ('Europe/Simferopol', 'Europe/Simferopol'), ('Europe/Skopje', 'Europe/Skopje'), ('Europe/Sofia', 'Europe/Sofia'), ('Europe/Stockholm', 'Europe/Stockholm'), ('Europe/Tallinn', 'Europe/Tallinn'), ('Europe/Tirane', 'Europe/Tirane'), ('Europe/Tiraspol', 'Europe/Tiraspol'), ('Europe/Ulyanovsk', 'Europe/Ulyanovsk'), ('Europe/Uzhgorod', 'Europe/Uzhgorod'), ('Europe/Vaduz', 'Europe/Vaduz'), ('Europe/Vatican', 'Europe/Vatican'), ('Europe/Vienna', 'Europe/Vienna'), ('Europe/Vilnius', 'Europe/Vilnius'), ('Europe/Volgograd', 'Europe/Volgograd'), ('Europe/Warsaw', 'Europe/Warsaw'), ('Europe/Zagreb', 'Europe/Zagreb'), ('Europe/Zaporozhye', 'Europe/Zaporozhye'), ('Europe/Zurich', 'Europe/Zurich'), ('GB', 'GB'), ('GB-Eire', 'GB-Eire'), ('GMT', 'GMT'), ('GMT+0', 'GMT+0'), ('GMT-0', 'GMT-0'), ('GMT0', 'GMT0'), ('Greenwich', 'Greenwich'), ('HST', 'HST'), ('Hongkong', 'Hongkong'), ('Iceland', 'Iceland'), ('Indian/Antananarivo', 'Indian/Antananarivo'), ('Indian/Chagos', 'Indian/Chagos'), ('Indian/Christmas', 'Indian/Christmas'), ('Indian/Cocos', 'Indian/Cocos'), ('Indian/Comoro', 'Indian/Comoro'), ('Indian/Kerguelen', 'Indian/Kerguelen'), ('Indian/Mahe', 'Indian/Mahe'), ('Indian/Maldives', 'Indian/Maldives'), ('Indian/Mauritius', 'Indian/Mauritius'), ('Indian/Mayotte', 'Indian/Mayotte'), ('Indian/Reunion', 'Indian/Reunion'), ('Iran', 'Iran'), ('Israel', 'Israel'), ('Jamaica', 'Jamaica'), ('Japan', 'Japan'), ('Kwajalein', 'Kwajalein'), ('Libya', 'Libya'), ('MET', 'MET'), ('MST', 'MST'), ('MST7MDT', 'MST7MDT'), ('Mexico/BajaNorte', 'Mexico/BajaNorte'), ('Mexico/BajaSur', 'Mexico/BajaSur'), ('Mexico/General', 'Mexico/General'), ('NZ', 'NZ'), ('NZ-CHAT', 'NZ-CHAT'), ('Navajo', 'Navajo'), ('PRC', 'PRC'), ('PST8PDT', 'PST8PDT'), ('Pacific/Apia', 'Pacific/Apia'), ('Pacific/Auckland', 'Pacific/Auckland'), ('Pacific/Bougainville', 'Pacific/Bougainville'), ('Pacific/Chatham', 'Pacific/Chatham'), ('Pacific/Chuuk', 'Pacific/Chuuk'), ('Pacific/Easter', 'Pacific/Easter'), ('Pacific/Efate', 'Pacific/Efate'), ('Pacific/Enderbury', 'Pacific/Enderbury'), ('Pacific/Fakaofo', 'Pacific/Fakaofo'), ('Pacific/Fiji', 'Pacific/Fiji'), ('Pacific/Funafuti', 'Pacific/Funafuti'), ('Pacific/Galapagos', 'Pacific/Galapagos'), ('Pacific/Gambier', 'Pacific/Gambier'), ('Pacific/Guadalcanal', 'Pacific/Guadalcanal'), ('Pacific/Guam', 'Pacific/Guam'), ('Pacific/Honolulu', 'Pacific/Honolulu'), ('Pacific/Johnston', 'Pacific/Johnston'), ('Pacific/Kiritimati', 'Pacific/Kiritimati'), ('Pacific/Kosrae', 
'Pacific/Kosrae'), ('Pacific/Kwajalein', 'Pacific/Kwajalein'), ('Pacific/Majuro', 'Pacific/Majuro'), ('Pacific/Marquesas', 'Pacific/Marquesas'), ('Pacific/Midway', 'Pacific/Midway'), ('Pacific/Nauru', 'Pacific/Nauru'), ('Pacific/Niue', 'Pacific/Niue'), ('Pacific/Norfolk', 'Pacific/Norfolk'), ('Pacific/Noumea', 'Pacific/Noumea'), ('Pacific/Pago_Pago', 'Pacific/Pago_Pago'), ('Pacific/Palau', 'Pacific/Palau'), ('Pacific/Pitcairn', 'Pacific/Pitcairn'), ('Pacific/Pohnpei', 'Pacific/Pohnpei'), ('Pacific/Ponape', 'Pacific/Ponape'), ('Pacific/Port_Moresby', 'Pacific/Port_Moresby'), ('Pacific/Rarotonga', 'Pacific/Rarotonga'), ('Pacific/Saipan', 'Pacific/Saipan'), ('Pacific/Samoa', 'Pacific/Samoa'), ('Pacific/Tahiti', 'Pacific/Tahiti'), ('Pacific/Tarawa', 'Pacific/Tarawa'), ('Pacific/Tongatapu', 'Pacific/Tongatapu'), ('Pacific/Truk', 'Pacific/Truk'), ('Pacific/Wake', 'Pacific/Wake'), ('Pacific/Wallis', 'Pacific/Wallis'), ('Pacific/Yap', 'Pacific/Yap'), ('Poland', 'Poland'), ('Portugal', 'Portugal'), ('ROC', 'ROC'), ('ROK', 'ROK'), ('Singapore', 'Singapore'), ('Turkey', 'Turkey'), ('UCT', 'UCT'), ('US/Alaska', 'US/Alaska'), ('US/Aleutian', 'US/Aleutian'), ('US/Arizona', 'US/Arizona'), ('US/Central', 'US/Central'), ('US/East-Indiana', 'US/East-Indiana'), ('US/Eastern', 'US/Eastern'), ('US/Hawaii', 'US/Hawaii'), ('US/Indiana-Starke', 'US/Indiana-Starke'), ('US/Michigan', 'US/Michigan'), ('US/Mountain', 'US/Mountain'), ('US/Pacific', 'US/Pacific'), ('US/Samoa', 'US/Samoa'), ('UTC', 'UTC'), ('Universal', 'Universal'), ('W-SU', 'W-SU'), ('WET', 'WET'), ('Zulu', 'Zulu')], default='UTC', max_length=50),
),
migrations.AddField(
model_name='test',
name='version',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['cwe'], name='dojo_findin_cwe_a8da22_idx'),
),
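# Survey engine: Question/Answer use django-polymorphic (note polymorphic_ctype below);
# TextQuestion, ChoiceQuestion, TextAnswer and ChoiceAnswer are multi-table-inheritance
# subclasses linked via *_ptr one-to-one fields.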
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('order', models.PositiveIntegerField(default=1, help_text='The render order')),
('optional', models.BooleanField(default=False, help_text="If selected, the user doesn't have to answer this question")),
('text', models.TextField(default='', help_text='The question text')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_dojo.question_set+', to='contenttypes.contenttype')),
],
options={
'ordering': ['order'],
},
),
migrations.CreateModel(
name='TextQuestion',
fields=[
('question_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='dojo.question')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('dojo.question',),
),
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('order', models.PositiveIntegerField(default=1)),
('label', models.TextField(default='')),
],
options={
'ordering': ['order'],
},
),
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
),
migrations.CreateModel(
name='TextAnswer',
fields=[
('answer_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='dojo.answer')),
('answer', models.TextField(default='', help_text='The answer text')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('dojo.answer',),
),
migrations.CreateModel(
name='ChoiceAnswer',
fields=[
('answer_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='dojo.answer')),
('answer', models.ManyToManyField(help_text='The selected choices as the answer', to='dojo.Choice')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('dojo.answer',),
),
migrations.CreateModel(
name='ChoiceQuestion',
fields=[
('question_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='dojo.question')),
('multichoice', models.BooleanField(default=False, help_text='Select one or more')),
('choices', models.ManyToManyField(to='dojo.Choice')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('dojo.question',),
),
migrations.CreateModel(
name='Engagement_Survey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='', max_length=200)),
('description', models.TextField(default='')),
('active', models.BooleanField(default=True)),
('questions', models.ManyToManyField(to='dojo.Question')),
],
options={
'verbose_name': 'Engagement Survey',
'verbose_name_plural': 'Engagement Surveys',
'ordering': ('-active', 'name'),
},
),
migrations.CreateModel(
name='Answered_Survey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('completed', models.BooleanField(default=False)),
('answered_on', models.DateField(null=True)),
('engagement', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='engagement+', to='dojo.engagement')),
('responder', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='responder', to=settings.AUTH_USER_MODEL)),
('assignee', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='assignee', to=settings.AUTH_USER_MODEL)),
('survey', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.engagement_survey')),
],
options={
'verbose_name': 'Answered Engagement Survey',
'verbose_name_plural': 'Answered Engagement Surveys',
},
),
migrations.CreateModel(
name='General_Survey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('num_responses', models.IntegerField(default=0)),
('generated', models.DateTimeField(auto_now_add=True, null=True)),
('expiration', models.DateTimeField()),
('survey', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.engagement_survey')),
],
options={
'verbose_name': 'General Engagement Survey',
'verbose_name_plural': 'General Engagement Surveys',
},
),
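# With the survey models in place, the polymorphic Answer base model is now
# wired to the Answered_Survey it belongs to, its content type, and the
# Question it answers.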
migrations.AddField(
model_name='answer',
name='answered_survey',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.answered_survey'),
),
migrations.AddField(
model_name='answer',
name='polymorphic_ctype',
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_dojo.answer_set+', to='contenttypes.contenttype'),
),
migrations.AddField(
model_name='answer',
name='question',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.question'),
),
migrations.AddField(
model_name='risk_acceptance',
name='name',
field=models.CharField(default='Legacy acceptance', help_text='Descriptive name which in the future may also be used to group risk acceptances together across engagements and products', max_length=100),
),
migrations.AlterField(
model_name='risk_acceptance',
name='name',
field=models.CharField(help_text='Descriptive name which in the future may also be used to group risk acceptances together across engagements and products', max_length=100),
),
migrations.RenameField(
model_name='risk_acceptance',
old_name='reporter',
new_name='owner',
),
migrations.AlterField(
model_name='risk_acceptance',
name='owner',
field=models.ForeignKey(help_text='Only the owner and staff users can edit the risk acceptance.', on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user'),
),
migrations.AlterField(
model_name='risk_acceptance',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='risk_acceptance',
name='updated',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='finding',
name='hash_code',
field=models.CharField(blank=True, editable=False, max_length=64, null=True),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['hash_code'], name='dojo_findin_hash_co_09df6a_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['unique_id_from_tool'], name='dojo_findin_unique__f76d47_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['line'], name='dojo_findin_line_fea329_idx'),
),
migrations.AlterField(
model_name='product',
name='prod_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='prod_type', to='dojo.product_type'),
),
migrations.AlterField(
model_name='system_settings',
name='slack_channel',
field=models.CharField(blank=True, default='', help_text='Optional. Needed if you want to send global notifications.', max_length=100),
),
migrations.AlterField(
model_name='system_settings',
name='slack_username',
field=models.CharField(blank=True, default='', help_text='Optional. Will take your bot name otherwise.', max_length=100),
),
migrations.RenameField(
model_name='endpoint',
old_name='remediated',
new_name='mitigated',
),
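# Endpoint_Status tracks state per (endpoint, finding) pair: mitigated (with a
# timestamp and user), false positive, out of scope and risk accepted, rather
# than a single flag on the Endpoint itself.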
migrations.CreateModel(
name='Endpoint_Status',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(default=dojo.models.get_current_date)),
('last_modified', models.DateTimeField(default=dojo.models.get_current_datetime, editable=False, null=True)),
('mitigated', models.BooleanField(blank=True, default=False)),
('mitigated_time', models.DateTimeField(blank=True, editable=False, null=True)),
('false_positive', models.BooleanField(blank=True, default=False)),
('out_of_scope', models.BooleanField(blank=True, default=False)),
('risk_accepted', models.BooleanField(blank=True, default=False)),
('endpoint', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='status_endpoint', to='dojo.endpoint')),
('finding', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='status_finding', to='dojo.finding')),
('mitigated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='endpoint',
name='endpoint_status',
field=models.ManyToManyField(blank=True, related_name='endpoint_endpoint_status', to='dojo.Endpoint_Status'),
),
migrations.AddField(
model_name='finding',
name='endpoint_status',
field=models.ManyToManyField(blank=True, related_name='finding_endpoint_status', to='dojo.Endpoint_Status'),
),
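# Note: this structural migration only creates the Endpoint_Status table and
# the two many-to-many links; existing endpoint/finding pairs would need a
# separate data migration to be backfilled. A hypothetical RunPython sketch
# (not part of this migration) could look like:
#
#     def backfill_endpoint_status(apps, schema_editor):
#         Finding = apps.get_model('dojo', 'Finding')
#         Endpoint_Status = apps.get_model('dojo', 'Endpoint_Status')
#         for finding in Finding.objects.all():
#             for endpoint in finding.endpoints.all():
#                 status = Endpoint_Status.objects.create(
#                     finding=finding,
#                     endpoint=endpoint,
#                     mitigated=finding.is_Mitigated,
#                 )
#                 finding.endpoint_status.add(status)
#                 endpoint.endpoint_status.add(status)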
migrations.AlterField(
model_name='system_settings',
name='jira_minimum_severity',
field=models.CharField(blank=True, choices=[('Critical', 'Critical'), ('High', 'High'), ('Medium', 'Medium'), ('Low', 'Low'), ('Info', 'Info')], default='Low', max_length=20, null=True),
),
migrations.AddField(
model_name='jira_conf',
name='global_jira_sla_notification',
field=models.BooleanField(default=True, help_text='This setting can be overridden at the Product level', verbose_name='Globally send SLA notifications as comment?'),
),
migrations.AddField(
model_name='notifications',
name='sla_breach',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', help_text='Get notified of upcoming SLA breaches', max_length=24, verbose_name='SLA breach'),
),
migrations.AddField(
model_name='jira_pkey',
name='product_jira_sla_notification',
field=models.BooleanField(blank=True, default=False, verbose_name='Send SLA notifications as comment?'),
),
migrations.AlterField(
model_name='engagement',
name='deduplication_on_engagement',
field=models.BooleanField(default=False, help_text='If enabled, deduplication will only mark a finding in this engagement as a duplicate of another finding if both findings are in this engagement. If disabled, deduplication is on the product level.', verbose_name='Deduplication within this engagement only'),
),
migrations.AlterField(
model_name='regulation',
name='category',
field=models.CharField(choices=[('privacy', 'Privacy'), ('finance', 'Finance'), ('education', 'Education'), ('medical', 'Medical'), ('corporate', 'Corporate'), ('other', 'Other')], help_text='The subject of the regulation.', max_length=9),
),
migrations.AlterField(
model_name='regulation',
name='name',
field=models.CharField(help_text='The name of the regulation.', max_length=128, unique=True),
),
migrations.AddField(
model_name='finding',
name='cvssv3',
field=models.TextField(max_length=117, null=True, validators=[django.core.validators.RegexValidator(message="CVSS must be entered in format: 'AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H'", regex='^AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]')]),
),
migrations.AddField(
model_name='finding_template',
name='cvssv3',
field=models.TextField(max_length=117, null=True, validators=[django.core.validators.RegexValidator(message="CVSS must be entered in format: 'AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H'", regex='^AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]')]),
),
migrations.AlterField(
model_name='child_rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='applied_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AddField(
model_name='engagement',
name='notes',
field=models.ManyToManyField(blank=True, editable=False, to='dojo.Notes'),
),
migrations.AddField(
model_name='dojometa',
name='finding',
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='finding_meta', to='dojo.finding'),
),
migrations.AlterUniqueTogether(
name='dojometa',
unique_together={('product', 'name'), ('finding', 'name'), ('endpoint', 'name')},
),
migrations.AlterField(
model_name='notifications',
name='jira_update',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('hipchat', 'hipchat'), ('mail', 'mail'), ('alert', 'alert')], default='alert', help_text='JIRA sync happens in the background; errors will be shown as notifications/alerts, so make sure to subscribe', max_length=24, verbose_name='JIRA problems'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['component_name'], name='dojo_findin_compone_920323_idx'),
),
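# HipChat support is removed below; Microsoft Teams ('msteams') takes its place
# in the notification channel choices from here on.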
migrations.RemoveField(
model_name='system_settings',
name='enable_hipchat_notifications',
),
migrations.RemoveField(
model_name='system_settings',
name='hipchat_channel',
),
migrations.RemoveField(
model_name='system_settings',
name='hipchat_site',
),
migrations.RemoveField(
model_name='system_settings',
name='hipchat_token',
),
migrations.RemoveField(
model_name='usercontactinfo',
name='hipchat_username',
),
migrations.AddField(
model_name='system_settings',
name='enable_msteams_notifications',
field=models.BooleanField(default=False, verbose_name='Enable Microsoft Teams notifications'),
),
migrations.AddField(
model_name='system_settings',
name='msteams_url',
field=models.CharField(blank=True, default='', help_text='The full URL of the incoming webhook', max_length=400),
),
migrations.AlterField(
model_name='notifications',
name='auto_close_engagement',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='code_review',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='engagement_added',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='jira_update',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', help_text='JIRA sync happens in the background; errors will be shown as notifications/alerts, so make sure to subscribe', max_length=24, verbose_name='JIRA problems'),
),
migrations.AlterField(
model_name='notifications',
name='other',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='product_added',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='report_created',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='review_requested',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='scan_added',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', help_text='Triggered whenever a (re-)import has been done that created/updated/closed findings.', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='sla_breach',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', help_text='Get notified of upcoming SLA breaches', max_length=24, verbose_name='SLA breach'),
),
migrations.AlterField(
model_name='notifications',
name='stale_engagement',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='test_added',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='upcoming_engagement',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AlterField(
model_name='notifications',
name='user_mentioned',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
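# Most of the AlterField operations below only attach help_text and
# verbose_name metadata to Finding fields; those two attributes never affect
# the database schema, so Django records the state change without emitting SQL
# for them.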
migrations.AlterField(
model_name='finding',
name='active',
field=models.BooleanField(default=True, help_text='Denotes if this flaw is active or not.', verbose_name='Active'),
),
migrations.AlterField(
model_name='finding',
name='component_name',
field=models.CharField(blank=True, help_text='Name of the affected component (library name, part of a system, ...).', max_length=200, null=True, verbose_name='Component name'),
),
migrations.AlterField(
model_name='finding',
name='component_version',
field=models.CharField(blank=True, help_text='Version of the affected component.', max_length=100, null=True, verbose_name='Component version'),
),
migrations.AlterField(
model_name='finding',
name='created',
field=models.DateTimeField(auto_now_add=True, help_text='The date the finding was created inside DefectDojo.', null=True, verbose_name='Created'),
),
migrations.AlterField(
model_name='finding',
name='cve',
field=models.CharField(help_text='The Common Vulnerabilities and Exposures (CVE) associated with this flaw.', max_length=28, null=True, validators=[django.core.validators.RegexValidator(message="Vulnerability ID must be entered in the format: 'ABC-9999-9999'.", regex='^[A-Z]{1,10}(-\\d+)+$')], verbose_name='CVE'),
),
migrations.AlterField(
model_name='finding',
name='cvssv3',
field=models.TextField(help_text='Common Vulnerability Scoring System version 3 (CVSSv3) score associated with this flaw.', max_length=117, null=True, validators=[django.core.validators.RegexValidator(message="CVSS must be entered in format: 'AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H'", regex='^AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]')], verbose_name='CVSSv3'),
),
migrations.AlterField(
model_name='finding',
name='cwe',
field=models.IntegerField(blank=True, default=0, help_text='The CWE number associated with this flaw.', null=True, verbose_name='CWE'),
),
migrations.AlterField(
model_name='finding',
name='date',
field=models.DateField(default=dojo.models.get_current_date, help_text='The date the flaw was discovered.', verbose_name='Date'),
),
migrations.AlterField(
model_name='finding',
name='defect_review_requested_by',
field=models.ForeignKey(blank=True, help_text='Documents who requested a defect review for this flaw.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='defect_review_requested_by', to='dojo.dojo_user', verbose_name='Defect Review Requested By'),
),
migrations.AlterField(
model_name='finding',
name='description',
field=models.TextField(help_text='Longer more descriptive information about the flaw.', verbose_name='Description'),
),
migrations.AlterField(
model_name='finding',
name='duplicate',
field=models.BooleanField(default=False, help_text='Denotes if this flaw is a duplicate of other flaws reported.', verbose_name='Duplicate'),
),
migrations.AlterField(
model_name='finding',
name='duplicate_finding',
field=models.ForeignKey(blank=True, editable=False, help_text='Link to the original finding if this finding is a duplicate.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='original_finding', to='dojo.finding', verbose_name='Duplicate Finding'),
),
migrations.AlterField(
model_name='finding',
name='dynamic_finding',
field=models.BooleanField(default=True, help_text='Flaw has been detected from a Dynamic Application Security Testing tool (DAST).', verbose_name='Dynamic finding (DAST)'),
),
migrations.AlterField(
model_name='finding',
name='endpoint_status',
field=models.ManyToManyField(blank=True, help_text='The status of the endpoint associated with this flaw (Vulnerable, Mitigated, ...).', related_name='finding_endpoint_status', to='dojo.Endpoint_Status', verbose_name='Endpoint Status'),
),
migrations.AlterField(
model_name='finding',
name='endpoints',
field=models.ManyToManyField(blank=True, help_text='The hosts within the product that are susceptible to this flaw.', to='dojo.Endpoint', verbose_name='Endpoints'),
),
migrations.AlterField(
model_name='finding',
name='false_p',
field=models.BooleanField(default=False, help_text='Denotes if this flaw has been deemed a false positive by the tester.', verbose_name='False Positive'),
),
migrations.AlterField(
model_name='finding',
name='file_path',
field=models.CharField(blank=True, help_text='Identified file(s) containing the flaw.', max_length=4000, null=True, verbose_name='File path'),
),
migrations.AlterField(
model_name='finding',
name='found_by',
field=models.ManyToManyField(editable=False, help_text='The name of the scanner that identified the flaw.', to='dojo.Test_Type', verbose_name='Found by'),
),
migrations.AlterField(
model_name='finding',
name='hash_code',
field=models.CharField(blank=True, editable=False, help_text='A hash over a configurable set of fields that is used for findings deduplication.', max_length=64, null=True, verbose_name='Hash Code'),
),
migrations.AlterField(
model_name='finding',
name='images',
field=models.ManyToManyField(blank=True, help_text='Image(s) / Screenshot(s) related to the flaw.', to='dojo.FindingImage', verbose_name='Images'),
),
migrations.AlterField(
model_name='finding',
name='impact',
field=models.TextField(help_text='Text describing the impact this flaw has on systems, products, enterprise, etc.', verbose_name='Impact'),
),
migrations.AlterField(
model_name='finding',
name='is_Mitigated',
field=models.BooleanField(default=False, help_text='Denotes if this flaw has been fixed.', verbose_name='Is Mitigated'),
),
migrations.AlterField(
model_name='finding',
name='is_template',
field=models.BooleanField(default=False, help_text='Denotes if this finding is a template and can be reused.', verbose_name='Is Template'),
),
migrations.AlterField(
model_name='finding',
name='jira_change',
field=models.DateTimeField(help_text='The date the linked Jira issue was last modified.', null=True, verbose_name='Jira change'),
),
migrations.AlterField(
model_name='finding',
name='jira_creation',
field=models.DateTimeField(help_text='The date a Jira issue was created from this finding.', null=True, verbose_name='Jira creation'),
),
migrations.AlterField(
model_name='finding',
name='last_reviewed',
field=models.DateTimeField(editable=False, help_text="Provides the date the flaw was last 'touched' by a tester.", null=True, verbose_name='Last Reviewed'),
),
migrations.AlterField(
model_name='finding',
name='last_reviewed_by',
field=models.ForeignKey(editable=False, help_text='Provides the person who last reviewed the flaw.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_reviewed_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Reviewed By'),
),
migrations.AlterField(
model_name='finding',
name='line',
field=models.IntegerField(blank=True, help_text='Source line number of the attack vector.', null=True, verbose_name='Line number'),
),
migrations.AlterField(
model_name='finding',
name='line_number',
field=models.CharField(blank=True, editable=False, help_text='Deprecated, will be removed: use the line field instead.', max_length=200, null=True, verbose_name='Line Number'),
),
migrations.AlterField(
model_name='finding',
name='mitigated',
field=models.DateTimeField(blank=True, editable=False, help_text='Denotes whether this flaw has been fixed by storing the date on which it was fixed.', null=True, verbose_name='Mitigated'),
),
migrations.AlterField(
model_name='finding',
name='mitigated_by',
field=models.ForeignKey(editable=False, help_text='Documents who has marked this flaw as fixed.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='mitigated_by', to=settings.AUTH_USER_MODEL, verbose_name='Mitigated By'),
),
migrations.AlterField(
model_name='finding',
name='mitigation',
field=models.TextField(help_text='Text describing how to best fix the flaw.', verbose_name='Mitigation'),
),
migrations.AlterField(
model_name='finding',
name='nb_occurences',
field=models.IntegerField(blank=True, help_text='Number of occurrences in the source tool when several vulnerabilities were found and aggregated by the scanner.', null=True, verbose_name='Number of occurrences'),
),
migrations.AlterField(
model_name='finding',
name='notes',
field=models.ManyToManyField(blank=True, editable=False, help_text='Stores information pertinent to the flaw or the mitigation.', to='dojo.Notes', verbose_name='Notes'),
),
migrations.AlterField(
model_name='finding',
name='numerical_severity',
field=models.CharField(help_text='The numerical representation of the severity (S0, S1, S2, S3, S4).', max_length=4, verbose_name='Numerical Severity'),
),
migrations.AlterField(
model_name='finding',
name='out_of_scope',
field=models.BooleanField(default=False, help_text='Denotes if this flaw falls outside the scope of the test and/or engagement.', verbose_name='Out Of Scope'),
),
migrations.AlterField(
model_name='finding',
name='param',
field=models.TextField(blank=True, editable=False, help_text='Parameter used to trigger the issue (DAST).', null=True, verbose_name='Parameter'),
),
migrations.AlterField(
model_name='finding',
name='payload',
field=models.TextField(blank=True, editable=False, help_text='Payload used to attack the service / application and trigger the bug / problem.', null=True, verbose_name='Payload'),
),
migrations.AlterField(
model_name='finding',
name='references',
field=models.TextField(blank=True, db_column='refs', help_text='The external documentation available for this flaw.', null=True, verbose_name='References'),
),
migrations.AlterField(
model_name='finding',
name='reporter',
field=models.ForeignKey(default=1, editable=False, help_text='Documents who reported the flaw.', on_delete=django.db.models.deletion.CASCADE, related_name='reporter', to=settings.AUTH_USER_MODEL, verbose_name='Reporter'),
),
migrations.AlterField(
model_name='finding',
name='review_requested_by',
field=models.ForeignKey(blank=True, help_text='Documents who requested a review for this finding.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='review_requested_by', to='dojo.dojo_user', verbose_name='Review Requested By'),
),
migrations.AlterField(
model_name='finding',
name='reviewers',
field=models.ManyToManyField(blank=True, help_text='Documents who reviewed the flaw.', to=settings.AUTH_USER_MODEL, verbose_name='Reviewers'),
),
migrations.AlterField(
model_name='finding',
name='sast_sink_object',
field=models.CharField(blank=True, help_text='Sink object (variable, function...) of the attack vector.', max_length=500, null=True, verbose_name='SAST Sink Object'),
),
migrations.AlterField(
model_name='finding',
name='sast_source_file_path',
field=models.CharField(blank=True, help_text='Source file path of the attack vector.', max_length=4000, null=True, verbose_name='SAST Source File Path'),
),
migrations.AlterField(
model_name='finding',
name='sast_source_line',
field=models.IntegerField(blank=True, help_text='Source line number of the attack vector.', null=True, verbose_name='SAST Source Line number'),
),
migrations.AlterField(
model_name='finding',
name='sast_source_object',
field=models.CharField(blank=True, help_text='Source object (variable, function...) of the attack vector.', max_length=500, null=True, verbose_name='SAST Source Object'),
),
migrations.AlterField(
model_name='finding',
name='scanner_confidence',
field=models.IntegerField(blank=True, default=None, editable=False, help_text='Confidence level of the vulnerability as supplied by the scanner.', null=True, verbose_name='Scanner confidence'),
),
migrations.AlterField(
model_name='finding',
name='severity',
field=models.CharField(help_text='The severity level of this flaw (Critical, High, Medium, Low, Informational).', max_length=200, verbose_name='Severity'),
),
migrations.AlterField(
model_name='finding',
name='severity_justification',
field=models.TextField(blank=True, help_text='Text describing why a certain severity was associated with this flaw.', null=True, verbose_name='Severity Justification'),
),
migrations.AlterField(
model_name='finding',
name='sonarqube_issue',
field=models.ForeignKey(blank=True, help_text='The SonarQube issue associated with this finding.', null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.sonarqube_issue', verbose_name='SonarQube issue'),
),
migrations.AlterField(
model_name='finding',
name='sourcefile',
field=models.TextField(blank=True, editable=False, help_text='Name of the source code file in which the flaw is located.', null=True, verbose_name='Source File'),
),
migrations.AlterField(
model_name='finding',
name='sourcefilepath',
field=models.TextField(blank=True, editable=False, help_text='Filepath of the source code file in which the flaw is located.', null=True, verbose_name='Source File Path'),
),
migrations.AlterField(
model_name='finding',
name='static_finding',
field=models.BooleanField(default=False, help_text='Flaw has been detected from a Static Application Security Testing tool (SAST).', verbose_name='Static finding (SAST)'),
),
migrations.AlterField(
model_name='finding',
name='steps_to_reproduce',
field=models.TextField(blank=True, help_text='Text describing the steps that must be followed in order to reproduce the flaw / bug.', null=True, verbose_name='Steps to Reproduce'),
),
migrations.AlterField(
model_name='finding',
name='test',
field=models.ForeignKey(editable=False, help_text='The test that is associated with this flaw.', on_delete=django.db.models.deletion.CASCADE, to='dojo.test', verbose_name='Test'),
),
migrations.AlterField(
model_name='finding',
name='thread_id',
field=models.IntegerField(default=0, editable=False, verbose_name='Thread ID'),
),
migrations.AlterField(
model_name='finding',
name='title',
field=models.CharField(help_text='A short description of the flaw.', max_length=511, verbose_name='Title'),
),
migrations.AlterField(
model_name='finding',
name='under_defect_review',
field=models.BooleanField(default=False, help_text='Denotes if this finding is under defect review.', verbose_name='Under Defect Review'),
),
migrations.AlterField(
model_name='finding',
name='under_review',
field=models.BooleanField(default=False, help_text='Denotes if this flaw is currently being reviewed.', verbose_name='Under Review'),
),
migrations.AlterField(
model_name='finding',
name='unique_id_from_tool',
field=models.CharField(blank=True, help_text='Vulnerability technical id from the source tool. Allows tracking of unique vulnerabilities.', max_length=500, null=True, verbose_name='Unique ID from tool'),
),
migrations.AlterField(
model_name='finding',
name='url',
field=models.TextField(blank=True, editable=False, help_text='External reference that provides more information about this flaw.', null=True, verbose_name='URL'),
),
migrations.AlterField(
model_name='finding',
name='verified',
field=models.BooleanField(default=True, help_text='Denotes if this flaw has been manually verified by the tester.', verbose_name='Verified'),
),
migrations.AddField(
model_name='product_type',
name='authorized_users',
field=models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='system_settings',
name='false_positive_history',
field=models.BooleanField(default=False, help_text='DefectDojo will automatically mark the finding as a false positive if the finding has been previously marked as a false positive. Not needed when using deduplication; it is advised not to combine the two.'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['duplicate'], name='dojo_findin_duplica_f60788_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['is_Mitigated'], name='dojo_findin_is_Miti_aaa533_idx'),
),
migrations.AddField(
model_name='system_settings',
name='disable_jira_webhook_secret',
field=models.BooleanField(default=False, help_text='Allows incoming requests without a secret (discouraged legacy behaviour)', verbose_name='Disable web hook secret'),
),
migrations.AddField(
model_name='system_settings',
name='jira_webhook_secret',
field=models.CharField(help_text='Secret needed in URL for incoming JIRA Webhook', max_length=64, null=True, verbose_name='JIRA Webhook secret'),
),
migrations.AlterField(
model_name='system_settings',
name='enable_jira_web_hook',
field=models.BooleanField(default=False, help_text='Please note: It is strongly recommended to use a secret below and / or IP whitelist the JIRA server using a proxy such as Nginx.', verbose_name='Enable JIRA web hook'),
),
migrations.AddField(
model_name='finding',
name='vuln_id_from_tool',
field=models.CharField(blank=True, help_text='Non-unique technical id from the source tool associated with the vulnerability type.', max_length=500, null=True, verbose_name='Vulnerability ID from tool'),
),
migrations.AlterField(
model_name='child_rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='applied_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('jira_creation', 'jira_creation'), ('jira_change', 'jira_change'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
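# JIRA model refactor: the legacy JIRA_Clone and JIRA_Details_Cache models are
# deleted, JIRA_PKey is renamed to JIRA_Project and JIRA_Conf to JIRA_Instance,
# and the jira_creation/jira_change timestamps move from Finding onto
# JIRA_Issue (added just below, then removed from Finding afterwards).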
migrations.DeleteModel(
name='JIRA_Clone',
),
migrations.DeleteModel(
name='JIRA_Details_Cache',
),
migrations.RenameModel(
old_name='JIRA_PKey',
new_name='JIRA_Project',
),
migrations.AddField(
model_name='jira_issue',
name='jira_change',
field=models.DateTimeField(help_text='The date the linked Jira issue was last modified.', null=True, verbose_name='Jira last update'),
),
migrations.AddField(
model_name='jira_issue',
name='jira_creation',
field=models.DateTimeField(help_text='The date a Jira issue was created from this finding.', null=True, verbose_name='Jira creation'),
),
migrations.RenameModel(
old_name='JIRA_Conf',
new_name='JIRA_Instance',
),
migrations.RenameField(
model_name='jira_project',
old_name='conf',
new_name='jira_instance',
),
migrations.AddField(
model_name='jira_issue',
name='jira_project',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='dojo.jira_project'),
),
migrations.AddField(
model_name='JIRA_Project',
name='engagement',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.engagement'),
),
migrations.AlterField(
model_name='JIRA_Project',
name='product',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.product'),
),
migrations.AlterField(
model_name='jira_project',
name='jira_instance',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.jira_instance', verbose_name='JIRA Instance'),
),
migrations.RemoveField(
model_name='finding',
name='jira_change',
),
migrations.RemoveField(
model_name='finding',
name='jira_creation',
),
migrations.AlterField(
model_name='child_rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='applied_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
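# Tagging migration, step 1: every taggable model gets a temporary
# tags_from_django_tagging text field that archives the values from the old
# django-tagging library, presumably so a later data migration can copy them
# into the new tagulous tag fields.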
migrations.AddField(
model_name='app_analysis',
name='tags_from_django_tagging',
field=models.TextField(blank=True, editable=False, help_text='Temporary archive with tags from the previous tagging library we used'),
),
migrations.AddField(
model_name='endpoint',
name='tags_from_django_tagging',
field=models.TextField(blank=True, editable=False, help_text='Temporary archive with tags from the previous tagging library we used'),
),
migrations.AddField(
model_name='engagement',
name='tags_from_django_tagging',
field=models.TextField(blank=True, editable=False, help_text='Temporary archive with tags from the previous tagging library we used'),
),
migrations.AddField(
model_name='finding',
name='tags_from_django_tagging',
field=models.TextField(blank=True, editable=False, help_text='Temporary archive with tags from the previous tagging library we used'),
),
migrations.AddField(
model_name='objects',
name='tags_from_django_tagging',
field=models.TextField(blank=True, editable=False, help_text='Temporary archive with tags from the previous tagging library we used'),
),
migrations.AddField(
model_name='test',
name='tags_from_django_tagging',
field=models.TextField(blank=True, editable=False, help_text='Temporary archive with tags from the previous tagging library we used'),
),
migrations.AddField(
model_name='product',
name='tags_from_django_tagging',
field=models.TextField(blank=True, editable=False, help_text='Temporary archive with tags from the previous tagging library we used'),
),
migrations.AddField(
model_name='finding_template',
name='tags_from_django_tagging',
field=models.TextField(blank=True, editable=False, help_text='Temporary archive with tags from the previous tagging library we used'),
),
migrations.RenameModel(
old_name='Objects',
new_name='Objects_Product',
),
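# Tagging migration, step 2: django-tagulous stores tags in one auto-generated
# tag model per tagged field (name, slug, count and protected columns); the
# Tagulous_*_tags models below back the TagFields that are added afterwards.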
migrations.CreateModel(
name='Tagulous_Test_tags',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('slug', models.SlugField()),
('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
],
options={
'ordering': ('name',),
'abstract': False,
'unique_together': {('slug',)},
},
bases=(tagulous.models.models.BaseTagModel, models.Model),
),
migrations.CreateModel(
name='Tagulous_Product_tags',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('slug', models.SlugField()),
('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
],
options={
'ordering': ('name',),
'abstract': False,
'unique_together': {('slug',)},
},
bases=(tagulous.models.models.BaseTagModel, models.Model),
),
migrations.CreateModel(
name='Tagulous_Finding_Template_tags',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('slug', models.SlugField()),
('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
],
options={
'ordering': ('name',),
'abstract': False,
'unique_together': {('slug',)},
},
bases=(tagulous.models.models.BaseTagModel, models.Model),
),
migrations.CreateModel(
name='Tagulous_Finding_tags',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('slug', models.SlugField()),
('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
],
options={
'ordering': ('name',),
'abstract': False,
'unique_together': {('slug',)},
},
bases=(tagulous.models.models.BaseTagModel, models.Model),
),
migrations.CreateModel(
name='Tagulous_Engagement_tags',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('slug', models.SlugField()),
('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
],
options={
'ordering': ('name',),
'abstract': False,
'unique_together': {('slug',)},
},
bases=(tagulous.models.models.BaseTagModel, models.Model),
),
migrations.CreateModel(
name='Tagulous_Endpoint_tags',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('slug', models.SlugField()),
('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
],
options={
'ordering': ('name',),
'abstract': False,
'unique_together': {('slug',)},
},
bases=(tagulous.models.models.BaseTagModel, models.Model),
),
migrations.CreateModel(
name='Tagulous_App_Analysis_tags',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('slug', models.SlugField()),
('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
],
options={
'ordering': ('name',),
'abstract': False,
'unique_together': {('slug',)},
},
bases=(tagulous.models.models.BaseTagModel, models.Model),
),
migrations.CreateModel(
name='Tagulous_Objects_Product_tags',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('slug', models.SlugField()),
('count', models.IntegerField(default=0, help_text='Internal counter of how many times this tag is in use')),
('protected', models.BooleanField(default=False, help_text='Will not be deleted when the count reaches 0')),
],
options={
'ordering': ('name',),
'abstract': False,
'unique_together': {('slug',)},
},
bases=(tagulous.models.models.BaseTagModel, models.Model),
),
migrations.AddField(
model_name='app_analysis',
name='tags',
field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Enter a comma-separated tag string', to='dojo.Tagulous_App_Analysis_tags'),
),
migrations.AddField(
model_name='endpoint',
name='tags',
field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Add tags that help describe this endpoint. Choose from the list or add new tags. Press Enter key to add.', to='dojo.Tagulous_Endpoint_tags'),
),
migrations.AddField(
model_name='engagement',
name='tags',
field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Add tags that help describe this engagement. Choose from the list or add new tags. Press Enter key to add.', to='dojo.Tagulous_Engagement_tags'),
),
migrations.AddField(
model_name='finding',
name='tags',
field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Add tags that help describe this finding. Choose from the list or add new tags. Press Enter key to add.', to='dojo.Tagulous_Finding_tags'),
),
migrations.AddField(
model_name='finding_template',
name='tags',
field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Add tags that help describe this finding template. Choose from the list or add new tags. Press Enter key to add.', to='dojo.Tagulous_Finding_Template_tags'),
),
migrations.AddField(
model_name='objects_product',
name='tags',
field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Add tags that help describe this object. Choose from the list or add new tags. Press Enter key to add.', to='dojo.Tagulous_Objects_Product_tags'),
),
migrations.AddField(
model_name='product',
name='tags',
field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Add tags that help describe this product. Choose from the list or add new tags. Press Enter key to add.', to='dojo.Tagulous_Product_tags'),
),
migrations.AddField(
model_name='test',
name='tags',
field=tagulous.models.fields.TagField(_set_tag_meta=True, blank=True, force_lowercase=True, help_text='Add tags that help describe this test. Choose from the list or add new tags. Press Enter key to add.', to='dojo.Tagulous_Test_tags'),
),
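# Illustrative sketch (ours, not part of this migration): the model-level
# declaration that the Tagulous operations above materialize. Field options
# mirror the AddField calls; the model name here is hypothetical.
#
#     import tagulous.models
#     from django.db import models
#
#     class Example(models.Model):
#         tags = tagulous.models.TagField(
#             blank=True,
#             force_lowercase=True,
#             help_text="Enter a comma-separated tag string",
#         )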
migrations.AlterField(
model_name='child_rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='applied_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
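# Note: 'delete_dupulicates' below reproduces the misspelled field name from
# the original model; a RenameField operation later in this migration corrects
# it to 'delete_duplicates'.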
migrations.AlterField(
model_name='system_settings',
name='delete_dupulicates',
field=models.BooleanField(default=False, help_text='Requires next setting: maximum number of duplicates to retain.'),
),
migrations.AlterField(
model_name='system_settings',
name='max_dupes',
field=models.IntegerField(blank=True, default=10, help_text='When enabled, if a single issue reaches the maximum number of duplicates, the oldest will be deleted. Duplicates are not deleted when this is left empty. A value of 0 removes all duplicates.', null=True, verbose_name='Max Duplicates'),
),
migrations.CreateModel(
name='FileUpload',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100, unique=True)),
('file', models.FileField(upload_to=dojo.models.UniqueUploadNameProvider('uploaded_files'))),
],
),
migrations.AddField(
model_name='engagement',
name='files',
field=models.ManyToManyField(blank=True, editable=False, to='dojo.FileUpload'),
),
migrations.AddField(
model_name='finding',
name='files',
field=models.ManyToManyField(blank=True, editable=False, help_text='File(s) related to the flaw.', to='dojo.FileUpload', verbose_name='Files'),
),
migrations.AddField(
model_name='test',
name='files',
field=models.ManyToManyField(blank=True, editable=False, to='dojo.FileUpload'),
),
migrations.AddField(
model_name='finding',
name='risk_accepted',
field=models.BooleanField(default=False, help_text='Denotes if this finding has been marked as an accepted risk.', verbose_name='Risk Accepted'),
),
migrations.RemoveField(
model_name='engagement',
name='risk_path',
),
migrations.RemoveField(
model_name='risk_acceptance',
name='compensating_control',
),
migrations.AddField(
model_name='finding',
name='sla_start_date',
field=models.DateField(blank=True, help_text="(readonly) The date used as the start date for SLA calculation. Set by expiring risk acceptances. Empty by default, causing a fallback to 'date'.", null=True, verbose_name='SLA Start Date'),
),
migrations.AddField(
model_name='jira_project',
name='risk_acceptance_expiration_notification',
field=models.BooleanField(blank=True, default=False, verbose_name='Send Risk Acceptance expiration notifications as comment?'),
),
migrations.AddField(
model_name='notifications',
name='risk_acceptance_expiration',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', help_text='Get notified of (upcoming) Risk Acceptance expiries', max_length=24, verbose_name='Risk Acceptance Expiration'),
),
migrations.AddField(
model_name='product',
name='enable_full_risk_acceptance',
field=models.BooleanField(default=True, help_text='Allows full risk acceptance using a risk acceptance form, expiration date, uploaded proof, etc.'),
),
migrations.AddField(
model_name='product',
name='enable_simple_risk_acceptance',
field=models.BooleanField(default=False, help_text='Allows simple risk acceptance by checking/unchecking a checkbox.'),
),
migrations.AddField(
model_name='risk_acceptance',
name='decision',
field=models.CharField(choices=[('A', 'Accept (The risk is acknowledged, yet remains)'), ('V', 'Avoid (Do not engage with whatever creates the risk)'), ('M', 'Mitigate (The risk still exists, yet compensating controls make it less of a threat)'), ('F', 'Fix (The risk is eradicated)'), ('T', 'Transfer (The risk is transferred to a 3rd party)')], default='A', help_text='Risk treatment decision by risk owner', max_length=2),
),
migrations.AddField(
model_name='risk_acceptance',
name='decision_details',
field=models.TextField(blank=True, default=None, help_text='If a compensating control exists to mitigate the finding or reduce risk, then list the compensating control(s).', null=True),
),
migrations.AddField(
model_name='risk_acceptance',
name='expiration_date_handled',
field=models.DateTimeField(blank=True, default=None, help_text='(readonly) When the risk acceptance expiration was handled (manually or by the daily job).', null=True),
),
migrations.AddField(
model_name='risk_acceptance',
name='expiration_date_warned',
field=models.DateTimeField(blank=True, default=None, help_text='(readonly) Date at which notice about the risk acceptance expiration was sent.', null=True),
),
migrations.AddField(
model_name='risk_acceptance',
name='reactivate_expired',
field=models.BooleanField(default=True, help_text='Reactivate findings when risk acceptance expires?', verbose_name='Reactivate findings on expiration'),
),
migrations.AddField(
model_name='risk_acceptance',
name='recommendation',
field=models.CharField(choices=[('A', 'Accept (The risk is acknowledged, yet remains)'), ('V', 'Avoid (Do not engage with whatever creates the risk)'), ('M', 'Mitigate (The risk still exists, yet compensating controls make it less of a threat)'), ('F', 'Fix (The risk is eradicated)'), ('T', 'Transfer (The risk is transferred to a 3rd party)')], default='F', help_text='Recommendation from the security team.', max_length=2, verbose_name='Security Recommendation'),
),
migrations.AddField(
model_name='risk_acceptance',
name='recommendation_details',
field=models.TextField(blank=True, help_text='Explanation of security recommendation', null=True, verbose_name='Security Recommendation Details'),
),
migrations.AddField(
model_name='risk_acceptance',
name='restart_sla_expired',
field=models.BooleanField(default=False, help_text='When enabled, the SLA for findings is restarted when the risk acceptance expires.', verbose_name='Restart SLA on expiration'),
),
migrations.AddField(
model_name='system_settings',
name='risk_acceptance_form_default_days',
field=models.IntegerField(blank=True, default=180, help_text='Default expiry period for risk acceptance form.', null=True),
),
migrations.AddField(
model_name='system_settings',
name='risk_acceptance_notify_before_expiration',
field=models.IntegerField(blank=True, default=10, help_text='Notify X days before risk acceptance expires. Leave empty to disable.', null=True, verbose_name='Risk acceptance expiration heads up days'),
),
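# Hedged illustration (ours, not DefectDojo's code): how the two settings above
# could combine into a notification date. The helper name is hypothetical.
#
#     from datetime import timedelta
#
#     def risk_acceptance_warning_date(expiration_date, heads_up_days):
#         if heads_up_days is None:
#             return None  # an empty setting disables the heads-up notification
#         return expiration_date - timedelta(days=heads_up_days)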
migrations.AlterField(
model_name='child_rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('sla_start_date', 'sla_start_date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('risk_accepted', 'risk_accepted'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='notifications',
name='sla_breach',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', help_text='Get notified of (upcoming) SLA breaches', max_length=24, verbose_name='SLA breach'),
),
migrations.AlterField(
model_name='risk_acceptance',
name='accepted_by',
field=models.CharField(blank=True, default=None, help_text='The person that accepts the risk; this can be someone outside of DefectDojo.', max_length=200, null=True, verbose_name='Accepted By'),
),
migrations.AlterField(
model_name='risk_acceptance',
name='expiration_date',
field=models.DateTimeField(blank=True, default=None, help_text='When the risk acceptance expires, the findings will be reactivated (unless disabled below).', null=True),
),
migrations.AlterField(
model_name='risk_acceptance',
name='owner',
field=models.ForeignKey(help_text='User in DefectDojo owning this acceptance. Only the owner and staff users can edit the risk acceptance.', on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user'),
),
migrations.AlterField(
model_name='risk_acceptance',
name='path',
field=models.FileField(blank=True, null=True, upload_to='risk/%Y/%m/%d', verbose_name='Proof'),
),
migrations.AlterField(
model_name='rule',
name='applied_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('sla_start_date', 'sla_start_date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('risk_accepted', 'risk_accepted'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('sla_start_date', 'sla_start_date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('risk_accepted', 'risk_accepted'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200),
),
migrations.AlterField(
model_name='alerts',
name='title',
field=models.CharField(default='', max_length=250),
),
migrations.AddField(
model_name='product_type',
name='description',
field=models.CharField(max_length=4000, null=True),
),
migrations.AddField(
model_name='notifications',
name='product_type_added',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['test', 'active', 'verified'], name='dojo_findin_test_id_bfc47c_idx'),
),
migrations.AddIndex(
model_name='endpoint',
index=models.Index(fields=['product', 'mitigated'], name='dojo_endpoi_product_b80e9a_idx'),
),
migrations.AddIndex(
model_name='endpoint_status',
index=models.Index(fields=['finding', 'mitigated'], name='dojo_endpoi_finding_0fa5ce_idx'),
),
migrations.AddIndex(
model_name='endpoint_status',
index=models.Index(fields=['endpoint', 'mitigated'], name='dojo_endpoi_endpoin_d85400_idx'),
),
migrations.AddIndex(
model_name='engagement',
index=models.Index(fields=['product', 'active'], name='dojo_engage_product_16b039_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['test', 'is_Mitigated'], name='dojo_findin_test_id_afee7e_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['test', 'duplicate'], name='dojo_findin_test_id_52d78d_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['test', 'out_of_scope'], name='dojo_findin_test_id_742a6f_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['test', 'false_p'], name='dojo_findin_test_id_188e8a_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['test', 'unique_id_from_tool', 'duplicate'], name='dojo_findin_test_id_269acb_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['test', 'hash_code', 'duplicate'], name='dojo_findin_test_id_e54563_idx'),
),
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['test', 'component_name'], name='dojo_findin_test_id_396336_idx'),
),
migrations.AddIndex(
model_name='notifications',
index=models.Index(fields=['user', 'product'], name='dojo_notifi_user_id_575989_idx'),
),
migrations.AddIndex(
model_name='test',
index=models.Index(fields=['engagement', 'test_type'], name='dojo_test_engagem_bb6b41_idx'),
),
migrations.AlterField(
model_name='system_settings',
name='column_widths',
field=models.TextField(blank=True, max_length=1500),
),
migrations.AlterField(
model_name='system_settings',
name='credentials',
field=models.TextField(blank=True, max_length=3000),
),
migrations.AddField(
model_name='notifications',
name='close_engagement',
field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('slack', 'slack'), ('msteams', 'msteams'), ('mail', 'mail'), ('alert', 'alert')], default='alert', max_length=24),
),
migrations.CreateModel(
name='Test_Import',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('import_settings', models.JSONField(null=True)),
('version', models.CharField(blank=True, max_length=100, null=True)),
('type', models.CharField(default='unknown', max_length=64)),
],
options={
'ordering': ('-id',),
},
),
migrations.CreateModel(
name='Test_Import_Finding_Action',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('action', models.CharField(blank=True, choices=[('N', 'created'), ('C', 'closed'), ('R', 'reactivated'), ('U', 'updated')], max_length=100, null=True)),
('finding', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.finding')),
('test_import', models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.test_import')),
],
options={
'ordering': ('test_import', 'action', 'finding'),
'unique_together': {('test_import', 'finding')},
},
),
migrations.AddField(
model_name='test_import',
name='findings_affected',
field=models.ManyToManyField(through='dojo.Test_Import_Finding_Action', to='dojo.Finding'),
),
migrations.AddField(
model_name='test_import',
name='test',
field=models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, to='dojo.test'),
),
migrations.AddIndex(
model_name='test_import',
index=models.Index(fields=['created', 'test', 'type'], name='dojo_test_i_created_951f4e_idx'),
),
migrations.CreateModel(
name='Product_Type_Member',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('role', models.IntegerField(default=0)),
('product_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product_type')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Product_Member',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('role', models.IntegerField(default=0)),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.product')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='product',
name='members',
field=models.ManyToManyField(blank=True, related_name='product_members', through='dojo.Product_Member', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='product_type',
name='members',
field=models.ManyToManyField(blank=True, related_name='prod_type_members', through='dojo.Product_Type_Member', to=settings.AUTH_USER_MODEL),
),
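# Hedged sketch (ours): querying the through-model memberships added above;
# the variable names are illustrative.
#
#     for membership in Product_Member.objects.filter(product=some_product):
#         print(membership.user.username, membership.role)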
migrations.RenameField(
model_name='system_settings',
old_name='delete_dupulicates',
new_name='delete_duplicates',
),
migrations.AlterField(
model_name='finding',
name='cvssv3',
field=models.TextField(help_text='Common Vulnerability Scoring System version 3 (CVSSv3) score associated with this flaw.', max_length=117, null=True, validators=[django.core.validators.RegexValidator(message="CVSS must be entered in format: 'AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H'", regex='^AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]')], verbose_name='CVSS v3'),
),
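# Hedged sketch: exercising the validator regex above outside Django. The
# pattern alternates over segment types, and Django's RegexValidator uses
# re.search, so any value containing one valid segment passes.
#
#     import re
#
#     CVSS3_RE = re.compile(
#         r"^AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]"
#     )
#     assert CVSS3_RE.search("AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H")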
migrations.AddField(
model_name='system_settings',
name='disclaimer',
field=models.TextField(blank=True, default='', help_text='Include this custom disclaimer on all notifications and generated reports', max_length=3000, verbose_name='Custom Disclaimer'),
),
migrations.AddField(
model_name='jira_instance',
name='issue_template',
field=models.CharField(blank=True, help_text='Choose the folder containing the Django templates used to render the JIRA issue description. These are stored in dojo/templates/issue-trackers. Leave empty to use the default jira_full templates.', max_length=255, null=True),
),
migrations.AddField(
model_name='jira_project',
name='issue_template',
field=models.CharField(blank=True, help_text='Choose the folder containing the Django templates used to render the JIRA issue description. These are stored in dojo/templates/issue-trackers. Leave empty to use the default jira_full templates.', max_length=255, null=True),
),
migrations.AddField(
model_name='finding',
name='last_status_update',
field=models.DateTimeField(auto_now_add=True, help_text='Timestamp of the latest status update (change in status-related fields).', null=True, verbose_name='Last Status Update'),
),
migrations.RemoveField(
model_name='scan',
name='scan_settings',
),
migrations.RemoveField(
model_name='scansettings',
name='product',
),
migrations.RemoveField(
model_name='scansettings',
name='user',
),
migrations.RemoveField(
model_name='va',
name='result',
),
migrations.RemoveField(
model_name='va',
name='user',
),
migrations.DeleteModel(
name='IPScan',
),
migrations.DeleteModel(
name='Scan',
),
migrations.DeleteModel(
name='ScanSettings',
),
migrations.DeleteModel(
name='VA',
),
migrations.AddField(
model_name='tool_configuration',
name='extras',
field=models.CharField(blank=True, help_text='Additional definitions that will be consumed by the scanner.', max_length=255, null=True),
),
migrations.AddField(
model_name='finding',
name='cvssv3_score',
field=models.FloatField(blank=True, help_text='Numerical CVSSv3 score for the vulnerability. If the vector is given, the score is updated when the finding is saved.', null=True, verbose_name='CVSSv3 score'),
),
migrations.AddField(
model_name='finding',
name='publish_date',
field=models.DateTimeField(blank=True, help_text='Date when this vulnerability was made publicly available.', null=True, verbose_name='Publish date'),
),
migrations.AlterField(
model_name='child_rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('sla_start_date', 'sla_start_date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('cvssv3_score', 'cvssv3_score'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('risk_accepted', 'risk_accepted'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences'), ('publish_date', 'publish_date')], max_length=200),
),
migrations.AlterField(
model_name='finding',
name='impact',
field=models.TextField(blank=True, help_text='Text describing the impact this flaw has on systems, products, enterprise, etc.', null=True, verbose_name='Impact'),
),
migrations.AlterField(
model_name='finding',
name='mitigation',
field=models.TextField(blank=True, help_text='Text describing how to best fix the flaw.', null=True, verbose_name='Mitigation'),
),
migrations.AlterField(
model_name='rule',
name='applied_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('sla_start_date', 'sla_start_date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('cvssv3_score', 'cvssv3_score'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('risk_accepted', 'risk_accepted'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences'), ('publish_date', 'publish_date')], max_length=200),
),
migrations.AlterField(
model_name='rule',
name='match_field',
field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('sla_start_date', 'sla_start_date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('cvssv3_score', 'cvssv3_score'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('risk_accepted', 'risk_accepted'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences'), ('publish_date', 'publish_date')], max_length=200),
),
migrations.RenameField(
model_name='jira_instance',
old_name='issue_template',
new_name='issue_template_dir',
),
migrations.RenameField(
model_name='jira_project',
old_name='issue_template',
new_name='issue_template_dir',
),
migrations.CreateModel(
name='Finding_Group',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('name', models.CharField(max_length=255)),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.dojo_user')),
('findings', models.ManyToManyField(to='dojo.Finding')),
('test', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dojo.test')),
],
options={
'ordering': ['id'],
},
),
migrations.AddField(
model_name='jira_issue',
name='finding_group',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.finding_group'),
),
migrations.AlterField(
model_name='jira_issue',
name='jira_project',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='dojo.jira_project'),
),
migrations.AlterField(
model_name='jira_project',
name='jira_instance',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='dojo.jira_instance', verbose_name='JIRA Instance'),
),
migrations.AddField(
model_name='test',
name='branch_tag',
field=models.CharField(blank=True, help_text='Tag or branch that was tested; a reimport may update this field.', max_length=150, null=True, verbose_name='Branch/Tag'),
),
migrations.AddField(
model_name='test',
name='build_id',
field=models.CharField(blank=True, help_text='Build ID that was tested; a reimport may update this field.', max_length=150, null=True, verbose_name='Build ID'),
),
migrations.AddField(
model_name='test',
name='commit_hash',
field=models.CharField(blank=True, help_text='Commit hash that was tested; a reimport may update this field.', max_length=150, null=True, verbose_name='Commit Hash'),
),
migrations.AddField(
model_name='test_import',
name='branch_tag',
field=models.CharField(blank=True, help_text='Tag or branch that was tested; a reimport may update this field.', max_length=150, null=True, verbose_name='Branch/Tag'),
),
migrations.AddField(
model_name='test_import',
name='build_id',
field=models.CharField(blank=True, help_text='Build ID that was tested; a reimport may update this field.', max_length=150, null=True, verbose_name='Build ID'),
),
migrations.AddField(
model_name='test_import',
name='commit_hash',
field=models.CharField(blank=True, help_text='Commit hash that was tested; a reimport may update this field.', max_length=150, null=True, verbose_name='Commit Hash'),
),
migrations.AlterField(
model_name='jira_issue',
name='jira_project',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='dojo.jira_project'),
),
migrations.RunPython(log_me_0090),
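# `log_me_0090` is defined earlier in this migrations module, outside this
# excerpt. A minimal RunPython hook of that shape might look like the sketch
# below (our assumption; the real body may differ):
#
#     def log_me_0090(apps, schema_editor):
#         import logging
#         logging.getLogger(__name__).info("Running 0090 data steps")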
migrations.AddIndex(
model_name='finding',
index=models.Index(fields=['duplicate_finding', 'id'], name='dojo_findin_duplica_94e6c5_idx'),
),
]
| 89.777435 | 22,873 | 0.642461 | 31,741 | 291,238 | 5.696764 | 0.060742 | 0.018814 | 0.022121 | 0.025661 | 0.861216 | 0.832232 | 0.792248 | 0.755006 | 0.729395 | 0.703829 | 0 | 0.00933 | 0.182253 | 291,238 | 3,243 | 22,874 | 89.805119 | 0.749915 | 0.003042 | 0 | 0.664277 | 1 | 0.014339 | 0.413355 | 0.048406 | 0 | 0 | 0 | 0 | 0 | 1 | 0.001247 | false | 0.00187 | 0.011222 | 0 | 0.014339 | 0.000312 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5b55d3cfe435087984f7e5fa72de5c7f618e71b5 | 27,414 | py | Python | tiledb/cloud/rest_api/api/tasks_api.py | TileDB-Inc/TileDB-Cloud-Py | e73f6e0ae3fc595218abd3be606c68f62ad5ac9b | ["MIT"] | 4 | 2019-12-04T23:19:35.000Z | 2021-06-21T21:42:53.000Z | tiledb/cloud/rest_api/api/tasks_api.py | TileDB-Inc/TileDB-Cloud-Py | e73f6e0ae3fc595218abd3be606c68f62ad5ac9b | ["MIT"] | 106 | 2019-11-07T22:40:43.000Z | 2022-03-29T22:31:18.000Z | tiledb/cloud/rest_api/api/tasks_api.py | TileDB-Inc/TileDB-Cloud-Py | e73f6e0ae3fc595218abd3be606c68f62ad5ac9b | ["MIT"] | 1 | 2020-10-04T18:54:37.000Z | 2020-10-04T18:54:37.000Z | # coding: utf-8
"""
TileDB Storage Platform API
TileDB Storage Platform REST API # noqa: E501
The version of the OpenAPI document: 2.2.19
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tiledb.cloud.rest_api.api_client import ApiClient
from tiledb.cloud.rest_api.exceptions import ApiTypeError # noqa: F401
from tiledb.cloud.rest_api.exceptions import ApiValueError
class TasksApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def run_sql(self, namespace, sql, **kwargs): # noqa: E501
"""run_sql # noqa: E501
Run a SQL query # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.run_sql(namespace, sql, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str namespace: namespace to run the task under (an organization name or user's username) (required)
:param SQLParameters sql: sql being submitted (required)
:param str accept_encoding: Encoding to use
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[object]
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.run_sql_with_http_info(namespace, sql, **kwargs) # noqa: E501
def run_sql_with_http_info(self, namespace, sql, **kwargs): # noqa: E501
"""run_sql # noqa: E501
Run a SQL query # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.run_sql_with_http_info(namespace, sql, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str namespace: namespace to run the task under (an organization name or user's username) (required)
:param SQLParameters sql: sql being submitted (required)
:param str accept_encoding: Encoding to use
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[object], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ["namespace", "sql", "accept_encoding"]
all_params.extend(
[
"async_req",
"_return_http_data_only",
"_preload_content",
"_request_timeout",
]
)
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'" " to method run_sql" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'namespace' is set
if self.api_client.client_side_validation and (
"namespace" not in local_var_params
or local_var_params["namespace"] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `namespace` when calling `run_sql`"
) # noqa: E501
# verify the required parameter 'sql' is set
if self.api_client.client_side_validation and (
"sql" not in local_var_params
or local_var_params["sql"] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `sql` when calling `run_sql`"
) # noqa: E501
collection_formats = {}
path_params = {}
if "namespace" in local_var_params:
path_params["namespace"] = local_var_params["namespace"] # noqa: E501
query_params = []
header_params = {}
if "accept_encoding" in local_var_params:
header_params["Accept-Encoding"] = local_var_params[
"accept_encoding"
] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if "sql" in local_var_params:
body_params = local_var_params["sql"]
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
) # noqa: E501
# HTTP header `Content-Type`
header_params[
"Content-Type"
] = self.api_client.select_header_content_type( # noqa: E501
["application/json"]
) # noqa: E501
# Authentication setting
auth_settings = ["ApiKeyAuth", "BasicAuth"] # noqa: E501
return self.api_client.call_api(
"/sql/{namespace}",
"POST",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="list[object]", # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
), # noqa: E501
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
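# Hedged usage sketch (ours, not generated): submitting a query through this
# endpoint. A configured ApiClient with valid credentials is assumed, and the
# namespace and the SQLParameters 'query' keyword are illustrative.
#
#     from tiledb.cloud.rest_api.models import SQLParameters
#
#     api = TasksApi()  # falls back to a default ApiClient (see __init__)
#     rows = api.run_sql("my_org", SQLParameters(query="SELECT 1"))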
def task_id_get(self, id, **kwargs): # noqa: E501
"""task_id_get # noqa: E501
Fetch an array task # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.task_id_get(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: task id to fetch (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ArrayTask
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.task_id_get_with_http_info(id, **kwargs) # noqa: E501
def task_id_get_with_http_info(self, id, **kwargs): # noqa: E501
"""task_id_get # noqa: E501
Fetch an array task # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.task_id_get_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: task id to fetch (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ArrayTask, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ["id"]
all_params.extend(
[
"async_req",
"_return_http_data_only",
"_preload_content",
"_request_timeout",
]
)
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method task_id_get" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and (
"id" not in local_var_params or local_var_params["id"] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `id` when calling `task_id_get`"
) # noqa: E501
collection_formats = {}
path_params = {}
if "id" in local_var_params:
path_params["id"] = local_var_params["id"] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
) # noqa: E501
# Authentication setting
auth_settings = ["ApiKeyAuth", "BasicAuth"] # noqa: E501
return self.api_client.call_api(
"/task/{id}",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="ArrayTask", # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
), # noqa: E501
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def task_id_result_get(self, id, **kwargs): # noqa: E501
"""task_id_result_get # noqa: E501
Retrieve results of an array task # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.task_id_result_get(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: task id to retrieve stored results (required)
:param str accept_encoding: Encoding to use
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.task_id_result_get_with_http_info(id, **kwargs) # noqa: E501
def task_id_result_get_with_http_info(self, id, **kwargs): # noqa: E501
"""task_id_result_get # noqa: E501
Retrieve results of an array task # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.task_id_result_get_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: task id to retrieve stored results (required)
:param str accept_encoding: Encoding to use
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(str, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ["id", "accept_encoding"]
all_params.extend(
[
"async_req",
"_return_http_data_only",
"_preload_content",
"_request_timeout",
]
)
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method task_id_result_get" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and (
"id" not in local_var_params or local_var_params["id"] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `id` when calling `task_id_result_get`"
) # noqa: E501
collection_formats = {}
path_params = {}
if "id" in local_var_params:
path_params["id"] = local_var_params["id"] # noqa: E501
query_params = []
header_params = {}
if "accept_encoding" in local_var_params:
header_params["Accept-Encoding"] = local_var_params[
"accept_encoding"
] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
) # noqa: E501
# Authentication setting
auth_settings = ["ApiKeyAuth", "BasicAuth"] # noqa: E501
return self.api_client.call_api(
"/task/{id}/result",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="str", # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
), # noqa: E501
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
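# A minimal timeout usage sketch for the methods above (the `TasksApi` and
# `ApiClient` names are assumptions for illustration, not defined here):
#
#     api = TasksApi(ApiClient(configuration))
#     # a single number is the total request timeout, in seconds
#     api.task_id_result_get("task-id", _request_timeout=30)
#     # a pair is interpreted as (connection, read) timeouts
#     api.task_id_result_get("task-id", _request_timeout=(3.05, 27))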
def tasks_get(self, **kwargs): # noqa: E501
"""tasks_get # noqa: E501
Fetch a list of all array tasks a user has access to # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.tasks_get(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str namespace: namespace to filter
:param str created_by: username to filter
:param str array: URL-encoded name/URI of the array to filter by
:param int start: start time for tasks to filter by
:param int end: end time for tasks to filter by
:param int page: pagination offset
:param int per_page: pagination limit
:param str type: task type, \"QUERY\", \"SQL\", \"UDF\", \"GENERIC_UDF\"
:param list[str] exclude_type: task types to exclude from the results; more than one can be included
:param list[str] file_type: file types of the task's array to match; more than one can be included
:param list[str] exclude_file_type: file types of the task's array to exclude; more than one can be included
:param str status: filter to only return tasks with these statuses
:param str search: search string matched against the name, namespace and description fields
:param str orderby: field to sort by; valid values include start_time and name
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ArrayTaskData
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.tasks_get_with_http_info(**kwargs) # noqa: E501
def tasks_get_with_http_info(self, **kwargs): # noqa: E501
"""tasks_get # noqa: E501
Fetch a list of all array tasks a user has access to # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.tasks_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str namespace: namespace to filter
:param str created_by: username to filter
:param str array: URL-encoded name/URI of the array to filter by
:param int start: start time for tasks to filter by
:param int end: end time for tasks to filter by
:param int page: pagination offset
:param int per_page: pagination limit
:param str type: task type, \"QUERY\", \"SQL\", \"UDF\", \"GENERIC_UDF\"
:param list[str] exclude_type: task types to exclude from the results; more than one can be included
:param list[str] file_type: file types of the task's array to match; more than one can be included
:param list[str] exclude_file_type: file types of the task's array to exclude; more than one can be included
:param str status: filter to only return tasks with these statuses
:param str search: search string matched against the name, namespace and description fields
:param str orderby: field to sort by; valid values include start_time and name
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ArrayTaskData, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
"namespace",
"created_by",
"array",
"start",
"end",
"page",
"per_page",
"type",
"exclude_type",
"file_type",
"exclude_file_type",
"status",
"search",
"orderby",
]
all_params.extend(
[
"async_req",
"_return_http_data_only",
"_preload_content",
"_request_timeout",
]
)
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method tasks_get" % key
)
local_var_params[key] = val
del local_var_params["kwargs"]
collection_formats = {}
path_params = {}
query_params = []
if (
"namespace" in local_var_params
and local_var_params["namespace"] is not None
): # noqa: E501
query_params.append(
("namespace", local_var_params["namespace"])
) # noqa: E501
if (
"created_by" in local_var_params
and local_var_params["created_by"] is not None
): # noqa: E501
query_params.append(
("created_by", local_var_params["created_by"])
) # noqa: E501
if (
"array" in local_var_params and local_var_params["array"] is not None
): # noqa: E501
query_params.append(("array", local_var_params["array"])) # noqa: E501
if (
"start" in local_var_params and local_var_params["start"] is not None
): # noqa: E501
query_params.append(("start", local_var_params["start"])) # noqa: E501
if (
"end" in local_var_params and local_var_params["end"] is not None
): # noqa: E501
query_params.append(("end", local_var_params["end"])) # noqa: E501
if (
"page" in local_var_params and local_var_params["page"] is not None
): # noqa: E501
query_params.append(("page", local_var_params["page"])) # noqa: E501
if (
"per_page" in local_var_params and local_var_params["per_page"] is not None
): # noqa: E501
query_params.append(
("per_page", local_var_params["per_page"])
) # noqa: E501
if (
"type" in local_var_params and local_var_params["type"] is not None
): # noqa: E501
query_params.append(("type", local_var_params["type"])) # noqa: E501
if (
"exclude_type" in local_var_params
and local_var_params["exclude_type"] is not None
): # noqa: E501
query_params.append(
("exclude_type", local_var_params["exclude_type"])
) # noqa: E501
collection_formats["exclude_type"] = "csv" # noqa: E501
if (
"file_type" in local_var_params
and local_var_params["file_type"] is not None
): # noqa: E501
query_params.append(
("file_type", local_var_params["file_type"])
) # noqa: E501
collection_formats["file_type"] = "multi" # noqa: E501
if (
"exclude_file_type" in local_var_params
and local_var_params["exclude_file_type"] is not None
): # noqa: E501
query_params.append(
("exclude_file_type", local_var_params["exclude_file_type"])
) # noqa: E501
collection_formats["exclude_file_type"] = "multi" # noqa: E501
if (
"status" in local_var_params and local_var_params["status"] is not None
): # noqa: E501
query_params.append(("status", local_var_params["status"])) # noqa: E501
if (
"search" in local_var_params and local_var_params["search"] is not None
): # noqa: E501
query_params.append(("search", local_var_params["search"])) # noqa: E501
if (
"orderby" in local_var_params and local_var_params["orderby"] is not None
): # noqa: E501
query_params.append(("orderby", local_var_params["orderby"])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(
["application/json"]
) # noqa: E501
# Authentication setting
auth_settings = ["ApiKeyAuth", "BasicAuth"] # noqa: E501
return self.api_client.call_api(
"/tasks",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="ArrayTaskData", # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get(
"_return_http_data_only"
), # noqa: E501
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
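# Serialization note for the list filters in tasks_get_with_http_info (a
# sketch, not executed; filter values are illustrative): collection_formats
# "csv" joins values into a single query argument, while "multi" repeats the
# argument once per value.
#
#     exclude_type=["QUERY", "SQL"] -> ...&exclude_type=QUERY,SQL&...
#     file_type=["a", "b"]          -> ...&file_type=a&file_type=b&...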
| 41.853435 | 116 | 0.579303 | 3,134 | 27,414 | 4.832802 | 0.075622 | 0.053876 | 0.086888 | 0.025353 | 0.910141 | 0.890664 | 0.875215 | 0.857058 | 0.820877 | 0.8014 | 0 | 0.016996 | 0.343255 | 27,414 | 654 | 117 | 41.917431 | 0.824261 | 0.432845 | 0 | 0.598315 | 1 | 0 | 0.159988 | 0.019026 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025281 | false | 0 | 0.016854 | 0 | 0.067416 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5b67f9f5797f8a58e1de93d1825dfd81d743635d | 8,080 | py | Python | test/model/test_enum.py | HansBug/hbutils | 6872311c8a441c5955572e0093b10189a2b90708 | [
"Apache-2.0"
] | null | null | null | test/model/test_enum.py | HansBug/hbutils | 6872311c8a441c5955572e0093b10189a2b90708 | [
"Apache-2.0"
] | 25 | 2021-10-03T06:19:05.000Z | 2022-03-27T12:48:57.000Z | test/model/test_enum.py | HansBug/hbutils | 6872311c8a441c5955572e0093b10189a2b90708 | [
"Apache-2.0"
] | null | null | null | from enum import IntEnum, Enum
from random import shuffle
import pytest
from hbutils.model import int_enum_loads, AutoIntEnum
# noinspection DuplicatedCode
@pytest.mark.unittest
class TestReflectionEnum:
def test_int_enum_loads_base(self):
@int_enum_loads()
class Enum1(IntEnum):
A = 1
B = 2
C = 3
assert Enum1.loads(Enum1.A) is Enum1.A
assert Enum1.loads(Enum1.B) is Enum1.B
assert Enum1.loads(Enum1.C) is Enum1.C
assert Enum1.loads(1) is Enum1.A
assert Enum1.loads(2) is Enum1.B
assert Enum1.loads(3) is Enum1.C
assert Enum1.loads('A') is Enum1.A
assert Enum1.loads('B') is Enum1.B
assert Enum1.loads('C') is Enum1.C
with pytest.raises(TypeError):
Enum1.loads(None)
def test_int_enum_loads_error(self):
with pytest.raises(TypeError):
@int_enum_loads()
class Enum1(Enum):
A = 1
B = 2
C = 3
def test_int_enum_disable_int(self):
@int_enum_loads(enable_int=False)
class Enum1(IntEnum):
A = 1
B = 2
C = 3
assert Enum1.loads(Enum1.A) is Enum1.A
assert Enum1.loads(Enum1.B) is Enum1.B
assert Enum1.loads(Enum1.C) is Enum1.C
with pytest.raises(TypeError):
assert Enum1.loads(1) is Enum1.A
with pytest.raises(TypeError):
assert Enum1.loads(2) is Enum1.B
with pytest.raises(TypeError):
assert Enum1.loads(3) is Enum1.C
assert Enum1.loads('A') is Enum1.A
assert Enum1.loads('B') is Enum1.B
assert Enum1.loads('C') is Enum1.C
with pytest.raises(TypeError):
Enum1.loads(None)
def test_int_enum_disable_str(self):
@int_enum_loads(enable_str=False)
class Enum1(IntEnum):
A = 1
B = 2
C = 3
assert Enum1.loads(Enum1.A) is Enum1.A
assert Enum1.loads(Enum1.B) is Enum1.B
assert Enum1.loads(Enum1.C) is Enum1.C
assert Enum1.loads(1) is Enum1.A
assert Enum1.loads(2) is Enum1.B
assert Enum1.loads(3) is Enum1.C
with pytest.raises(TypeError):
assert Enum1.loads('A') is Enum1.A
with pytest.raises(TypeError):
assert Enum1.loads('B') is Enum1.B
with pytest.raises(TypeError):
assert Enum1.loads('C') is Enum1.C
with pytest.raises(TypeError):
Enum1.loads(None)
def test_int_enum_extend_int(self):
@int_enum_loads(value_preprocess=lambda x: abs(x))
class Enum1(IntEnum):
A = 1
B = 2
C = 3
assert Enum1.loads(Enum1.A) is Enum1.A
assert Enum1.loads(Enum1.B) is Enum1.B
assert Enum1.loads(Enum1.C) is Enum1.C
assert Enum1.loads(1) is Enum1.A
assert Enum1.loads(2) is Enum1.B
assert Enum1.loads(3) is Enum1.C
assert Enum1.loads(-1) is Enum1.A
assert Enum1.loads(-2) is Enum1.B
assert Enum1.loads(-3) is Enum1.C
assert Enum1.loads('A') is Enum1.A
assert Enum1.loads('B') is Enum1.B
assert Enum1.loads('C') is Enum1.C
with pytest.raises(TypeError):
Enum1.loads(None)
def test_int_enum_extend_str(self):
@int_enum_loads(name_preprocess=lambda x: x.lower())
class Enum1(IntEnum):
A = 1
B = 2
C = 3
assert Enum1.loads(Enum1.A) is Enum1.A
assert Enum1.loads(Enum1.B) is Enum1.B
assert Enum1.loads(Enum1.C) is Enum1.C
assert Enum1.loads(1) is Enum1.A
assert Enum1.loads(2) is Enum1.B
assert Enum1.loads(3) is Enum1.C
assert Enum1.loads('A') is Enum1.A
assert Enum1.loads('B') is Enum1.B
assert Enum1.loads('C') is Enum1.C
assert Enum1.loads('a') is Enum1.A
assert Enum1.loads('b') is Enum1.B
assert Enum1.loads('c') is Enum1.C
with pytest.raises(TypeError):
Enum1.loads(None)
def test_int_enum_extend_else(self):
@int_enum_loads(external_process=lambda x: None)
class Enum1(IntEnum):
A = 1
B = 2
C = 3
assert Enum1.loads(Enum1.A) is Enum1.A
assert Enum1.loads(Enum1.B) is Enum1.B
assert Enum1.loads(Enum1.C) is Enum1.C
assert Enum1.loads(1) is Enum1.A
assert Enum1.loads(2) is Enum1.B
assert Enum1.loads(3) is Enum1.C
assert Enum1.loads('A') is Enum1.A
assert Enum1.loads('B') is Enum1.B
assert Enum1.loads('C') is Enum1.C
assert Enum1.loads(None) is None
assert Enum1.loads([1, 2]) is None
def test_auto_int_enum(self):
class MyEnum(AutoIntEnum):
def __init__(self, v):
self.v = v
A = 'a_v'
B = 'b_vv'
C = 'c_vvv'
assert MyEnum.A.value == 1
assert MyEnum.A.v == 'a_v'
assert MyEnum.B.value == 2
assert MyEnum.B.v == 'b_vv'
assert MyEnum.C.value == 3
assert MyEnum.C.v == 'c_vvv'
assert MyEnum.A == MyEnum.A
assert MyEnum.A != MyEnum.B
assert MyEnum.A != MyEnum.C
assert MyEnum.B != MyEnum.A
assert MyEnum.B == MyEnum.B
assert MyEnum.B != MyEnum.C
assert MyEnum.C != MyEnum.A
assert MyEnum.C != MyEnum.B
assert MyEnum.C == MyEnum.C
for i in range(100):
l = [MyEnum.A, MyEnum.B, MyEnum.C]
shuffle(l)
assert sorted(l) == [MyEnum.A, MyEnum.B, MyEnum.C]
d = {}
d[MyEnum.A] = 1
d[MyEnum.B] = 2
d[MyEnum.C] = 3
assert len(d.items()) == 3
assert d[MyEnum.A] == 1
assert d[MyEnum.B] == 2
assert d[MyEnum.C] == 3
d[MyEnum.C] = 4
assert d == {
MyEnum.A: 1,
MyEnum.B: 2,
MyEnum.C: 4,
}
def test_auto_int_enum_with_int_enum_loads(self):
@int_enum_loads(name_preprocess=str.upper)
class MyEnum(AutoIntEnum):
def __init__(self, v):
self.v = v
A = 'a_v'
B = 'b_vv'
C = 'c_vvv'
assert MyEnum.loads('a') == MyEnum.A
assert MyEnum.loads(1) == MyEnum.A
assert MyEnum.loads('B') == MyEnum.B
assert MyEnum.loads(2) == MyEnum.B
assert MyEnum.loads('c') == MyEnum.C
assert MyEnum.loads(3) == MyEnum.C
def test_auto_int_enum_with_multiple_args(self):
class MyEnum(AutoIntEnum):
def __init__(self, v, vx):
self.v = v
self.vx = vx
A = ('a_v', 0b1)
B = ('b_vv', 0b10)
C = ('c_vvv', 0b100)
assert MyEnum.A.value == 1
assert MyEnum.A.v == 'a_v'
assert MyEnum.A.vx == 1
assert MyEnum.B.value == 2
assert MyEnum.B.v == 'b_vv'
assert MyEnum.B.vx == 2
assert MyEnum.C.value == 3
assert MyEnum.C.v == 'c_vvv'
assert MyEnum.C.vx == 4
assert MyEnum.A == MyEnum.A
assert MyEnum.A != MyEnum.B
assert MyEnum.A != MyEnum.C
assert MyEnum.B != MyEnum.A
assert MyEnum.B == MyEnum.B
assert MyEnum.B != MyEnum.C
assert MyEnum.C != MyEnum.A
assert MyEnum.C != MyEnum.B
assert MyEnum.C == MyEnum.C
def test_auto_int_enum_with_int_enum_loads_and_multiple_args(self):
@int_enum_loads(name_preprocess=str.upper)
class MyEnum(AutoIntEnum):
def __init__(self, v, vx):
self.v = v
self.vx = vx
A = ('a_v', 0b1)
B = ('b_vv', 0b10)
C = ('c_vvv', 0b100)
assert MyEnum.loads('a') == MyEnum.A
assert MyEnum.loads(1) == MyEnum.A
assert MyEnum.loads('B') == MyEnum.B
assert MyEnum.loads(2) == MyEnum.B
assert MyEnum.loads('c') == MyEnum.C
assert MyEnum.loads(3) == MyEnum.C
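# A minimal sketch of the decorated-enum pattern exercised above (assumption:
# AutoIntEnum numbers members 1, 2, ... in declaration order and forwards the
# declared value(s) to __init__, as the tests verify):
#
#     @int_enum_loads(name_preprocess=str.upper)
#     class Color(AutoIntEnum):
#         def __init__(self, hex_code):
#             self.hex_code = hex_code
#         RED = '#ff0000'
#         GREEN = '#00ff00'
#
#     assert Color.loads('red') is Color.RED    # name, case-folded by loads
#     assert Color.loads(2) is Color.GREEN      # raw int value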
| 31.439689 | 71 | 0.549629 | 1,152 | 8,080 | 3.755208 | 0.065972 | 0.154877 | 0.229311 | 0.087379 | 0.880952 | 0.833333 | 0.825012 | 0.813916 | 0.806981 | 0.791262 | 0 | 0.045463 | 0.33849 | 8,080 | 256 | 72 | 31.5625 | 0.763891 | 0.003342 | 0 | 0.765766 | 0 | 0 | 0.012297 | 0 | 0 | 0 | 0 | 0 | 0.509009 | 1 | 0.067568 | false | 0 | 0.018018 | 0 | 0.13964 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
5b7a4c426118d8598b0ef9bddceb47271f096a4b | 23,347 | py | Python | tests/functional/commands/test_status_command.py | mnoukhov/orion | 7849d77344e84ec805207cf4148aecf6f7d6b3d7 | [
"BSD-3-Clause"
] | 3 | 2019-12-13T03:41:19.000Z | 2021-06-15T20:14:33.000Z | tests/functional/commands/test_status_command.py | mnoukhov/orion | 7849d77344e84ec805207cf4148aecf6f7d6b3d7 | [
"BSD-3-Clause"
] | null | null | null | tests/functional/commands/test_status_command.py | mnoukhov/orion | 7849d77344e84ec805207cf4148aecf6f7d6b3d7 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Perform a functional test of the status command."""
import os
import orion.core.cli
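# Shared pattern for every test below (a sketch, not itself a test): invoke
# the CLI entry point in-process, capture stdout with pytest's capsys
# fixture, and compare against a literal expected table.
#
#     orion.core.cli.main(['status'])       # run `orion status`
#     captured = capsys.readouterr().out    # captured stdout
#     assert captured == expected           # exact string comparison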
def test_no_experiments(clean_db, monkeypatch, capsys):
"""Test status with no experiments."""
monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__)))
orion.core.cli.main(['status'])
captured = capsys.readouterr().out
assert captured == ""
def test_experiment_without_trials_wout_ac(clean_db, one_experiment, capsys):
"""Test status with only one experiment and no trials."""
orion.core.cli.main(['status'])
captured = capsys.readouterr().out
expected = """\
test_single_exp
===============
empty
"""
assert captured == expected
def test_experiment_wout_success_wout_ac(clean_db, single_without_success, capsys):
"""Test status with only one experiment and no successful trial."""
orion.core.cli.main(['status'])
captured = capsys.readouterr().out
expected = """\
test_single_exp
===============
status quantity
----------- ----------
broken 1
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_experiment_w_trials_wout_ac(clean_db, single_with_trials, capsys):
"""Test status with only one experiment and all trials."""
orion.core.cli.main(['status'])
captured = capsys.readouterr().out
expected = """\
test_single_exp
===============
status quantity min obj
----------- ---------- ---------
broken 1
completed 1 0
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_two_unrelated_w_trials_wout_ac(clean_db, unrelated_with_trials, capsys):
"""Test two unrelated experiments, with all types of trials."""
orion.core.cli.main(['status'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
test_single_exp
===============
status quantity min obj
----------- ---------- ---------
broken 1
completed 1 0
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_two_related_w_trials_wout_ac(clean_db, family_with_trials, capsys):
"""Test two related experiments, with all types of trials."""
orion.core.cli.main(['status'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
test_double_exp_child
=====================
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_three_unrelated_wout_ac(clean_db, three_experiments_with_trials, capsys):
"""Test three unrelated experiments with all types of trials."""
orion.core.cli.main(['status'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
test_double_exp_child
=====================
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
test_single_exp
===============
status quantity min obj
----------- ---------- ---------
broken 1
completed 1 0
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_three_related_wout_ac(clean_db, three_family_with_trials, capsys):
"""Test three related experiments with all types of trials."""
orion.core.cli.main(['status'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
test_double_exp_child
=====================
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
test_double_exp_child2
======================
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_three_related_branch_wout_ac(clean_db, three_family_branch_with_trials, capsys):
"""Test three related experiments with all types of trials."""
orion.core.cli.main(['status'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
test_double_exp_child
=====================
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
test_double_exp_grand_child
===========================
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_one_wout_trials_w_a_wout_c(clean_db, one_experiment, capsys):
"""Test experiments, without trials, with --all."""
orion.core.cli.main(['status', '--all'])
captured = capsys.readouterr().out
expected = """\
test_single_exp
===============
id status best objective
---- -------- ----------------
"""
assert captured == expected
def test_one_w_trials_w_a_wout_c(clean_db, single_with_trials, capsys):
"""Test experiment, with all trials, with --all."""
orion.core.cli.main(['status', '--all'])
captured = capsys.readouterr().out
expected = """\
test_single_exp
===============
id status min obj
-------------------------------- ----------- ---------
ec6ee7892275400a9acbf4f4d5cd530d broken
c4c44cb46d075546824e2a32f800fece completed 0
2b5059fa8fdcdc01f769c31e63d93f24 interrupted
7e8eade99d5fb1aa59a1985e614732bc new
507496236ff94d0f3ad332949dfea484 reserved
caf6afc856536f6d061676e63d14c948 suspended
"""
assert captured == expected
def test_one_wout_success_w_a_wout_c(clean_db, single_without_success, capsys):
"""Test experiment, without success, with --all."""
orion.core.cli.main(['status', '--all'])
captured = capsys.readouterr().out
expected = """\
test_single_exp
===============
id status
-------------------------------- -----------
ec6ee7892275400a9acbf4f4d5cd530d broken
2b5059fa8fdcdc01f769c31e63d93f24 interrupted
7e8eade99d5fb1aa59a1985e614732bc new
507496236ff94d0f3ad332949dfea484 reserved
caf6afc856536f6d061676e63d14c948 suspended
"""
assert captured == expected
def test_two_unrelated_w_a_wout_c(clean_db, unrelated_with_trials, capsys):
"""Test two unrelated experiments with --all."""
orion.core.cli.main(['status', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
test_single_exp
===============
id status min obj
-------------------------------- ----------- ---------
ec6ee7892275400a9acbf4f4d5cd530d broken
c4c44cb46d075546824e2a32f800fece completed 0
2b5059fa8fdcdc01f769c31e63d93f24 interrupted
7e8eade99d5fb1aa59a1985e614732bc new
507496236ff94d0f3ad332949dfea484 reserved
caf6afc856536f6d061676e63d14c948 suspended
"""
assert captured == expected
def test_two_related_w_a_wout_c(clean_db, family_with_trials, capsys):
"""Test two related experiments with --all."""
orion.core.cli.main(['status', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
test_double_exp_child
=====================
id status
-------------------------------- -----------
45c359f1c753a10f2cfeca4073a3a7ef broken
e79761fe3fc24dcbb7850939ede84b68 completed
69928939792d67f6fe30e9b8459be1ec interrupted
5f4a9c92b8f7c26654b5b37ecd3d5d32 new
58c4019fb2f92da88a0e63fafb36b3da reserved
82f340cb9d90cbf024169926b60aeef2 suspended
"""
assert captured == expected
def test_three_unrelated_w_a_wout_c(clean_db, three_experiments_with_trials, capsys):
"""Test three unrelated experiments with --all."""
orion.core.cli.main(['status', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
test_double_exp_child
=====================
id status
-------------------------------- -----------
45c359f1c753a10f2cfeca4073a3a7ef broken
e79761fe3fc24dcbb7850939ede84b68 completed
69928939792d67f6fe30e9b8459be1ec interrupted
5f4a9c92b8f7c26654b5b37ecd3d5d32 new
58c4019fb2f92da88a0e63fafb36b3da reserved
82f340cb9d90cbf024169926b60aeef2 suspended
test_single_exp
===============
id status min obj
-------------------------------- ----------- ---------
ec6ee7892275400a9acbf4f4d5cd530d broken
c4c44cb46d075546824e2a32f800fece completed 0
2b5059fa8fdcdc01f769c31e63d93f24 interrupted
7e8eade99d5fb1aa59a1985e614732bc new
507496236ff94d0f3ad332949dfea484 reserved
caf6afc856536f6d061676e63d14c948 suspended
"""
assert captured == expected
def test_three_related_w_a_wout_c(clean_db, three_family_with_trials, capsys):
"""Test three related experiments with --all."""
orion.core.cli.main(['status', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
test_double_exp_child
=====================
id status
-------------------------------- -----------
45c359f1c753a10f2cfeca4073a3a7ef broken
e79761fe3fc24dcbb7850939ede84b68 completed
69928939792d67f6fe30e9b8459be1ec interrupted
5f4a9c92b8f7c26654b5b37ecd3d5d32 new
58c4019fb2f92da88a0e63fafb36b3da reserved
82f340cb9d90cbf024169926b60aeef2 suspended
test_double_exp_child2
======================
id status
-------------------------------- -----------
d0f4aa931345bfd864201b7dd93ae667 broken
5005c35be98025a24731d7dfdf4423de completed
c9fa9f0682a370396c8c4265c4e775dd interrupted
3d8163138be100e37f1656b7b591179e new
790d3c4c965e0d91ada9cbdaebe220cf reserved
6efdb99952d5f80f55adbba9c61dc288 suspended
"""
assert captured == expected
def test_three_related_branch_w_a_wout_c(clean_db, three_family_branch_with_trials, capsys):
"""Test three related experiments in a branch with --all."""
orion.core.cli.main(['status', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
test_double_exp_child
=====================
id status
-------------------------------- -----------
45c359f1c753a10f2cfeca4073a3a7ef broken
e79761fe3fc24dcbb7850939ede84b68 completed
69928939792d67f6fe30e9b8459be1ec interrupted
5f4a9c92b8f7c26654b5b37ecd3d5d32 new
58c4019fb2f92da88a0e63fafb36b3da reserved
82f340cb9d90cbf024169926b60aeef2 suspended
test_double_exp_grand_child
===========================
id status
-------------------------------- -----------
994602c021c470989d6f392b06cb37dd broken
24c228352de31010d8d3bf253604a82d completed
a3c8a1f4c80c094754c7217a83aae5e2 interrupted
d667f5d719ddaa4e1da2fbe568e11e46 new
a40748e487605df3ed04a5ac7154d4f6 reserved
229622a6d7132c311b7d4c57a08ecf08 suspended
"""
assert captured == expected
def test_two_unrelated_w_c_wout_a(clean_db, unrelated_with_trials, capsys):
"""Test two unrelated experiments with --collapse."""
orion.core.cli.main(['status', '--collapse'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
test_single_exp
===============
status quantity min obj
----------- ---------- ---------
broken 1
completed 1 0
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_two_related_w_c_wout_a(clean_db, family_with_trials, capsys):
"""Test two related experiments with --collapse."""
orion.core.cli.main(['status', '--collapse'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 2
reserved 1
suspended 1
"""
assert captured == expected
def test_three_unrelated_w_c_wout_a(clean_db, three_experiments_with_trials, capsys):
"""Test three unrelated experiments with --collapse."""
orion.core.cli.main(['status', '--collapse'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 2
reserved 1
suspended 1
test_single_exp
===============
status quantity min obj
----------- ---------- ---------
broken 1
completed 1 0
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_three_related_w_c_wout_a(clean_db, three_family_with_trials, capsys):
"""Test three related experiments with --collapse."""
orion.core.cli.main(['status', '--collapse'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 3
reserved 1
suspended 1
"""
assert captured == expected
def test_three_related_branch_w_c_wout_a(clean_db, three_family_branch_with_trials, capsys):
"""Test three related experiments with --collapse."""
orion.core.cli.main(['status', '--collapse'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 3
reserved 1
suspended 1
"""
assert captured == expected
def test_two_unrelated_w_ac(clean_db, unrelated_with_trials, capsys):
"""Test two unrelated experiments with --collapse and --all."""
orion.core.cli.main(['status', '--collapse', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
test_single_exp
===============
id status min obj
-------------------------------- ----------- ---------
ec6ee7892275400a9acbf4f4d5cd530d broken
c4c44cb46d075546824e2a32f800fece completed 0
2b5059fa8fdcdc01f769c31e63d93f24 interrupted
7e8eade99d5fb1aa59a1985e614732bc new
507496236ff94d0f3ad332949dfea484 reserved
caf6afc856536f6d061676e63d14c948 suspended
"""
assert captured == expected
def test_two_related_w_ac(clean_db, family_with_trials, capsys):
"""Test two related experiments with --collapse and --all."""
orion.core.cli.main(['status', '--collapse', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
ad6ea2decff2f298594b948fdaea03b2 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
"""
assert captured == expected
def test_three_unrelated_w_ac(clean_db, three_experiments_with_trials, capsys):
"""Test three unrelated experiments with --collapse and --all."""
orion.core.cli.main(['status', '--collapse', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
ad6ea2decff2f298594b948fdaea03b2 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
test_single_exp
===============
id status min obj
-------------------------------- ----------- ---------
ec6ee7892275400a9acbf4f4d5cd530d broken
c4c44cb46d075546824e2a32f800fece completed 0
2b5059fa8fdcdc01f769c31e63d93f24 interrupted
7e8eade99d5fb1aa59a1985e614732bc new
507496236ff94d0f3ad332949dfea484 reserved
caf6afc856536f6d061676e63d14c948 suspended
"""
assert captured == expected
def test_three_related_w_ac(clean_db, three_family_with_trials, capsys):
"""Test three related experiments with --collapse and --all."""
orion.core.cli.main(['status', '--collapse', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
ad6ea2decff2f298594b948fdaea03b2 new
f357f8c185ccab3037c65dcf721b9e71 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
"""
assert captured == expected
def test_three_related_branch_w_ac(clean_db, three_family_branch_with_trials, capsys):
"""Test three related experiments in a branch with --collapse and --all."""
orion.core.cli.main(['status', '--collapse', '--all'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
id status
-------------------------------- -----------
a8f8122af9e5162e1e2328fdd5dd75db broken
ab82b1fa316de5accb4306656caa07d0 completed
c187684f7c7d9832ba953f246900462d interrupted
1497d4f27622520439c4bc132c6046b1 new
ad6ea2decff2f298594b948fdaea03b2 new
8f763d441db41d0f56e4e6aa40cc2321 new
bd0999e1a3b00bf8658303b14867b30e reserved
b9f1506db880645a25ad9b5d2cfa0f37 suspended
"""
assert captured == expected
def test_experiment_wout_child_w_name(clean_db, unrelated_with_trials, capsys):
"""Test status with the name argument and no child."""
orion.core.cli.main(['status', '--name', 'test_single_exp'])
captured = capsys.readouterr().out
expected = """test_single_exp
===============
status quantity min obj
----------- ---------- ---------
broken 1
completed 1 0
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
def test_experiment_w_child_w_name(clean_db, three_experiments_with_trials, capsys):
"""Test status with the name argument and one child."""
orion.core.cli.main(['status', '--name', 'test_double_exp'])
captured = capsys.readouterr().out
expected = """\
test_double_exp
===============
status quantity
----------- ----------
broken 1
completed 1
interrupted 1
new 1
reserved 1
suspended 1
"""
assert captured == expected
| 25.487991 | 92 | 0.592796 | 1,906 | 23,347 | 7.065058 | 0.062959 | 0.025249 | 0.032823 | 0.034457 | 0.928635 | 0.923957 | 0.915417 | 0.893955 | 0.882148 | 0.863657 | 0 | 0.165799 | 0.251339 | 23,347 | 915 | 93 | 25.515847 | 0.604611 | 0.067846 | 0 | 0.917829 | 0 | 0 | 0.708019 | 0.260053 | 0 | 0 | 0 | 0 | 0.044961 | 1 | 0.044961 | false | 0 | 0.003101 | 0 | 0.048062 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5bf062dc8ebc1a701cbacab1f12b9e1eede01475 | 2,946 | py | Python | cajas/movement/migrations/0014_auto_20190920_1437.py | dmontoya1/cajas | 5eb3d5835250d5dafae398082200b79c1ca8063b | [
"MIT"
] | null | null | null | cajas/movement/migrations/0014_auto_20190920_1437.py | dmontoya1/cajas | 5eb3d5835250d5dafae398082200b79c1ca8063b | [
"MIT"
] | null | null | null | cajas/movement/migrations/0014_auto_20190920_1437.py | dmontoya1/cajas | 5eb3d5835250d5dafae398082200b79c1ca8063b | [
"MIT"
] | null | null | null | # Generated by Django 2.0.9 on 2019-09-20 14:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('movement', '0013_movementdailysquare_temp_employee'),
]
operations = [
migrations.AddField(
model_name='movementboxcolombia',
name='movement_don_juan',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Don Juan PK'),
),
migrations.AddField(
model_name='movementboxcolombia',
name='movement_don_juan_usd',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Don Juan Dólares'),
),
migrations.AddField(
model_name='movementboxcolombia',
name='movement_office',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Oficina PK'),
),
migrations.AddField(
model_name='movementdonjuan',
name='movement_box_colombia',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Caja Colombia'),
),
migrations.AddField(
model_name='movementdonjuan',
name='movement_don_juan_usd',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Don Juan Dólares'),
),
migrations.AddField(
model_name='movementdonjuan',
name='movement_office',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Oficina PK'),
),
migrations.AddField(
model_name='movementdonjuanusd',
name='movement_box_colombia',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Caja Colombia'),
),
migrations.AddField(
model_name='movementdonjuanusd',
name='movement_don_juan',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Don Juan PK'),
),
migrations.AddField(
model_name='movementdonjuanusd',
name='movement_office',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Oficina PK'),
),
migrations.AddField(
model_name='movementoffice',
name='movement_don_juan',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Don Juan PK'),
),
migrations.AddField(
model_name='movementoffice',
name='movement_don_juan_usd',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Don Juan Dólares'),
),
migrations.AddField(
model_name='movementoffice',
name='movement_office',
field=models.IntegerField(blank=True, null=True, verbose_name='Movimiento Oficina PK'),
),
]
| 39.810811 | 105 | 0.624576 | 286 | 2,946 | 6.258741 | 0.174825 | 0.12067 | 0.15419 | 0.181006 | 0.907263 | 0.907263 | 0.907263 | 0.853631 | 0.83352 | 0.797207 | 0 | 0.008821 | 0.268839 | 2,946 | 73 | 106 | 40.356164 | 0.822191 | 0.015275 | 0 | 0.895522 | 1 | 0 | 0.254915 | 0.049327 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.014925 | 0 | 0.059701 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
7515418d718fd8d41cf3e54474eb9827b1b32646 | 132 | py | Python | quickvision/layers/__init__.py | Quick-AI/quickvision | dc3c083356f3afa12c8992254249d3a1a3ea0d7d | [
"Apache-2.0"
] | 47 | 2020-11-15T03:36:48.000Z | 2021-04-08T05:28:02.000Z | quickvision/layers/__init__.py | oke-aditya/quickvision | dc3c083356f3afa12c8992254249d3a1a3ea0d7d | [
"Apache-2.0"
] | 78 | 2020-11-14T17:55:28.000Z | 2021-04-06T08:55:24.000Z | quickvision/layers/__init__.py | Quick-AI/quickvision | dc3c083356f3afa12c8992254249d3a1a3ea0d7d | [
"Apache-2.0"
] | 15 | 2020-11-14T18:01:04.000Z | 2021-02-16T14:50:12.000Z | from quickvision.layers import functional
from quickvision.layers.act_mish import Mish
from quickvision.layers.block_mlp import MLP
| 33 | 44 | 0.871212 | 19 | 132 | 5.947368 | 0.473684 | 0.39823 | 0.557522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 132 | 3 | 45 | 44 | 0.941667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f32f36fe6230c16e25ade021caccdfb7e60d6e6e | 9,537 | py | Python | tests/Permutation/TestFoplPermutation.py | oIi123/TableauxProver | cb527f91f5c2d0393fbfcb3fb501b4480e0c9031 | [
"MIT"
] | null | null | null | tests/Permutation/TestFoplPermutation.py | oIi123/TableauxProver | cb527f91f5c2d0393fbfcb3fb501b4480e0c9031 | [
"MIT"
] | null | null | null | tests/Permutation/TestFoplPermutation.py | oIi123/TableauxProver | cb527f91f5c2d0393fbfcb3fb501b4480e0c9031 | [
"MIT"
] | null | null | null | import unittest
from src.Parser.FoplParser import FoplParser as Parser
from src.Model.FoplExpressionTree import *
class TestPlPermutation(unittest.TestCase):
def test_atom(self):
# A = [A]
expr = Predicate("A", [])
permutations = expr.permute()
self.assertEqual(len(permutations), 1)
self.assertEqual(expr, permutations[0])
def test_and_1(self):
expected_permutations = [
"A()&B()",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_and_2(self):
expected_permutations = [
"A()&(B()&C())",
"(A()&B())&C()",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_and_3(self):
expected_permutations = [
"A()&(B()&C())",
"(A()&B())&C()",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_and_4(self):
expected_permutations = [
"A()&(B()&(C()&D()))",
"A()&((B()&C())&D())",
"((A()&B())&C())&D()",
"(A()&(B()&C()))&D()",
"(A()&B())&(C()&D())",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_and_5(self):
expr = And(Or(Predicate("A", []), Predicate("B", [])), Predicate("C", []))
expected_permutations = [
"(A()|B())&C()",
]
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_and_6(self):
expr = Or(And(Predicate("A", []), Predicate("B", [])), Predicate("C", []))
expected_permutations = [
"A()&B()|C()",
]
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_or_1(self):
expected_permutations = [
"A()|B()",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_or_2(self):
expected_permutations = [
"(A()|B())|C()",
"A()|(B()|C())",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_or_3(self):
expected_permutations = [
"(A()|B())|C()",
"A()|(B()|C())",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_or_4(self):
expected_permutations = [
"A()|((B()|C())|D())",
"A()|(B()|(C()|D()))",
"((A()|B())|C())|D()",
"(A()|(B()|C()))|D()",
"(A()|B())|(C()|D())",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_impl_1(self):
expected_permutations = [
"A()->B()",
]
for expr_str in expected_permutations:
expr = Parser.parse(expr_str).expr
permutations = expr.permute()
self.assertEqual(len(permutations), 1)
expr = Parser.parse(expr_str).expr
self.assertIn(expr, permutations, f"{expr_str} not in perms")
def test_impl_2(self):
expected_permutations = [
"(A()->B())->C()",
"A()->(B()->C())",
]
for expr_str in expected_permutations:
expr = Parser.parse(expr_str).expr
permutations = expr.permute()
self.assertEqual(len(permutations), 1)
expr = Parser.parse(expr_str).expr
self.assertIn(expr, permutations, f"{expr_str} not in perms")
def test_impl_3(self):
expected_permutations = [
"A()->((B()->C())->D())",
"A()->(B()->(C()->D()))",
"((A()->B())->C())->D()",
"(A()->(B()->C()))->D()",
"(A()->B())->(C()->D())",
]
for expr_str in expected_permutations:
expr = Parser.parse(expr_str).expr
permutations = expr.permute()
self.assertEqual(len(permutations), 1)
expr = Parser.parse(expr_str).expr
self.assertIn(expr, permutations, f"{expr_str} not in perms")
def test_eq_1(self):
expected_permutations = [
"A()<->B()",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_eq_2(self):
expected_permutations = [
"A()<->(B()<->C())",
"(A()<->B())<->C()",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_eq_3(self):
expected_permutations = [
"A()<->((B()<->C())<->D())",
"A()<->(B()<->(C()<->D()))",
"((A()<->B())<->C())<->D()",
"(A()<->(B()<->C()))<->D()",
"(A()<->B())<->(C()<->D())",
]
for expr in expected_permutations:
expr = Parser.parse(expr).expr
permutations = expr.permute()
self.assertEqual(len(permutations), len(expected_permutations))
for expected_permutation in expected_permutations:
expr = Parser.parse(expected_permutation).expr
self.assertIn(expr, permutations, f"{expected_permutation} missing")
def test_not_1(self):
expr = Not(Predicate("A", []))
permutations = expr.permute()
self.assertEqual(len(permutations), 1)
self.assertEqual(expr, permutations[0])
def test_not_2(self):
expr = Not(Not(Predicate("A", [])))
permutations = expr.permute()
self.assertEqual(len(permutations), 1)
self.assertEqual(expr, permutations[0])
| 34.429603 | 84 | 0.554891 | 954 | 9,537 | 5.396226 | 0.050314 | 0.21756 | 0.019814 | 0.136364 | 0.957265 | 0.957265 | 0.957265 | 0.957265 | 0.95202 | 0.95202 | 0 | 0.00404 | 0.299256 | 9,537 | 276 | 85 | 34.554348 | 0.766273 | 0.000734 | 0 | 0.650943 | 0 | 0 | 0.117128 | 0.054681 | 0 | 0 | 0 | 0 | 0.179245 | 1 | 0.089623 | false | 0 | 0.014151 | 0 | 0.108491 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f37a4efdb0488998ed7c5a8c2b7b37bf52bdd835 | 26,135 | py | Python | tests/test_carb_math.py | novalegra/PyLoopKit | c275ef9490b0c528841525bf1b501e9c8805b20a | [
"BSD-2-Clause"
] | 6 | 2020-04-08T15:17:58.000Z | 2021-06-04T06:47:15.000Z | tests/test_carb_math.py | novalegra/PyLoopKit | c275ef9490b0c528841525bf1b501e9c8805b20a | [
"BSD-2-Clause"
] | 8 | 2019-08-29T01:38:41.000Z | 2021-03-11T22:58:07.000Z | tests/test_carb_math.py | novalegra/PyLoopKit | c275ef9490b0c528841525bf1b501e9c8805b20a | [
"BSD-2-Clause"
] | 5 | 2019-09-03T21:51:14.000Z | 2021-01-20T04:15:37.000Z | #!/usr/bin/env py#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 28 13:35:51 2019
@author: annaquinlan
Github URL: https://github.com/tidepool-org/LoopKit/blob/
57a9f2ba65ae3765ef7baafe66b883e654e08391/LoopKitTests/CarbMathTests.swift
"""
# pylint: disable=R0201, C0111, C0200, W0105, R0914
import unittest
from datetime import datetime, time, timedelta
#from . import path_grabber # pylint: disable=unused-import
from .loop_kit_tests import load_fixture
from pyloopkit.carb_math import (map_, carb_glucose_effects, carbs_on_board,
dynamic_carbs_on_board, dynamic_glucose_effects)
class TestCarbKitFunctions(unittest.TestCase):
""" unittest class to run CarbKit tests."""
INSULIN_SENSITIVITY_START_DATES = [time(0, 0)]
INSULIN_SENSITIVITY_END_DATES = [time(23, 59)]
INSULIN_SENSITIVITY_VALUES = [40]
DEFAULT_ABSORPTION_TIMES = [60,
120,
240
]
def load_schedules(self):
""" Load the carb schedule
Output:
2 lists in (schedule_start_times, carb_ratios) format
"""
schedule = load_fixture("read_carb_ratios", ".json").get("schedule")
carb_sched_starts = [
time.fromisoformat(dict_.get("start"))
for dict_ in schedule
]
carb_sched_ratios = [dict_.get("ratio") for dict_ in schedule]
return (carb_sched_starts, carb_sched_ratios)
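# Illustrative shape of the fixture consumed by load_schedules (a sketch;
# the real values live in read_carb_ratios.json):
#
#     {"schedule": [{"start": "00:00:00", "ratio": 10.0},
#                   {"start": "08:00:00", "ratio": 12.0}]}
#
# would map to ([time(0, 0), time(8, 0)], [10.0, 12.0]).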
def load_history_fixture(self, name):
""" Load carb history from json file
Argument:
name -- name of file, without .json extension
Output:
3 lists in (carb_start_dates, carb_values, carb_absorption_times)
format
"""
fixture = load_fixture(name, ".json")
return self.carb_entries_from_fixture(fixture)
def load_carb_entry_fixture(self):
""" Load carb entry
Output:
3 lists in (carb_start_dates, carb_values, carb_absorption_times)
format
"""
fixture = load_fixture("carb_entry_input", ".json")
return self.carb_entries_from_fixture(fixture)
def carb_entries_from_fixture(self, fixture):
""" Convert fixture to carb entries
Arguments:
fixture -- the pre-loaded json fixture
Output:
3 lists in (carb_start_dates, carb_values, carb_absorption_times)
format
"""
carb_values = [dict_.get("amount") for dict_ in fixture]
start_dates = [
datetime.fromisoformat(dict_.get("start_at"))
for dict_ in fixture
]
absorption_times = [
dict_.get("absorption_time") if dict_.get("absorption_time")
else None for dict_ in fixture
]
return (start_dates, carb_values, absorption_times)
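# Illustrative fixture entry and its mapping (a sketch; the amount, date and
# absorption time are made up):
#
#     [{"amount": 30, "start_at": "2015-10-15T21:30:00",
#       "absorption_time": 120}]
#
# would map to ([datetime(2015, 10, 15, 21, 30)], [30], [120]).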
def load_effect_fixture(self, name):
""" Load glucose effects from json file
Output:
2 lists in (date, glucose_value) format
"""
fixture = load_fixture(
name,
".json"
)
dates = [
datetime.fromisoformat(dict_.get("date"))
for dict_ in fixture
]
glucose_values = [dict_.get("amount") for dict_ in fixture]
assert len(dates) == len(glucose_values),\
"expected output shape to match"
return (dates, glucose_values)
def load_cob_output_fixture(self, name):
""" Load COB from json file
Arguments:
name -- name of file without the extension
Output:
2 lists in (date, cob_value) format
"""
fixture = load_fixture(name, ".json")
dates = [
datetime.fromisoformat(dict_.get("date"))
for dict_ in fixture
]
cob_values = [dict_.get("amount") for dict_ in fixture]
assert len(dates) == len(cob_values),\
"expected output shape to match"
return (dates, cob_values)
def load_ice_input_fixture(self, name):
""" Load insulin counteraction effects (ICE) from json file
Arguments:
name -- name of file without the extension
Output:
3 lists in (start_date, end_date, insulin_counteraction_value) format
"""
fixture = load_fixture(name, ".json")
start_dates = [
datetime.fromisoformat(dict_.get("start_at"))
for dict_ in fixture
]
end_dates = [
datetime.fromisoformat(dict_.get("end_at"))
for dict_ in fixture
]
ice_values = [dict_.get("velocity") for dict_ in fixture]
assert len(start_dates) == len(end_dates) == len(ice_values),\
"expected output shape to match"
return (start_dates, end_dates, ice_values)
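# Illustrative ICE fixture entry (a sketch; the velocity value is made up):
#
#     [{"start_at": "2015-10-15T21:30:00",
#       "end_at": "2015-10-15T21:35:00", "velocity": -0.5}]
#
# would map to one (start, end, velocity) triple covering a 5-minute window.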
""" Tests for map_ """
def test_carb_effect_with_zero_entry(self):
input_ice = self.load_ice_input_fixture("ice_35_min_input")
carb_ratio_tuple = self.load_schedules()
default_absorption_times = self.DEFAULT_ABSORPTION_TIMES
carb_entry_starts = [input_ice[0][0]]
carb_entry_quantities = [0]
carb_entry_absorptions = [120]
(absorptions,
timelines,
entries # pylint: disable=W0612
) = map_(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
*input_ice,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[0] / default_absorption_times[1],
default_absorption_times[1],
0
)
self.assertEqual(len(absorptions), 1)
self.assertEqual(absorptions[0][6], 0)
""" Tests for carb_glucose_effects """
def test_carb_effect_from_history(self):
input_ = self.load_history_fixture("carb_effect_from_history_input")
(expected_dates,
expected_values
) = self.load_effect_fixture("carb_effect_from_history_output")
carb_ratio_tuple = self.load_schedules()
(effect_starts,
effect_values
) = carb_glucose_effects(
*input_,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
180
)
self.assertEqual(
len(expected_dates), len(effect_starts)
)
for i in range(0, len(expected_dates)):
self.assertEqual(
expected_dates[i], effect_starts[i]
)
self.assertAlmostEqual(
expected_values[i], effect_values[i], 1
)
""" Tests for non-dynamic COB """
def test_carbs_on_board_from_history(self):
input_ = self.load_history_fixture("carb_effect_from_history_input")
(expected_dates,
expected_values
) = self.load_cob_output_fixture("carbs_on_board_output")
(cob_starts,
cob_values
) = carbs_on_board(
*input_,
default_absorption_time=180,
delay=10,
delta=5
)
self.assertEqual(
len(expected_dates), len(cob_starts)
)
for i in range(0, len(expected_dates)):
self.assertEqual(
expected_dates[i], cob_starts[i]
)
self.assertAlmostEqual(
expected_values[i], cob_values[i], 1
)
""" Tests for dynamic COB """
def test_dynamic_absorption_none_observed(self):
input_ice = self.load_ice_input_fixture("ice_35_min_input")
(carb_starts,
carb_values,
carb_absorptions
) = self.load_carb_entry_fixture()
carb_ratio_tuple = self.load_schedules()
default_absorption_times = self.DEFAULT_ABSORPTION_TIMES
carb_entry_starts = [carb_starts[2]]
carb_entry_quantities = [carb_values[2]]
carb_entry_absorptions = [carb_absorptions[2]]
(expected_dates,
expected_values
) = self.load_cob_output_fixture("ice_35_min_none_output")
(absorptions,
timelines,
entries, # pylint: disable=W0612
) = map_(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
*input_ice,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1] / default_absorption_times[0],
default_absorption_times[1],
0
)
self.assertEqual(len(absorptions), 1)
self.assertEqual(absorptions[0][6], 240)
self.assertEqual(
absorptions[0][4],
datetime.fromisoformat("2015-10-15 23:00:00")
)
(cob_dates,
cob_values
) = dynamic_carbs_on_board(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
absorptions,
timelines,
default_absorption_times[1],
delay=10,
delta=5,
start=input_ice[0][0],
end=(
input_ice[0][0]
+ timedelta(hours=6)
)
)
assert len(expected_dates) == len(cob_dates)
for i in range(0, len(expected_dates)):
self.assertEqual(
expected_dates[i], cob_dates[i]
)
self.assertAlmostEqual(
expected_values[i], cob_values[i], 1
)
def test_dynamic_absorption_partially_observed(self):
input_ice = self.load_ice_input_fixture("ice_35_min_input")
(carb_starts,
carb_values,
carb_absorptions
) = self.load_carb_entry_fixture()
carb_ratio_tuple = self.load_schedules()
default_absorption_times = self.DEFAULT_ABSORPTION_TIMES
carb_entry_starts = [carb_starts[0]]
carb_entry_quantities = [carb_values[0]]
carb_entry_absorptions = [carb_absorptions[0]]
(expected_dates,
expected_values
) = self.load_cob_output_fixture("ice_35_min_partial_output")
(absorptions,
timelines,
entries, # pylint: disable=W0612
) = map_(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
*input_ice,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1] / default_absorption_times[0],
default_absorption_times[1],
0
)
self.assertEqual(len(absorptions), 1)
self.assertAlmostEqual(absorptions[0][6], 8509/60, 2)
(cob_dates,
cob_values
) = dynamic_carbs_on_board(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
absorptions,
timelines,
default_absorption_times[1],
delay=10,
delta=5,
start=input_ice[0][0],
end=(
input_ice[0][0]
+ timedelta(hours=6)
)
)
assert len(expected_dates) == len(cob_dates)
for i in range(0, len(expected_dates)):
self.assertEqual(
expected_dates[i], cob_dates[i]
)
self.assertAlmostEqual(
expected_values[i], cob_values[i], 1
)
def test_dynamic_absorption_fully_observed(self):
input_ice = self.load_ice_input_fixture("ice_1_hour_input")
(carb_starts,
carb_values,
carb_absorptions
) = self.load_carb_entry_fixture()
carb_ratio_tuple = self.load_schedules()
default_absorption_times = self.DEFAULT_ABSORPTION_TIMES
carb_entry_starts = [carb_starts[0]]
carb_entry_quantities = [carb_values[0]]
carb_entry_absorptions = [carb_absorptions[0]]
(expected_dates,
expected_values
) = self.load_cob_output_fixture("ice_1_hour_output")
(absorptions,
timelines,
entries, # pylint: disable=W0612
) = map_(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
*input_ice,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1] / default_absorption_times[0],
default_absorption_times[1],
0
)
self.assertEqual(len(absorptions), 1)
self.assertIsNotNone(absorptions[0])
# No remaining absorption
self.assertEqual(absorptions[0][6], 0)
# All should be absorbed
self.assertEqual(absorptions[0][0], 44)
(cob_dates,
cob_values
) = dynamic_carbs_on_board(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
absorptions,
timelines,
default_absorption_times[1],
delay=10,
delta=5,
start=input_ice[0][0],
end=(
input_ice[0][0]
+ timedelta(hours=6)
)
)
assert len(expected_dates) == len(cob_dates)
for i in range(0, len(expected_dates)):
self.assertEqual(
expected_dates[i], cob_dates[i]
)
self.assertAlmostEqual(
expected_values[i], cob_values[i], 1
)
def test_dynamic_absorption_never_fully_observed(self):
input_ice = self.load_ice_input_fixture("ice_slow_absorption")
(carb_starts,
carb_values,
carb_absorptions
) = self.load_carb_entry_fixture()
carb_ratio_tuple = self.load_schedules()
default_absorption_times = self.DEFAULT_ABSORPTION_TIMES
carb_entry_starts = [carb_starts[1]]
carb_entry_quantities = [carb_values[1]]
carb_entry_absorptions = [carb_absorptions[1]]
(expected_dates,
expected_values
) = self.load_cob_output_fixture("ice_slow_absorption_output")
(absorptions,
timelines,
entries, # pylint: disable=W0612
) = map_(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
*input_ice,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1] / default_absorption_times[0],
default_absorption_times[1],
0
)
self.assertEqual(len(absorptions), 1)
self.assertIsNotNone(absorptions[0])
self.assertAlmostEqual(absorptions[0][6], 10488/60, 2)
(cob_dates,
cob_values
) = dynamic_carbs_on_board(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
absorptions,
timelines,
default_absorption_times[1],
delay=10,
delta=5,
start=input_ice[0][0],
end=(
input_ice[0][0]
+ timedelta(hours=18)
)
)
assert len(expected_dates) == len(cob_dates)
for i in range(0, len(expected_dates)):
self.assertEqual(
expected_dates[i], cob_dates[i]
)
self.assertAlmostEqual(
expected_values[i], cob_values[i], 1
)
""" Tests for dynamic_glucose_effects """
def test_dynamic_glucose_effect_absorption_none_observed(self):
input_ice = self.load_ice_input_fixture("ice_35_min_input")
(carb_starts,
carb_values,
carb_absorptions
) = self.load_carb_entry_fixture()
carb_ratio_tuple = self.load_schedules()
default_absorption_times = self.DEFAULT_ABSORPTION_TIMES
carb_entry_starts = [carb_starts[2]]
carb_entry_quantities = [carb_values[2]]
carb_entry_absorptions = [carb_absorptions[2]]
(expected_dates,
expected_values
) = self.load_cob_output_fixture(
"dynamic_glucose_effect_none_observed_output"
)
(absorptions,
timelines,
entries, # pylint: disable=W0612
) = map_(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
*input_ice,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1] / default_absorption_times[0],
default_absorption_times[1],
0
)
self.assertEqual(len(absorptions), 1)
self.assertEqual(absorptions[0][6], 240)
self.assertEqual(
absorptions[0][4],
datetime.fromisoformat("2015-10-15 23:00:00")
)
(effect_dates,
effect_values
) = dynamic_glucose_effects(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
absorptions,
timelines,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1],
delay=10,
delta=5,
start=input_ice[0][0],
end=(
input_ice[0][0]
+ timedelta(hours=6)
)
)
assert len(expected_dates) == len(effect_dates)
for i in range(0, len(expected_dates)):
self.assertEqual(
expected_dates[i], effect_dates[i]
)
self.assertAlmostEqual(
expected_values[i], effect_values[i], 2
)
def test_dynamic_glucose_effect_absorption_partially_observed(self):
input_ice = self.load_ice_input_fixture("ice_35_min_input")
(carb_starts,
carb_values,
carb_absorptions
) = self.load_carb_entry_fixture()
carb_ratio_tuple = self.load_schedules()
default_absorption_times = self.DEFAULT_ABSORPTION_TIMES
carb_entry_starts = [carb_starts[0]]
carb_entry_quantities = [carb_values[0]]
carb_entry_absorptions = [carb_absorptions[0]]
(expected_dates,
expected_values
) = self.load_effect_fixture(
"dynamic_glucose_effect_partially_observed_output"
)
(absorptions,
timelines,
entries, # pylint: disable=W0612
) = map_(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
*input_ice,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1] / default_absorption_times[0],
default_absorption_times[1],
0
)
self.assertEqual(len(absorptions), 1)
self.assertAlmostEqual(absorptions[0][6], 8509/60, 2)
(effect_dates,
effect_values
) = dynamic_glucose_effects(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
absorptions,
timelines,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1],
delay=10,
delta=5,
start=input_ice[0][0],
end=(
input_ice[0][0]
+ timedelta(hours=6)
)
)
assert len(expected_dates) == len(effect_dates)
for i in range(0, len(expected_dates)):
self.assertEqual(
expected_dates[i], effect_dates[i]
)
self.assertAlmostEqual(
expected_values[i], effect_values[i], 2
)
def test_dynamic_glucose_effect_absorption_fully_observed(self):
input_ice = self.load_ice_input_fixture("ice_1_hour_input")
(carb_starts,
carb_values,
carb_absorptions
) = self.load_carb_entry_fixture()
(expected_dates,
expected_values
) = self.load_effect_fixture(
"dynamic_glucose_effect_fully_observed_output")
carb_ratio_tuple = self.load_schedules()
default_absorption_times = self.DEFAULT_ABSORPTION_TIMES
carb_entry_starts = [carb_starts[0]]
carb_entry_quantities = [carb_values[0]]
carb_entry_absorptions = [carb_absorptions[0]]
(absorptions,
timelines,
entries, # pylint: disable=W0612
) = map_(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
*input_ice,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1] / default_absorption_times[0],
default_absorption_times[1],
0
)
self.assertEqual(len(absorptions), 1)
self.assertIsNotNone(absorptions[0])
# No remaining absorption
self.assertEqual(absorptions[0][6], 0)
# All should be absorbed
self.assertEqual(absorptions[0][0], 44)
(effect_dates,
effect_values
) = dynamic_glucose_effects(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
absorptions,
timelines,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1],
delay=10,
delta=5,
start=input_ice[0][0],
end=(
input_ice[0][0]
+ timedelta(hours=6)
)
)
self.assertEqual(len(effect_values), len(expected_values))
for i in range(0, len(effect_values)):
self.assertEqual(
expected_dates[i], effect_dates[i]
)
self.assertAlmostEqual(
expected_values[i], effect_values[i], 2
)
def test_dynamic_glucose_effect_absorption_never_fully_observed(self):
input_ice = self.load_ice_input_fixture("ice_slow_absorption")
(carb_starts,
carb_values,
carb_absorptions
) = self.load_carb_entry_fixture()
carb_ratio_tuple = self.load_schedules()
default_absorption_times = self.DEFAULT_ABSORPTION_TIMES
carb_entry_starts = [carb_starts[1]]
carb_entry_quantities = [carb_values[1]]
carb_entry_absorptions = [carb_absorptions[1]]
(expected_dates,
expected_values
) = self.load_cob_output_fixture(
"dynamic_glucose_effect_never_fully_observed_output"
)
(absorptions,
timelines,
entries, # pylint: disable=W0612
) = map_(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
*input_ice,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1] / default_absorption_times[0],
default_absorption_times[1],
0
)
self.assertEqual(len(absorptions), 1)
self.assertIsNotNone(absorptions[0])
self.assertAlmostEqual(absorptions[0][6], 10488/60, 2)
(effect_dates,
effect_values
) = dynamic_glucose_effects(
carb_entry_starts,
carb_entry_quantities,
carb_entry_absorptions,
absorptions,
timelines,
*carb_ratio_tuple,
self.INSULIN_SENSITIVITY_START_DATES,
self.INSULIN_SENSITIVITY_END_DATES,
self.INSULIN_SENSITIVITY_VALUES,
default_absorption_times[1],
delay=10,
delta=5,
start=input_ice[0][0],
end=(
input_ice[0][0]
+ timedelta(hours=18)
)
)
assert len(expected_dates) == len(effect_dates)
for i in range(0, len(expected_dates)):
self.assertEqual(
expected_dates[i], effect_dates[i]
)
self.assertAlmostEqual(
expected_values[i], effect_values[i], 2
)
if __name__ == '__main__':
unittest.main()
| 30.710928 | 77 | 0.578688 | 2,665 | 26,135 | 5.285178 | 0.068293 | 0.056869 | 0.084345 | 0.053674 | 0.850621 | 0.839049 | 0.827263 | 0.817891 | 0.806674 | 0.795456 | 0 | 0.024073 | 0.341955 | 26,135 | 850 | 78 | 30.747059 | 0.794918 | 0.061335 | 0 | 0.749226 | 0 | 0 | 0.036703 | 0.017186 | 0 | 0 | 0 | 0 | 0.091331 | 1 | 0.027864 | false | 0 | 0.006192 | 0 | 0.052632 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f3868c4181a92b894048b161825ac7788d3e7f14 | 13,715 | py | Python | Codigos_python/Graficos/FrequenciaAcq_Texp/FrequenciaAcq_x_Texp_AllModes.py | DBernardes/Macro-SPARC4-CCD-cameras | 08eb5b331c993d9f5b360bc2961972ded56c171d | [
"MIT"
] | null | null | null | Codigos_python/Graficos/FrequenciaAcq_Texp/FrequenciaAcq_x_Texp_AllModes.py | DBernardes/Macro-SPARC4-CCD-cameras | 08eb5b331c993d9f5b360bc2961972ded56c171d | [
"MIT"
] | null | null | null | Codigos_python/Graficos/FrequenciaAcq_Texp/FrequenciaAcq_x_Texp_AllModes.py | DBernardes/Macro-SPARC4-CCD-cameras | 08eb5b331c993d9f5b360bc2961972ded56c171d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
import pandas as pd
import matplotlib.pyplot as plt
#----------------- 0.1 MHz, B1 --------------------------------------------------------------
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B1HSS01.xlsm')
##columns = pd.DataFrame(df)
##TEXP1024B1 = columns['TEXP (s)'][0:22]
##FREQ1024B1 = columns['FREQ (fps)'][0:22]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B1HSS01.xlsm')
##columns = pd.DataFrame(df)
##TEXP512B1 = columns['TEXP (s)'][0:27]
##FREQ512B1 = columns['FREQ (fps)'][0:27]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B1HSS01.xlsm')
##columns = pd.DataFrame(df)
##TEXP256B1 = columns['TEXP (s)'][0:23]
##FREQ256B1 = columns['FREQ (fps)'][0:23]
##
##fontsize = 14
fig = plt.figure()
##ax = fig.add_subplot(251)
##ax.errorbar(TEXP1024B1, FREQ1024B1, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024, \; B1}$')
##ax.errorbar(TEXP512B1, FREQ512B1, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512, \; B1}$')
##ax.errorbar(TEXP256B1, FREQ256B1, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256, \; B1}$')
##plt.title('0.1 MHz B1')
#----------------- 0.1 MHz, B2 --------------------------------------------------------------
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B2HSS01.xlsm')
##columns = pd.DataFrame(df)
##
##TEXP1024B2 = columns['TEXP (s)'][0:19]
##FREQ1024B2 = columns['FREQ (fps)'][0:19]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B2HSS01.xlsm')
##columns = pd.DataFrame(df)
##TEXP512B2 = columns['TEXP (s)'][0:23]
##FREQ512B2 = columns['FREQ (fps)'][0:23]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B2HSS01.xlsm')
##columns = pd.DataFrame(df)
##TEXP256B2 = columns['TEXP (s)'][0:19]
##FREQ256B2 = columns['FREQ (fps)'][0:19]
##
##ax = fig.add_subplot(252)
##ax.errorbar(TEXP256B2, FREQ256B2, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256, \; B2}$')
##ax.errorbar(TEXP512B2, FREQ512B2, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512, \; B2}$')
##ax.errorbar(TEXP1024B2, FREQ1024B2, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024, \; B2}$')
##plt.title('0.1 MHz B2')
#----------------- 1 MHz, B1 --------------------------------------------------------------
# Read the spreadsheet and extract the column values
df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B1HSS1.xlsm')
columns = pd.DataFrame(df)
TEXP1024B1 = columns['TEXP (s)'][0:21]
FREQ1024B1 = columns['FREQ (fps)'][0:21]
# Read the spreadsheet and extract the column values
df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B1HSS1.xlsm')
columns = pd.DataFrame(df)
TEXP512B1 = columns['TEXP (s)'][0:26]
FREQ512B1 = columns['FREQ (fps)'][0:26]
# Read the spreadsheet and extract the column values
df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B1HSS1.xlsm')
columns = pd.DataFrame(df)
TEXP256B1 = columns['TEXP (s)'][0:32]
FREQ256B1 = columns['FREQ (fps)'][0:32]
ax = fig.add_subplot(111)
ax.errorbar(TEXP256B1, FREQ256B1, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256}$')
ax.errorbar(TEXP512B1, FREQ512B1, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512}$')
ax.errorbar(TEXP1024B1, FREQ1024B1, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024}$')
ax.legend()
#plt.title('1 MHz B1')
plt.show()
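# The three read-and-slice blocks above (and the commented-out ones below)
# repeat a single pattern. A minimal refactoring sketch, assuming every
# spreadsheet carries the same 'TEXP (s)' and 'FREQ (fps)' columns; the
# helper name and signature are hypothetical, not part of the original:
def load_texp_freq(path, n_rows):
    """Return the first n_rows of the TEXP and FREQ columns of a spreadsheet."""
    columns = pd.DataFrame(pd.read_excel(path))
    return columns['TEXP (s)'][0:n_rows], columns['FREQ (fps)'][0:n_rows]
# Example: TEXP256B1, FREQ256B1 = load_texp_freq(r'...\X256B1HSS1.xlsm', 32)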
###----------------- 1 MHz, B2 --------------------------------------------------------------
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B2HSS1.xlsm')
##columns = pd.DataFrame(df)
##TEXP1024B2 = columns['TEXP (s)'][0:22]
##FREQ1024B2 = columns['FREQ (fps)'][0:22]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B2HSS1.xlsm')
##columns = pd.DataFrame(df)
##TEXP512B2 = columns['TEXP (s)'][0:27]
##FREQ512B2 = columns['FREQ (fps)'][0:27]
##
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B2HSS1.xlsm')
##columns = pd.DataFrame(df)
##TEXP256B2 = columns['TEXP (s)'][0:30]
##FREQ256B2 = columns['FREQ (fps)'][0:30]
##
##ax = fig.add_subplot(254)
##ax.errorbar(TEXP256B2, FREQ256B2, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256, \; B2}$')
##ax.errorbar(TEXP512B2, FREQ512B2, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512, \; B2}$')
##ax.errorbar(TEXP1024B2, FREQ1024B2, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024, \; B2}$')
##plt.title('1 MHz B2')
###----------------- 10 MHz, B1 --------------------------------------------------------------
##
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B1HSS10.xlsm')
##columns = pd.DataFrame(df)
##TEXP1024B1 = columns['TEXP (s)'][0:16]
##FREQ1024B1 = columns['FREQ (fps)'][0:16]
##
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B1HSS10.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP512B1 = columns['TEXP (s)'][0:31]
##FREQ512B1 = columns['FREQ (fps)'][0:31]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B1HSS10.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP256B1 = columns['TEXP (s)'][0:25]
##FREQ256B1 = columns['FREQ (fps)'][0:25]
##
##ax = fig.add_subplot(255)
##ax.errorbar(TEXP1024B1, FREQ1024B1, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024, \; B1}$')
##ax.errorbar(TEXP512B1, FREQ512B1, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512, \; B1}$')
##ax.errorbar(TEXP256B1, FREQ256B1, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256, \; B1}$')
##plt.title('10 MHz B1')
###----------------- 10 MHz, B2 --------------------------------------------------------------
##
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B2HSS10.xlsm')
##columns = pd.DataFrame(df)
##TEXP1024B1 = columns['TEXP (s)'][0:16]
##FREQ1024B1 = columns['FREQ (fps)'][0:16]
##
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B2HSS10.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP512B1 = columns['TEXP (s)'][0:31]
##FREQ512B1 = columns['FREQ (fps)'][0:31]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B2HSS10.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP256B1 = columns['TEXP (s)'][0:25]
##FREQ256B1 = columns['FREQ (fps)'][0:25]
##
##ax = fig.add_subplot(256)
##ax.errorbar(TEXP1024B1, FREQ1024B1, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024, \; B1}$')
##ax.errorbar(TEXP512B1, FREQ512B1, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512, \; B1}$')
##ax.errorbar(TEXP256B1, FREQ256B1, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256, \; B1}$')
##plt.title('10 MHz B2')
###----------------- 20 MHz, B1 --------------------------------------------------------------
##
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B1HSS20.xlsm')
##columns = pd.DataFrame(df)
##TEXP1024B1 = columns['TEXP (s)'][0:31]
##FREQ1024B1 = columns['FREQ (fps)'][0:31]
##
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B1HSS20.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP512B1 = columns['TEXP (s)'][0:25]
##FREQ512B1 = columns['FREQ (fps)'][0:25]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B1HSS20.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP256B1 = columns['TEXP (s)'][0:26]
##FREQ256B1 = columns['FREQ (fps)'][0:26]
##
##ax = fig.add_subplot(257)
##ax.errorbar(TEXP1024B1, FREQ1024B1, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024, \; B1}$')
##ax.errorbar(TEXP512B1, FREQ512B1, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512, \; B1}$')
##ax.errorbar(TEXP256B1, FREQ256B1, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256, \; B1}$')
##plt.title('20 MHz B1')
###----------------- 20 MHz, B2 --------------------------------------------------------------
##
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B2HSS20.xlsm')
##columns = pd.DataFrame(df)
##TEXP1024B1 = columns['TEXP (s)'][0:25]
##FREQ1024B1 = columns['FREQ (fps)'][0:25]
##
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B2HSS20.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP512B1 = columns['TEXP (s)'][0:26]
##FREQ512B1 = columns['FREQ (fps)'][0:26]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B2HSS20.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP256B1 = columns['TEXP (s)'][0:31]
##FREQ256B1 = columns['FREQ (fps)'][0:31]
##
##ax = fig.add_subplot(258)
##ax.errorbar(TEXP1024B1, FREQ1024B1, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024, \; B1}$')
##ax.errorbar(TEXP512B1, FREQ512B1, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512, \; B1}$')
##ax.errorbar(TEXP256B1, FREQ256B1, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256, \; B1}$')
##plt.title('20 MHz B2')
###----------------- 30 MHz, B1 --------------------------------------------------------------
##
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B1HSS30.xlsm')
##columns = pd.DataFrame(df)
##TEXP1024B1 = columns['TEXP (s)'][0:15]
##FREQ1024B1 = columns['FREQ (fps)'][0:15]
##
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B1HSS30.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP512B1 = columns['TEXP (s)'][0:16]
##FREQ512B1 = columns['FREQ (fps)'][0:16]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B1HSS30.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP256B1 = columns['TEXP (s)'][0:26]
##FREQ256B1 = columns['FREQ (fps)'][0:26]
##
##ax = fig.add_subplot(259)
##ax.errorbar(TEXP1024B1, FREQ1024B1, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024, \; B1}$')
##ax.errorbar(TEXP512B1, FREQ512B1, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512, \; B1}$')
##ax.errorbar(TEXP256B1, FREQ256B1, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256, \; B1}$')
##plt.title('30 MHz B1')
###----------------- 30 MHz, B2 --------------------------------------------------------------
##
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X1024B2HSS30.xlsm')
##columns = pd.DataFrame(df)
##TEXP1024B1 = columns['TEXP (s)'][0:25]
##FREQ1024B1 = columns['FREQ (fps)'][0:25]
##
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X512B2HSS30.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP512B1 = columns['TEXP (s)'][0:31]
##FREQ512B1 = columns['FREQ (fps)'][0:31]
##
### Read the spreadsheet and extract the column values
##df = pd.read_excel(r'C:\Users\denis\Desktop\UNIFEI\Projeto_Mestrado\Codigos\blibioteca_de_funcoes\X256B2HSS30.xlsm')
##columns = pd.DataFrame(df)
##Freq = {}
##TEXP256B1 = columns['TEXP (s)'][0:33]
##FREQ256B1 = columns['FREQ (fps)'][0:33]
##
##ax = plt.subplot2grid((2,5), (1, 4))
##ax.errorbar(TEXP1024B1, FREQ1024B1, marker='o', c='blue',linewidth=1.0, label=r'$\mathtt{x1024, \; B1}$')
##ax.errorbar(TEXP512B1, FREQ512B1, marker='o', c='red',linewidth=1.0, label=r'$\mathtt{x512, \; B1}$')
##ax.errorbar(TEXP256B1, FREQ256B1, marker='o', c='green',linewidth=1.0, label=r'$\mathtt{x256, \; B1}$')
##plt.title('30 MHz B2')
##
###plt.xlim(0,20)
###plt.xlabel(r'$ Exposure \;\; time \;\; (s) $', size=fontsize)
###plt.ylabel(r'$ Acquisition \;\; frequency \;\; (fps) $', size=fontsize)
##plt.rc('xtick', labelsize=13)
##plt.rc('ytick', labelsize=13)
##plt.legend(loc='upper right')
##plt.show()
| 47.621528 | 120 | 0.651039 | 1,970 | 13,715 | 4.466497 | 0.085787 | 0.013638 | 0.027276 | 0.044323 | 0.901693 | 0.861575 | 0.861575 | 0.861575 | 0.861575 | 0.861575 | 0 | 0.09358 | 0.089172 | 13,715 | 287 | 121 | 47.787456 | 0.610791 | 0.859861 | 0 | 0.130435 | 0 | 0 | 0.2802 | 0.197999 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.173913 | 0 | 0.173913 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f39fa13eaf06896c7b8b856361d977e7a6742fa7 | 68 | py | Python | tests/assets/testpackage/two/beta.py | mrcljx/grimp | 258eee1e6aa4908fffc1962d82cd01a3f5fd395c | [
"BSD-2-Clause"
] | 19 | 2019-01-21T09:45:00.000Z | 2022-02-21T11:20:09.000Z | tests/assets/testpackage/two/beta.py | mrcljx/grimp | 258eee1e6aa4908fffc1962d82cd01a3f5fd395c | [
"BSD-2-Clause"
] | 51 | 2018-11-05T17:10:31.000Z | 2022-01-05T18:24:18.000Z | tests/assets/testpackage/two/beta.py | mrcljx/grimp | 258eee1e6aa4908fffc1962d82cd01a3f5fd395c | [
"BSD-2-Clause"
] | 4 | 2021-01-16T04:16:22.000Z | 2021-12-23T02:50:04.000Z | from testpackage.one import alpha
def foo():
return alpha.BAR
| 11.333333 | 33 | 0.720588 | 10 | 68 | 4.9 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.205882 | 68 | 5 | 34 | 13.6 | 0.907407 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
f3b7ead3d541e4c92edd698a3003b52a786a75f4 | 43,000 | py | Python | custos-client-sdks/custos-python-sdk/build/lib/custos/server/integration/GroupManagementService_pb2_grpc.py | apache/airavata-custos | 075dd26c364b5b5abe8a4f2b226b2de30474f8e4 | [
"Apache-2.0"
] | 10 | 2019-05-21T22:42:35.000Z | 2022-03-25T15:58:09.000Z | custos-client-sdks/custos-python-sdk/build/lib/custos/server/integration/GroupManagementService_pb2_grpc.py | apache/airavata-custos | 075dd26c364b5b5abe8a4f2b226b2de30474f8e4 | [
"Apache-2.0"
] | 83 | 2019-02-22T12:22:14.000Z | 2022-03-30T13:42:47.000Z | custos-client-sdks/custos-python-sdk/build/lib/custos/server/integration/GroupManagementService_pb2_grpc.py | apache/airavata-custos | 075dd26c364b5b5abe8a4f2b226b2de30474f8e4 | [
"Apache-2.0"
] | 20 | 2019-02-22T08:10:05.000Z | 2021-11-07T19:37:04.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import custos.server.core.IamAdminService_pb2 as IamAdminService__pb2
import custos.server.core.UserProfileService_pb2 as UserProfileService__pb2
class GroupManagementServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.createKeycloakGroups = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/createKeycloakGroups',
request_serializer=IamAdminService__pb2.GroupsRequest.SerializeToString,
response_deserializer=IamAdminService__pb2.GroupsResponse.FromString,
)
self.updateKeycloakGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/updateKeycloakGroup',
request_serializer=IamAdminService__pb2.GroupRequest.SerializeToString,
response_deserializer=IamAdminService__pb2.GroupRepresentation.FromString,
)
self.deleteKeycloakGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/deleteKeycloakGroup',
request_serializer=IamAdminService__pb2.GroupRequest.SerializeToString,
response_deserializer=IamAdminService__pb2.OperationStatus.FromString,
)
self.findKeycloakGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/findKeycloakGroup',
request_serializer=IamAdminService__pb2.GroupRequest.SerializeToString,
response_deserializer=IamAdminService__pb2.GroupRepresentation.FromString,
)
self.getAllKeycloakGroups = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/getAllKeycloakGroups',
request_serializer=IamAdminService__pb2.GroupRequest.SerializeToString,
response_deserializer=IamAdminService__pb2.GroupsResponse.FromString,
)
self.addUserToKeycloakGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/addUserToKeycloakGroup',
request_serializer=IamAdminService__pb2.UserGroupMappingRequest.SerializeToString,
response_deserializer=IamAdminService__pb2.OperationStatus.FromString,
)
self.removeUserFromKeycloakGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/removeUserFromKeycloakGroup',
request_serializer=IamAdminService__pb2.UserGroupMappingRequest.SerializeToString,
response_deserializer=IamAdminService__pb2.OperationStatus.FromString,
)
self.createGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/createGroup',
request_serializer=UserProfileService__pb2.GroupRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.Group.FromString,
)
self.updateGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/updateGroup',
request_serializer=UserProfileService__pb2.GroupRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.Group.FromString,
)
self.deleteGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/deleteGroup',
request_serializer=UserProfileService__pb2.GroupRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.Status.FromString,
)
self.findGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/findGroup',
request_serializer=UserProfileService__pb2.GroupRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.Group.FromString,
)
self.getAllGroups = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/getAllGroups',
request_serializer=UserProfileService__pb2.GroupRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.GetAllGroupsResponse.FromString,
)
self.addUserToGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/addUserToGroup',
request_serializer=UserProfileService__pb2.GroupMembership.SerializeToString,
response_deserializer=UserProfileService__pb2.Status.FromString,
)
self.removeUserFromGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/removeUserFromGroup',
request_serializer=UserProfileService__pb2.GroupMembership.SerializeToString,
response_deserializer=UserProfileService__pb2.Status.FromString,
)
self.addChildGroupToParentGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/addChildGroupToParentGroup',
request_serializer=UserProfileService__pb2.GroupToGroupMembership.SerializeToString,
response_deserializer=UserProfileService__pb2.Status.FromString,
)
self.removeChildGroupFromParentGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/removeChildGroupFromParentGroup',
request_serializer=UserProfileService__pb2.GroupToGroupMembership.SerializeToString,
response_deserializer=UserProfileService__pb2.Status.FromString,
)
self.getAllGroupsOfUser = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/getAllGroupsOfUser',
request_serializer=UserProfileService__pb2.UserProfileRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.GetAllGroupsResponse.FromString,
)
self.getAllParentGroupsOfGroup = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/getAllParentGroupsOfGroup',
request_serializer=UserProfileService__pb2.GroupRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.GetAllGroupsResponse.FromString,
)
self.getAllChildUsers = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/getAllChildUsers',
request_serializer=UserProfileService__pb2.GroupRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.GetAllUserProfilesResponse.FromString,
)
self.getAllChildGroups = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/getAllChildGroups',
request_serializer=UserProfileService__pb2.GroupRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.GetAllGroupsResponse.FromString,
)
self.changeUserMembershipType = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/changeUserMembershipType',
request_serializer=UserProfileService__pb2.GroupMembership.SerializeToString,
response_deserializer=UserProfileService__pb2.Status.FromString,
)
self.hasAccess = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/hasAccess',
request_serializer=UserProfileService__pb2.GroupMembership.SerializeToString,
response_deserializer=UserProfileService__pb2.Status.FromString,
)
self.addGroupMembershipType = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/addGroupMembershipType',
request_serializer=UserProfileService__pb2.UserGroupMembershipTypeRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.Status.FromString,
)
self.removeUserGroupMembershipType = channel.unary_unary(
'/org.apache.custos.group.management.service.GroupManagementService/removeUserGroupMembershipType',
request_serializer=UserProfileService__pb2.UserGroupMembershipTypeRequest.SerializeToString,
response_deserializer=UserProfileService__pb2.Status.FromString,
)
class GroupManagementServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def createKeycloakGroups(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def updateKeycloakGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def deleteKeycloakGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def findKeycloakGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getAllKeycloakGroups(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addUserToKeycloakGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeUserFromKeycloakGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def createGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def updateGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def deleteGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def findGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getAllGroups(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addUserToGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeUserFromGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addChildGroupToParentGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeChildGroupFromParentGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getAllGroupsOfUser(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getAllParentGroupsOfGroup(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getAllChildUsers(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def getAllChildGroups(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def changeUserMembershipType(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def hasAccess(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def addGroupMembershipType(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def removeUserGroupMembershipType(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_GroupManagementServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'createKeycloakGroups': grpc.unary_unary_rpc_method_handler(
servicer.createKeycloakGroups,
request_deserializer=IamAdminService__pb2.GroupsRequest.FromString,
response_serializer=IamAdminService__pb2.GroupsResponse.SerializeToString,
),
'updateKeycloakGroup': grpc.unary_unary_rpc_method_handler(
servicer.updateKeycloakGroup,
request_deserializer=IamAdminService__pb2.GroupRequest.FromString,
response_serializer=IamAdminService__pb2.GroupRepresentation.SerializeToString,
),
'deleteKeycloakGroup': grpc.unary_unary_rpc_method_handler(
servicer.deleteKeycloakGroup,
request_deserializer=IamAdminService__pb2.GroupRequest.FromString,
response_serializer=IamAdminService__pb2.OperationStatus.SerializeToString,
),
'findKeycloakGroup': grpc.unary_unary_rpc_method_handler(
servicer.findKeycloakGroup,
request_deserializer=IamAdminService__pb2.GroupRequest.FromString,
response_serializer=IamAdminService__pb2.GroupRepresentation.SerializeToString,
),
'getAllKeycloakGroups': grpc.unary_unary_rpc_method_handler(
servicer.getAllKeycloakGroups,
request_deserializer=IamAdminService__pb2.GroupRequest.FromString,
response_serializer=IamAdminService__pb2.GroupsResponse.SerializeToString,
),
'addUserToKeycloakGroup': grpc.unary_unary_rpc_method_handler(
servicer.addUserToKeycloakGroup,
request_deserializer=IamAdminService__pb2.UserGroupMappingRequest.FromString,
response_serializer=IamAdminService__pb2.OperationStatus.SerializeToString,
),
'removeUserFromKeycloakGroup': grpc.unary_unary_rpc_method_handler(
servicer.removeUserFromKeycloakGroup,
request_deserializer=IamAdminService__pb2.UserGroupMappingRequest.FromString,
response_serializer=IamAdminService__pb2.OperationStatus.SerializeToString,
),
'createGroup': grpc.unary_unary_rpc_method_handler(
servicer.createGroup,
request_deserializer=UserProfileService__pb2.GroupRequest.FromString,
response_serializer=UserProfileService__pb2.Group.SerializeToString,
),
'updateGroup': grpc.unary_unary_rpc_method_handler(
servicer.updateGroup,
request_deserializer=UserProfileService__pb2.GroupRequest.FromString,
response_serializer=UserProfileService__pb2.Group.SerializeToString,
),
'deleteGroup': grpc.unary_unary_rpc_method_handler(
servicer.deleteGroup,
request_deserializer=UserProfileService__pb2.GroupRequest.FromString,
response_serializer=UserProfileService__pb2.Status.SerializeToString,
),
'findGroup': grpc.unary_unary_rpc_method_handler(
servicer.findGroup,
request_deserializer=UserProfileService__pb2.GroupRequest.FromString,
response_serializer=UserProfileService__pb2.Group.SerializeToString,
),
'getAllGroups': grpc.unary_unary_rpc_method_handler(
servicer.getAllGroups,
request_deserializer=UserProfileService__pb2.GroupRequest.FromString,
response_serializer=UserProfileService__pb2.GetAllGroupsResponse.SerializeToString,
),
'addUserToGroup': grpc.unary_unary_rpc_method_handler(
servicer.addUserToGroup,
request_deserializer=UserProfileService__pb2.GroupMembership.FromString,
response_serializer=UserProfileService__pb2.Status.SerializeToString,
),
'removeUserFromGroup': grpc.unary_unary_rpc_method_handler(
servicer.removeUserFromGroup,
request_deserializer=UserProfileService__pb2.GroupMembership.FromString,
response_serializer=UserProfileService__pb2.Status.SerializeToString,
),
'addChildGroupToParentGroup': grpc.unary_unary_rpc_method_handler(
servicer.addChildGroupToParentGroup,
request_deserializer=UserProfileService__pb2.GroupToGroupMembership.FromString,
response_serializer=UserProfileService__pb2.Status.SerializeToString,
),
'removeChildGroupFromParentGroup': grpc.unary_unary_rpc_method_handler(
servicer.removeChildGroupFromParentGroup,
request_deserializer=UserProfileService__pb2.GroupToGroupMembership.FromString,
response_serializer=UserProfileService__pb2.Status.SerializeToString,
),
'getAllGroupsOfUser': grpc.unary_unary_rpc_method_handler(
servicer.getAllGroupsOfUser,
request_deserializer=UserProfileService__pb2.UserProfileRequest.FromString,
response_serializer=UserProfileService__pb2.GetAllGroupsResponse.SerializeToString,
),
'getAllParentGroupsOfGroup': grpc.unary_unary_rpc_method_handler(
servicer.getAllParentGroupsOfGroup,
request_deserializer=UserProfileService__pb2.GroupRequest.FromString,
response_serializer=UserProfileService__pb2.GetAllGroupsResponse.SerializeToString,
),
'getAllChildUsers': grpc.unary_unary_rpc_method_handler(
servicer.getAllChildUsers,
request_deserializer=UserProfileService__pb2.GroupRequest.FromString,
response_serializer=UserProfileService__pb2.GetAllUserProfilesResponse.SerializeToString,
),
'getAllChildGroups': grpc.unary_unary_rpc_method_handler(
servicer.getAllChildGroups,
request_deserializer=UserProfileService__pb2.GroupRequest.FromString,
response_serializer=UserProfileService__pb2.GetAllGroupsResponse.SerializeToString,
),
'changeUserMembershipType': grpc.unary_unary_rpc_method_handler(
servicer.changeUserMembershipType,
request_deserializer=UserProfileService__pb2.GroupMembership.FromString,
response_serializer=UserProfileService__pb2.Status.SerializeToString,
),
'hasAccess': grpc.unary_unary_rpc_method_handler(
servicer.hasAccess,
request_deserializer=UserProfileService__pb2.GroupMembership.FromString,
response_serializer=UserProfileService__pb2.Status.SerializeToString,
),
'addGroupMembershipType': grpc.unary_unary_rpc_method_handler(
servicer.addGroupMembershipType,
request_deserializer=UserProfileService__pb2.UserGroupMembershipTypeRequest.FromString,
response_serializer=UserProfileService__pb2.Status.SerializeToString,
),
'removeUserGroupMembershipType': grpc.unary_unary_rpc_method_handler(
servicer.removeUserGroupMembershipType,
request_deserializer=UserProfileService__pb2.UserGroupMembershipTypeRequest.FromString,
response_serializer=UserProfileService__pb2.Status.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'org.apache.custos.group.management.service.GroupManagementService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class GroupManagementService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def createKeycloakGroups(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/createKeycloakGroups',
IamAdminService__pb2.GroupsRequest.SerializeToString,
IamAdminService__pb2.GroupsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def updateKeycloakGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/updateKeycloakGroup',
IamAdminService__pb2.GroupRequest.SerializeToString,
IamAdminService__pb2.GroupRepresentation.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def deleteKeycloakGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/deleteKeycloakGroup',
IamAdminService__pb2.GroupRequest.SerializeToString,
IamAdminService__pb2.OperationStatus.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def findKeycloakGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/findKeycloakGroup',
IamAdminService__pb2.GroupRequest.SerializeToString,
IamAdminService__pb2.GroupRepresentation.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def getAllKeycloakGroups(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/getAllKeycloakGroups',
IamAdminService__pb2.GroupRequest.SerializeToString,
IamAdminService__pb2.GroupsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def addUserToKeycloakGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/addUserToKeycloakGroup',
IamAdminService__pb2.UserGroupMappingRequest.SerializeToString,
IamAdminService__pb2.OperationStatus.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def removeUserFromKeycloakGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/removeUserFromKeycloakGroup',
IamAdminService__pb2.UserGroupMappingRequest.SerializeToString,
IamAdminService__pb2.OperationStatus.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def createGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/createGroup',
UserProfileService__pb2.GroupRequest.SerializeToString,
UserProfileService__pb2.Group.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def updateGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/updateGroup',
UserProfileService__pb2.GroupRequest.SerializeToString,
UserProfileService__pb2.Group.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def deleteGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/deleteGroup',
UserProfileService__pb2.GroupRequest.SerializeToString,
UserProfileService__pb2.Status.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def findGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/findGroup',
UserProfileService__pb2.GroupRequest.SerializeToString,
UserProfileService__pb2.Group.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def getAllGroups(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/getAllGroups',
UserProfileService__pb2.GroupRequest.SerializeToString,
UserProfileService__pb2.GetAllGroupsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def addUserToGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/addUserToGroup',
UserProfileService__pb2.GroupMembership.SerializeToString,
UserProfileService__pb2.Status.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def removeUserFromGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/removeUserFromGroup',
UserProfileService__pb2.GroupMembership.SerializeToString,
UserProfileService__pb2.Status.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def addChildGroupToParentGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/addChildGroupToParentGroup',
UserProfileService__pb2.GroupToGroupMembership.SerializeToString,
UserProfileService__pb2.Status.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def removeChildGroupFromParentGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/removeChildGroupFromParentGroup',
UserProfileService__pb2.GroupToGroupMembership.SerializeToString,
UserProfileService__pb2.Status.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def getAllGroupsOfUser(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/getAllGroupsOfUser',
UserProfileService__pb2.UserProfileRequest.SerializeToString,
UserProfileService__pb2.GetAllGroupsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def getAllParentGroupsOfGroup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/getAllParentGroupsOfGroup',
UserProfileService__pb2.GroupRequest.SerializeToString,
UserProfileService__pb2.GetAllGroupsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def getAllChildUsers(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/getAllChildUsers',
UserProfileService__pb2.GroupRequest.SerializeToString,
UserProfileService__pb2.GetAllUserProfilesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def getAllChildGroups(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/getAllChildGroups',
UserProfileService__pb2.GroupRequest.SerializeToString,
UserProfileService__pb2.GetAllGroupsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def changeUserMembershipType(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/changeUserMembershipType',
UserProfileService__pb2.GroupMembership.SerializeToString,
UserProfileService__pb2.Status.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def hasAccess(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/hasAccess',
UserProfileService__pb2.GroupMembership.SerializeToString,
UserProfileService__pb2.Status.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def addGroupMembershipType(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/addGroupMembershipType',
UserProfileService__pb2.UserGroupMembershipTypeRequest.SerializeToString,
UserProfileService__pb2.Status.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def removeUserGroupMembershipType(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/org.apache.custos.group.management.service.GroupManagementService/removeUserGroupMembershipType',
UserProfileService__pb2.UserGroupMembershipTypeRequest.SerializeToString,
UserProfileService__pb2.Status.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
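# Usage sketch (illustrative; not part of the generated stubs). It assumes the
# static methods above belong to a class named GroupManagementService, which is
# the usual layout of grpc_tools-generated *_pb2_grpc modules, and that a
# Custos server is reachable at the hypothetical address below.
if __name__ == '__main__':
    request = UserProfileService__pb2.GroupRequest()  # populate fields per the Custos proto
    # insecure=True skips TLS entirely; production calls should instead pass
    # channel_credentials=grpc.ssl_channel_credentials().
    group = GroupManagementService.createGroup(
        request,
        target='localhost:50051',  # hypothetical endpoint
        insecure=True,
        timeout=10,
    )
    print(group)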
# --- nodeum_sdk/api/files_api.py (repo: nodeum-io/nodeum-sdk-python,
#     commit 205536491bff507dea7be44af46202c17e7121d9, license: MIT) ---
# coding: utf-8
"""
Nodeum API
The Nodeum API makes it easy to tap into the digital data mesh that runs across your organisation. Make requests to our API endpoints and we’ll give you everything you need to interconnect your business workflows with your storage. All production API requests are made to: http://nodeumhostname/api/ The current production version of the API is v1. **REST** The Nodeum API is a RESTful API. This means that the API is designed to allow you to get, create, update, & delete objects with the HTTP verbs GET, POST, PUT, PATCH, & DELETE. **JSON** The Nodeum API speaks exclusively in JSON. This means that you should always set the Content-Type header to application/json to ensure that your requests are properly accepted and processed by the API. **Authentication** All API calls require user-password authentication. **Cross-Origin Resource Sharing** The Nodeum API supports CORS for communicating from Javascript for these endpoints. You will need to specify an Origin URI when creating your application to allow for CORS to be whitelisted for your domain. **Pagination** Some endpoints such as File Listing return a potentially lengthy array of objects. In order to keep the response sizes manageable the API will take advantage of pagination. Pagination is a mechanism for returning a subset of the results for a request and allowing for subsequent requests to “page” through the rest of the results until the end is reached. Paginated endpoints follow a standard interface that accepts two query parameters, limit and offset, and return a payload that follows a standard form. These parameter names and their behavior are borrowed from SQL LIMIT and OFFSET keywords. **Versioning** The Nodeum API is constantly being worked on to add features, make improvements, and fix bugs. This means that you should expect changes to be introduced and documented. However, there are some changes or additions that are considered backwards-compatible and your applications should be flexible enough to handle them. These include: - Adding new endpoints to the API - Adding new attributes to the response of an existing endpoint - Changing the order of attributes of responses (JSON by definition is an object of unordered key/value pairs) **Filter parameters** When browsing a list of items, multiple filter parameters may be applied. Some operators can be added to the value as a prefix: - `=` value is equal. Default operator, may be omitted - `!=` value is different - `>` greater than - `>=` greater than or equal - `<` lower than - `<=` lower than or equal - `><` included in list, items should be separated by `|` - `!><` not included in list, items should be separated by `|` - `~` pattern matching, may include `%` (any characters) and `_` (one character) - `!~` pattern not matching, may include `%` (any characters) and `_` (one character) # noqa: E501
The version of the OpenAPI document: 2.1.0
Contact: info@nodeum.io
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from nodeum_sdk.api_client import ApiClient
from nodeum_sdk.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class FilesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def files_children(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children(file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.files_children_with_http_info(**kwargs) # noqa: E501
def files_children_with_http_info(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_with_http_info(file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'file_parent_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method files_children" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_parent_id' is set
if self.api_client.client_side_validation and ('file_parent_id' not in local_var_params or # noqa: E501
local_var_params['file_parent_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_parent_id` when calling `files_children`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_parent_id' in local_var_params:
path_params['file_parent_id'] = local_var_params['file_parent_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/files/{file_parent_id}/children', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
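    # Usage sketch (kept as a comment so the generated class body stays free of
    # side effects; the host, credentials, and IDs below are assumptions, and
    # the Configuration/ApiClient wiring follows the usual openapi-generator
    # python client layout):
    #
    #   from nodeum_sdk import ApiClient, Configuration
    #   config = Configuration()
    #   config.host = 'http://nodeumhostname/api'
    #   config.username, config.password = 'admin', 'secret'   # BasicAuth
    #   api = FilesApi(ApiClient(config))
    #   page = api.files_children(file_parent_id=1, limit=50, offset=0,
    #                             type='folder')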
def files_children_by_container(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_container(container_id=container_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str container_id: Numeric ID or name of container. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.files_children_by_container_with_http_info(**kwargs) # noqa: E501
def files_children_by_container_with_http_info(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_container_with_http_info(container_id=container_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str container_id: Numeric ID or name of container. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'container_id',
'file_parent_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method files_children_by_container" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'container_id' is set
if self.api_client.client_side_validation and ('container_id' not in local_var_params or # noqa: E501
local_var_params['container_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `container_id` when calling `files_children_by_container`") # noqa: E501
# verify the required parameter 'file_parent_id' is set
if self.api_client.client_side_validation and ('file_parent_id' not in local_var_params or # noqa: E501
local_var_params['file_parent_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_parent_id` when calling `files_children_by_container`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in local_var_params:
path_params['container_id'] = local_var_params['container_id'] # noqa: E501
if 'file_parent_id' in local_var_params:
path_params['file_parent_id'] = local_var_params['file_parent_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/files/{file_parent_id}/children', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def files_children_by_pool(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_pool(pool_id=pool_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID or name of pool. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.files_children_by_pool_with_http_info(**kwargs) # noqa: E501
def files_children_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_pool_with_http_info(pool_id=pool_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID or name of pool. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'file_parent_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method files_children_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `files_children_by_pool`") # noqa: E501
# verify the required parameter 'file_parent_id' is set
if self.api_client.client_side_validation and ('file_parent_id' not in local_var_params or # noqa: E501
local_var_params['file_parent_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_parent_id` when calling `files_children_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
if 'file_parent_id' in local_var_params:
path_params['file_parent_id'] = local_var_params['file_parent_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/files/{file_parent_id}/children', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
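    # Pagination sketch (comment-only; limit/offset follow the SQL-style
    # semantics described in the module docstring, and the `files` attribute
    # name on NodeumFileCollection is an assumption):
    #
    #   offset, limit = 0, 100
    #   while True:
    #       batch = api.files_children_by_pool(pool_id='pool1',
    #                                          file_parent_id=1,
    #                                          limit=limit, offset=offset)
    #       process(batch)
    #       if len(batch.files) < limit:   # short page means we reached the end
    #           break
    #       offset += limit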
def files_children_by_task(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_task(task_id=task_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it is recommended to use the numeric ID. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.files_children_by_task_with_http_info(**kwargs) # noqa: E501
def files_children_by_task_with_http_info(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_task_with_http_info(task_id=task_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it is recommended to use the numeric ID. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_id',
'file_parent_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method files_children_by_task" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_id' is set
if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501
local_var_params['task_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_id` when calling `files_children_by_task`") # noqa: E501
# verify the required parameter 'file_parent_id' is set
if self.api_client.client_side_validation and ('file_parent_id' not in local_var_params or # noqa: E501
local_var_params['file_parent_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_parent_id` when calling `files_children_by_task`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_id' in local_var_params:
path_params['task_id'] = local_var_params['task_id'] # noqa: E501
if 'file_parent_id' in local_var_params:
path_params['file_parent_id'] = local_var_params['file_parent_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/tasks/{task_id}/files/{file_parent_id}/children', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def files_children_by_task_execution(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_task_execution(task_execution_id=task_execution_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.files_children_by_task_execution_with_http_info(**kwargs) # noqa: E501
def files_children_by_task_execution_with_http_info(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_task_execution_with_http_info(task_execution_id=task_execution_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_execution_id',
'file_parent_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method files_children_by_task_execution" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_execution_id' is set
if self.api_client.client_side_validation and ('task_execution_id' not in local_var_params or # noqa: E501
local_var_params['task_execution_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_execution_id` when calling `files_children_by_task_execution`") # noqa: E501
# verify the required parameter 'file_parent_id' is set
if self.api_client.client_side_validation and ('file_parent_id' not in local_var_params or # noqa: E501
local_var_params['file_parent_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_parent_id` when calling `files_children_by_task_execution`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_execution_id' in local_var_params:
path_params['task_execution_id'] = local_var_params['task_execution_id'] # noqa: E501
if 'file_parent_id' in local_var_params:
path_params['file_parent_id'] = local_var_params['file_parent_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/task_executions/{task_execution_id}/files/{file_parent_id}/children', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
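    # Raw-response sketch (comment-only; behavior per the parameter docstrings
    # above): _preload_content=False returns the HTTP response undecoded, and
    # _request_timeout accepts either a single number or a (connect, read) pair.
    #
    #   raw = api.files_children_by_task_execution(task_execution_id='9',
    #                                              file_parent_id=1,
    #                                              _preload_content=False,
    #                                              _request_timeout=(3.05, 27))
    #   body = raw.data   # raw JSON bytes, not parsed into models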
def files_children_by_task_execution_by_task(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_task_execution_by_task(task_id=task_id_value, task_execution_id=task_execution_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it is recommended to use the numeric ID. (required)
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.files_children_by_task_execution_by_task_with_http_info(**kwargs) # noqa: E501
def files_children_by_task_execution_by_task_with_http_info(self, **kwargs): # noqa: E501
"""Lists files under a specific folder. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.files_children_by_task_execution_by_task_with_http_info(task_id=task_id_value, task_execution_id=task_execution_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it is recommended to use the numeric ID. (required)
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_id',
'task_execution_id',
'file_parent_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method files_children_by_task_execution_by_task" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_id' is set
if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501
local_var_params['task_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_id` when calling `files_children_by_task_execution_by_task`") # noqa: E501
# verify the required parameter 'task_execution_id' is set
if self.api_client.client_side_validation and ('task_execution_id' not in local_var_params or # noqa: E501
local_var_params['task_execution_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_execution_id` when calling `files_children_by_task_execution_by_task`") # noqa: E501
# verify the required parameter 'file_parent_id' is set
if self.api_client.client_side_validation and ('file_parent_id' not in local_var_params or # noqa: E501
local_var_params['file_parent_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_parent_id` when calling `files_children_by_task_execution_by_task`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_id' in local_var_params:
path_params['task_id'] = local_var_params['task_id'] # noqa: E501
if 'task_execution_id' in local_var_params:
path_params['task_execution_id'] = local_var_params['task_execution_id'] # noqa: E501
if 'file_parent_id' in local_var_params:
path_params['file_parent_id'] = local_var_params['file_parent_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/tasks/{task_id}/task_executions/{task_execution_id}/files/{file_parent_id}/children', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
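# Illustrative only (not generated code): when client_side_validation is
# enabled, the required-parameter checks above raise ApiValueError locally,
# before any HTTP request is made, and unknown keyword arguments raise
# ApiTypeError. A minimal sketch, assuming `api` is an instance of this
# class backed by a configured ApiClient:
#
# try:
#     api.files_children_by_task_execution_by_task(task_id='42')
# except ApiValueError as e:
#     # task_execution_id and file_parent_id are missing -> raised locally
#     print(e)
# except ApiTypeError as e:
#     # raised for any keyword argument not in all_params
#     print(e)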
def import_files_children_by_pool(self, **kwargs): # noqa: E501
"""Lists files under a specific folder on tape of pools, specific for Data Exchange. # noqa: E501
**API Key Scope**: import_files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_files_children_by_pool(pool_id=pool_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ImportFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.import_files_children_by_pool_with_http_info(**kwargs) # noqa: E501
def import_files_children_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Lists files under a specific folder on tape of pools, specific for Data Exchange. # noqa: E501
**API Key Scope**: import_files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_files_children_by_pool_with_http_info(pool_id=pool_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ImportFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'file_parent_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method import_files_children_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `import_files_children_by_pool`") # noqa: E501
# verify the required parameter 'file_parent_id' is set
if self.api_client.client_side_validation and ('file_parent_id' not in local_var_params or # noqa: E501
local_var_params['file_parent_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_parent_id` when calling `import_files_children_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
if 'file_parent_id' in local_var_params:
path_params['file_parent_id'] = local_var_params['file_parent_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/import_files/{file_parent_id}/children', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImportFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
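# Usage sketch (not part of the generated client): paging through import
# files under a folder with the `limit`/`offset` parameters. Assumes `api`
# is an instance of this class; the `files` attribute on
# ImportFileCollection is an assumption about the response model.
#
# offset, limit = 0, 100
# while True:
#     page = api.import_files_children_by_pool(
#         pool_id='pool-1', file_parent_id=7,
#         limit=limit, offset=offset)
#     items = page.files  # attribute name assumed
#     if not items:
#         break
#     offset += limit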
def index_files(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_files_with_http_info(**kwargs) # noqa: E501
def index_files_with_http_info(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_files" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/files', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
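# Usage sketch (illustrative, not generated): the filter parameters are
# passed straight through as query-string parameters, so a root listing
# filtered by name and type looks like the line below. The exact filter
# syntax (wildcards, comparison operators) is defined by the server, not by
# this client; the values here are placeholders.
#
# collection = api.index_files(name='report*', type='file', limit=50)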
def index_files_by_container(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_container(container_id=container_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str container_id: Numeric ID or name of container. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_files_by_container_with_http_info(**kwargs) # noqa: E501
def index_files_by_container_with_http_info(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_container_with_http_info(container_id=container_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str container_id: Numeric ID or name of container. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'container_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_files_by_container" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'container_id' is set
if self.api_client.client_side_validation and ('container_id' not in local_var_params or # noqa: E501
local_var_params['container_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `container_id` when calling `index_files_by_container`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in local_var_params:
path_params['container_id'] = local_var_params['container_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/files', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def index_files_by_pool(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_pool(pool_id=pool_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_files_by_pool_with_http_info(**kwargs) # noqa: E501
def index_files_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_pool_with_http_info(pool_id=pool_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_files_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `index_files_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/files', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
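# Async usage sketch (not generated): with async_req=True the call returns
# a thread-like object immediately, and the deserialized result is obtained
# with .get(), as the docstring examples above show.
#
# thread = api.index_files_by_pool(pool_id='pool-1', async_req=True)
# collection = thread.get()  # blocks until the HTTP request completes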
def index_files_by_task(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_task(task_id=task_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it is recommended to use the numeric ID. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_files_by_task_with_http_info(**kwargs) # noqa: E501
def index_files_by_task_with_http_info(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_task_with_http_info(task_id=task_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it is recommended to use the numeric ID. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_files_by_task" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_id' is set
if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501
local_var_params['task_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_id` when calling `index_files_by_task`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_id' in local_var_params:
path_params['task_id'] = local_var_params['task_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/tasks/{task_id}/files', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
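# Usage sketch (illustrative): _request_timeout accepts either a single
# number (total timeout) or a (connection, read) tuple, and
# _preload_content=False hands back the raw urllib3.HTTPResponse without
# reading or decoding it. Assumes `api` is an instance of this class.
#
# raw = api.index_files_by_task(task_id='42',
#                               _preload_content=False,
#                               _request_timeout=(3.05, 27))
# body = raw.data  # undecoded bytes; urllib3.HTTPResponse API assumed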
def index_files_by_task_execution(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_task_execution(task_execution_id=task_execution_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_execution_id: Numeric ID of task execution. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_files_by_task_execution_with_http_info(**kwargs) # noqa: E501
def index_files_by_task_execution_with_http_info(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_task_execution_with_http_info(task_execution_id=task_execution_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_execution_id: Numeric ID of task execution. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_execution_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_files_by_task_execution" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_execution_id' is set
if self.api_client.client_side_validation and ('task_execution_id' not in local_var_params or # noqa: E501
local_var_params['task_execution_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_execution_id` when calling `index_files_by_task_execution`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_execution_id' in local_var_params:
path_params['task_execution_id'] = local_var_params['task_execution_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/task_executions/{task_execution_id}/files', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def index_files_by_task_execution_by_task(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_task_execution_by_task(task_id=task_id_value, task_execution_id=task_execution_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it is recommended to use the numeric ID. (required)
:param str task_execution_id: Numeric ID of task execution. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NodeumFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_files_by_task_execution_by_task_with_http_info(**kwargs) # noqa: E501
def index_files_by_task_execution_by_task_with_http_info(self, **kwargs): # noqa: E501
"""Lists files on root. # noqa: E501
**API Key Scope**: files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_files_by_task_execution_by_task_with_http_info(task_id=task_id_value, task_execution_id=task_execution_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it is recommended to use the numeric ID. (required)
:param str task_execution_id: Numeric ID of task execution. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NodeumFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_id',
'task_execution_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_files_by_task_execution_by_task" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_id' is set
if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501
local_var_params['task_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_id` when calling `index_files_by_task_execution_by_task`") # noqa: E501
# verify the required parameter 'task_execution_id' is set
if self.api_client.client_side_validation and ('task_execution_id' not in local_var_params or # noqa: E501
local_var_params['task_execution_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_execution_id` when calling `index_files_by_task_execution_by_task`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_id' in local_var_params:
path_params['task_id'] = local_var_params['task_id'] # noqa: E501
if 'task_execution_id' in local_var_params:
path_params['task_execution_id'] = local_var_params['task_execution_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/tasks/{task_id}/task_executions/{task_execution_id}/files', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
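# Usage sketch (not generated): the *_with_http_info variants return a
# 3-tuple of (deserialized data, HTTP status code, response headers)
# instead of the data alone. Assumes `api` is an instance of this class.
#
# data, status, headers = \
#     api.index_files_by_task_execution_by_task_with_http_info(
#         task_id='42', task_execution_id='7')
# assert status == 200  # placeholder check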
def index_import_files_by_pool(self, **kwargs): # noqa: E501
"""Lists files on root of tape of pools, specific for Data Exchange. # noqa: E501
**API Key Scope**: import_files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_import_files_by_pool(pool_id=pool_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ImportFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_import_files_by_pool_with_http_info(**kwargs) # noqa: E501
def index_import_files_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Lists files on root of tape of pools, specific for Data Exchange. # noqa: E501
**API Key Scope**: import_files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_import_files_by_pool_with_http_info(pool_id=pool_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str file_id: Filter on file id
:param str name: Filter on name
:param str type: Filter on type
:param str permission: Filter on permission
:param str size: Filter on size
:param str change_date: Filter on change date
:param str modification_date: Filter on modification date
:param str access_date: Filter on access date
:param str gid: Filter on gid
:param str uid: Filter on uid
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ImportFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'limit',
'offset',
'file_id',
'name',
'type',
'permission',
'size',
'change_date',
'modification_date',
'access_date',
'gid',
'uid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_import_files_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `index_import_files_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'file_id' in local_var_params and local_var_params['file_id'] is not None: # noqa: E501
query_params.append(('file_id', local_var_params['file_id'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'permission' in local_var_params and local_var_params['permission'] is not None: # noqa: E501
query_params.append(('permission', local_var_params['permission'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
if 'change_date' in local_var_params and local_var_params['change_date'] is not None: # noqa: E501
query_params.append(('change_date', local_var_params['change_date'])) # noqa: E501
if 'modification_date' in local_var_params and local_var_params['modification_date'] is not None: # noqa: E501
query_params.append(('modification_date', local_var_params['modification_date'])) # noqa: E501
if 'access_date' in local_var_params and local_var_params['access_date'] is not None: # noqa: E501
query_params.append(('access_date', local_var_params['access_date'])) # noqa: E501
if 'gid' in local_var_params and local_var_params['gid'] is not None: # noqa: E501
query_params.append(('gid', local_var_params['gid'])) # noqa: E501
if 'uid' in local_var_params and local_var_params['uid'] is not None: # noqa: E501
query_params.append(('uid', local_var_params['uid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/import_files', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImportFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
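# Usage sketch (illustrative, not generated code): listing the import files
# of a pool with pagination and a name filter. The package name `nodeum_sdk`,
# the `Configuration` class, and the `FilesApi` wrapper are assumptions based
# on typical openapi-generator Python output; adjust to the installed client.
#
#   import nodeum_sdk
#   from nodeum_sdk.rest import ApiException
#
#   configuration = nodeum_sdk.Configuration()
#   configuration.username = 'admin'      # BasicAuth; BearerAuth also works
#   configuration.password = 'secret'
#   api = nodeum_sdk.FilesApi(nodeum_sdk.ApiClient(configuration))
#   try:
#       page = api.index_import_files_by_pool(
#           pool_id='archive-pool', limit=50, offset=0, name='report')
#   except ApiException as exc:
#       print('listing failed: %s' % exc)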
def index_on_tapes_files_by_pool(self, **kwargs): # noqa: E501
"""Lists files on root of tape of pools, specific for Active and Offline. # noqa: E501
**API Key Scope**: on_tapes_files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_on_tapes_files_by_pool(pool_id=pool_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str name: Filter on name
:param str type: Filter on type
:param str size: Filter on size
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: OnTapesFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_on_tapes_files_by_pool_with_http_info(**kwargs) # noqa: E501
def index_on_tapes_files_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Lists files on root of tape of pools, specific for Active and Offline. # noqa: E501
**API Key Scope**: on_tapes_files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_on_tapes_files_by_pool_with_http_info(pool_id=pool_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str name: Filter on name
:param str type: Filter on type
:param str size: Filter on size
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(OnTapesFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'limit',
'offset',
'name',
'type',
'size'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_on_tapes_files_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `index_on_tapes_files_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/on_tapes_files', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OnTapesFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
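# Usage sketch: the *_with_http_info variant returns a (data, status, headers)
# tuple instead of the bare collection, which is handy for inspecting response
# metadata. `api` is assumed to be a configured instance of this API class.
#
#   data, status, headers = api.index_on_tapes_files_by_pool_with_http_info(
#       pool_id='archive-pool', limit=100)
#   print(status, headers.get('Content-Type'))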
def index_tapes_by_file_by_pool(self, **kwargs): # noqa: E501
"""Displays tapes containing specific file, related to the specific pool. # noqa: E501
**API Key Scope**: files / tapes # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_tapes_by_file_by_pool(pool_id=pool_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: TapeCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_tapes_by_file_by_pool_with_http_info(**kwargs) # noqa: E501
def index_tapes_by_file_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Displays tapes containing specific file, related to the specific pool. # noqa: E501
**API Key Scope**: files / tapes # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_tapes_by_file_by_pool_with_http_info(pool_id=pool_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(TapeCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_tapes_by_file_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `index_tapes_by_file_by_pool`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `index_tapes_by_file_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/files/{file_id}/tapes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TapeCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
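# Usage sketch: passing async_req=True makes the call return immediately with
# a thread-like handle; get() blocks until the response is available and
# re-raises any ApiException from the worker.
#
#   thread = api.index_tapes_by_file_by_pool(
#       pool_id='archive-pool', file_id=42, async_req=True)
#   ...  # do other work while the request is in flight
#   tapes = thread.get()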
def index_tapes_by_file_by_task(self, **kwargs): # noqa: E501
"""Displays tapes containing specific file, related to the specific task. # noqa: E501
**API Key Scope**: files / tapes # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_tapes_by_file_by_task(task_id=task_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it's recommended to use the numeric ID. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: TapeCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_tapes_by_file_by_task_with_http_info(**kwargs) # noqa: E501
def index_tapes_by_file_by_task_with_http_info(self, **kwargs): # noqa: E501
"""Displays tapes containing specific file, related to the specific task. # noqa: E501
**API Key Scope**: files / tapes # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_tapes_by_file_by_task_with_http_info(task_id=task_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it's recommended to use the numeric ID. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(TapeCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_tapes_by_file_by_task" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_id' is set
if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501
local_var_params['task_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_id` when calling `index_tapes_by_file_by_task`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `index_tapes_by_file_by_task`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_id' in local_var_params:
path_params['task_id'] = local_var_params['task_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/tasks/{task_id}/files/{file_id}/tapes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TapeCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
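# Usage sketch: since task names are not unique, it is safer to resolve the
# task to its numeric ID first. `TasksApi`, `index_tasks`, and the `tasks`
# and `id` attributes below are assumptions about the wider SDK, shown only
# to illustrate the lookup.
#
#   tasks_api = nodeum_sdk.TasksApi(api.api_client)
#   match = tasks_api.index_tasks(name='nightly-archive')
#   task_id = match.tasks[0].id
#   tapes = api.index_tapes_by_file_by_task(task_id=task_id, file_id=42)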
def index_tapes_by_file_by_task_execution(self, **kwargs): # noqa: E501
"""Displays tapes containing specific file, related to the specific task. # noqa: E501
**API Key Scope**: files / tapes # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_tapes_by_file_by_task_execution(task_execution_id=task_execution_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: TapeCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_tapes_by_file_by_task_execution_with_http_info(**kwargs) # noqa: E501
def index_tapes_by_file_by_task_execution_with_http_info(self, **kwargs): # noqa: E501
"""Displays tapes containing specific file, related to the specific task. # noqa: E501
**API Key Scope**: files / tapes # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_tapes_by_file_by_task_execution_with_http_info(task_execution_id=task_execution_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(TapeCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_execution_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_tapes_by_file_by_task_execution" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_execution_id' is set
if self.api_client.client_side_validation and ('task_execution_id' not in local_var_params or # noqa: E501
local_var_params['task_execution_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_execution_id` when calling `index_tapes_by_file_by_task_execution`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `index_tapes_by_file_by_task_execution`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_execution_id' in local_var_params:
path_params['task_execution_id'] = local_var_params['task_execution_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/task_executions/{task_execution_id}/files/{file_id}/tapes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TapeCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def index_tapes_by_file_by_task_execution_by_task(self, **kwargs): # noqa: E501
"""Displays tapes containing specific file, related to the specific task. # noqa: E501
**API Key Scope**: files / tapes # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_tapes_by_file_by_task_execution_by_task(task_id=task_id_value, task_execution_id=task_execution_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it's recommended to use the numeric ID. (required)
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: TapeCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.index_tapes_by_file_by_task_execution_by_task_with_http_info(**kwargs) # noqa: E501
def index_tapes_by_file_by_task_execution_by_task_with_http_info(self, **kwargs): # noqa: E501
"""Displays tapes containing specific file, related to the specific task. # noqa: E501
**API Key Scope**: files / tapes # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.index_tapes_by_file_by_task_execution_by_task_with_http_info(task_id=task_id_value, task_execution_id=task_execution_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it's recommended to use the numeric ID. (required)
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(TapeCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_id',
'task_execution_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method index_tapes_by_file_by_task_execution_by_task" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_id' is set
if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501
local_var_params['task_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_id` when calling `index_tapes_by_file_by_task_execution_by_task`") # noqa: E501
# verify the required parameter 'task_execution_id' is set
if self.api_client.client_side_validation and ('task_execution_id' not in local_var_params or # noqa: E501
local_var_params['task_execution_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_execution_id` when calling `index_tapes_by_file_by_task_execution_by_task`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `index_tapes_by_file_by_task_execution_by_task`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_id' in local_var_params:
path_params['task_id'] = local_var_params['task_id'] # noqa: E501
if 'task_execution_id' in local_var_params:
path_params['task_execution_id'] = local_var_params['task_execution_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/tasks/{task_id}/task_executions/{task_execution_id}/files/{file_id}/tapes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TapeCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
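# Usage sketch: _request_timeout accepts either a single number (the total
# timeout in seconds) or a (connection, read) tuple that is passed through
# to urllib3.
#
#   tapes = api.index_tapes_by_file_by_task_execution_by_task(
#       task_id=7, task_execution_id=3, file_id=42,
#       _request_timeout=(3.05, 27))   # 3.05s to connect, 27s to read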
def on_tapes_files_children_by_pool(self, **kwargs): # noqa: E501
"""Lists files under a specific folder on tape of pools, specific for Active and Offline. # noqa: E501
**API Key Scope**: on_tapes_files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.on_tapes_files_children_by_pool(pool_id=pool_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str name: Filter on name
:param str type: Filter on type
:param str size: Filter on size
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: OnTapesFileCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.on_tapes_files_children_by_pool_with_http_info(**kwargs) # noqa: E501
def on_tapes_files_children_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Lists files under a specific folder on tape of pools, specific for Active and Offline. # noqa: E501
**API Key Scope**: on_tapes_files / index # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.on_tapes_files_children_by_pool_with_http_info(pool_id=pool_id_value, file_parent_id=file_parent_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_parent_id: Numeric ID of parent folder. (required)
:param int limit: The number of items to display for pagination.
:param int offset: The number of items to skip for pagination.
:param str name: Filter on name
:param str type: Filter on type
:param str size: Filter on size
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(OnTapesFileCollection, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'file_parent_id',
'limit',
'offset',
'name',
'type',
'size'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method on_tapes_files_children_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `on_tapes_files_children_by_pool`") # noqa: E501
# verify the required parameter 'file_parent_id' is set
if self.api_client.client_side_validation and ('file_parent_id' not in local_var_params or # noqa: E501
local_var_params['file_parent_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_parent_id` when calling `on_tapes_files_children_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
if 'file_parent_id' in local_var_params:
path_params['file_parent_id'] = local_var_params['file_parent_id'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501
query_params.append(('type', local_var_params['type'])) # noqa: E501
if 'size' in local_var_params and local_var_params['size'] is not None: # noqa: E501
query_params.append(('size', local_var_params['size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/on_tapes_files/{file_parent_id}/children', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OnTapesFileCollection', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
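# Usage sketch: combining the root listing with the children endpoint to walk
# a pool's on-tape file tree. The `files`, `type`, and `id` attributes of the
# OnTapesFileCollection items are assumptions about the response model.
#
#   def walk_on_tapes(api, pool_id, parent_id=None):
#       if parent_id is None:
#           page = api.index_on_tapes_files_by_pool(pool_id=pool_id)
#       else:
#           page = api.on_tapes_files_children_by_pool(
#               pool_id=pool_id, file_parent_id=parent_id)
#       for item in page.files or []:
#           yield item
#           if item.type == 'folder':
#               for child in walk_on_tapes(api, pool_id, item.id):
#                   yield child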
def show_file(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file(file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: NodeumFileWithPath
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.show_file_with_http_info(**kwargs) # noqa: E501
def show_file_with_http_info(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_with_http_info(file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(NodeumFileWithPath, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method show_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `show_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/files/{file_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileWithPath', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
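# Usage sketch: with _preload_content=False the raw urllib3.HTTPResponse is
# returned instead of a deserialized NodeumFileWithPath, leaving decoding to
# the caller (requires `import json` at the top of the script).
#
#   raw = api.show_file(file_id=42, _preload_content=False)
#   if raw.status == 200:
#       payload = json.loads(raw.data.decode('utf-8'))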
def show_file_by_container(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_container(container_id=container_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str container_id: Numeric ID or name of container. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: NodeumFileWithPath
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.show_file_by_container_with_http_info(**kwargs) # noqa: E501
def show_file_by_container_with_http_info(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_container_with_http_info(container_id=container_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str container_id: Numeric ID or name of container. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(NodeumFileWithPath, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'container_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method show_file_by_container" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'container_id' is set
if self.api_client.client_side_validation and ('container_id' not in local_var_params or # noqa: E501
local_var_params['container_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `container_id` when calling `show_file_by_container`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `show_file_by_container`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in local_var_params:
path_params['container_id'] = local_var_params['container_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/files/{file_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileWithPath', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def show_file_by_pool(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_pool(pool_id=pool_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: NodeumFileWithPath
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.show_file_by_pool_with_http_info(**kwargs) # noqa: E501
def show_file_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_pool_with_http_info(pool_id=pool_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(NodeumFileWithPath, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method show_file_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `show_file_by_pool`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `show_file_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/files/{file_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileWithPath', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def show_file_by_task(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_task(task_id=task_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it's recommended to use the numeric ID. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: NodeumFileWithPath
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.show_file_by_task_with_http_info(**kwargs) # noqa: E501
def show_file_by_task_with_http_info(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_task_with_http_info(task_id=task_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it's recommended to use the numeric ID. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(NodeumFileWithPath, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method show_file_by_task" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_id' is set
if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501
local_var_params['task_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_id` when calling `show_file_by_task`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `show_file_by_task`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_id' in local_var_params:
path_params['task_id'] = local_var_params['task_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/tasks/{task_id}/files/{file_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileWithPath', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def show_file_by_task_execution(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_task_execution(task_execution_id=task_execution_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: NodeumFileWithPath
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.show_file_by_task_execution_with_http_info(**kwargs) # noqa: E501
def show_file_by_task_execution_with_http_info(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_task_execution_with_http_info(task_execution_id=task_execution_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(NodeumFileWithPath, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_execution_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method show_file_by_task_execution" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_execution_id' is set
if self.api_client.client_side_validation and ('task_execution_id' not in local_var_params or # noqa: E501
local_var_params['task_execution_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_execution_id` when calling `show_file_by_task_execution`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `show_file_by_task_execution`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_execution_id' in local_var_params:
path_params['task_execution_id'] = local_var_params['task_execution_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/task_executions/{task_execution_id}/files/{file_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileWithPath', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def show_file_by_task_execution_by_task(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_task_execution_by_task(task_id=task_id_value, task_execution_id=task_execution_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it's recommended to use the numeric ID. (required)
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: NodeumFileWithPath
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.show_file_by_task_execution_by_task_with_http_info(**kwargs) # noqa: E501
def show_file_by_task_execution_by_task_with_http_info(self, **kwargs): # noqa: E501
"""Displays a specific file. # noqa: E501
**API Key Scope**: files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_file_by_task_execution_by_task_with_http_info(task_id=task_id_value, task_execution_id=task_execution_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str task_id: Numeric ID or name of task. Task names are not unique; it's recommended to use the numeric ID. (required)
:param str task_execution_id: Numeric ID of task execution. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a (connection, read) tuple
of timeouts.
:return: tuple(NodeumFileWithPath, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'task_id',
'task_execution_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method show_file_by_task_execution_by_task" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'task_id' is set
if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501
local_var_params['task_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_id` when calling `show_file_by_task_execution_by_task`") # noqa: E501
# verify the required parameter 'task_execution_id' is set
if self.api_client.client_side_validation and ('task_execution_id' not in local_var_params or # noqa: E501
local_var_params['task_execution_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `task_execution_id` when calling `show_file_by_task_execution_by_task`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `show_file_by_task_execution_by_task`") # noqa: E501
collection_formats = {}
path_params = {}
if 'task_id' in local_var_params:
path_params['task_id'] = local_var_params['task_id'] # noqa: E501
if 'task_execution_id' in local_var_params:
path_params['task_execution_id'] = local_var_params['task_execution_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/tasks/{task_id}/task_executions/{task_execution_id}/files/{file_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NodeumFileWithPath', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def show_import_file_by_pool(self, **kwargs): # noqa: E501
"""Displays a specific file on tape of pools, specific for Data Exchange. # noqa: E501
**API Key Scope**: import_files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_import_file_by_pool(pool_id=pool_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ImportFileWithPath
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.show_import_file_by_pool_with_http_info(**kwargs) # noqa: E501
def show_import_file_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Displays a specific file on tape of pools, specific for Data Exchange. # noqa: E501
**API Key Scope**: import_files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_import_file_by_pool_with_http_info(pool_id=pool_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ImportFileWithPath, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method show_import_file_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `show_import_file_by_pool`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `show_import_file_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/import_files/{file_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImportFileWithPath', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def show_on_tape_file_by_pool(self, **kwargs): # noqa: E501
"""Displays a specific file on tape of pools, specific for Active and Offline. # noqa: E501
**API Key Scope**: on_tapes_files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_on_tape_file_by_pool(pool_id=pool_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_id: Numeric ID of file. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: OnTapesFile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.show_on_tape_file_by_pool_with_http_info(**kwargs) # noqa: E501
def show_on_tape_file_by_pool_with_http_info(self, **kwargs): # noqa: E501
"""Displays a specific file on tape of pools, specific for Active and Offline. # noqa: E501
**API Key Scope**: on_tapes_files / show # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.show_on_tape_file_by_pool_with_http_info(pool_id=pool_id_value, file_id=file_id_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str pool_id: Numeric ID, or name of pool. (required)
:param int file_id: Numeric ID of file. (required)
:param _return_http_data_only: return the response data only, without
the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(OnTapesFile, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'pool_id',
'file_id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method show_on_tape_file_by_pool" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'pool_id' is set
if self.api_client.client_side_validation and ('pool_id' not in local_var_params or # noqa: E501
local_var_params['pool_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `pool_id` when calling `show_on_tape_file_by_pool`") # noqa: E501
# verify the required parameter 'file_id' is set
if self.api_client.client_side_validation and ('file_id' not in local_var_params or # noqa: E501
local_var_params['file_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `file_id` when calling `show_on_tape_file_by_pool`") # noqa: E501
collection_formats = {}
path_params = {}
if 'pool_id' in local_var_params:
path_params['pool_id'] = local_var_params['pool_id'] # noqa: E501
if 'file_id' in local_var_params:
path_params['file_id'] = local_var_params['file_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerAuth'] # noqa: E501
return self.api_client.call_api(
'/pools/{pool_id}/on_tapes_files/{file_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OnTapesFile', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
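# Usage sketch (illustrative; the API-instance construction is assumed, not
# shown in this file): each generated show_* method is called on a configured
# API instance. A synchronous call returns the deserialized model directly,
# while passing async_req=True returns a thread whose .get() yields the result.
#
#     result = api.show_on_tape_file_by_pool(pool_id="pool-1", file_id=42)
#     thread = api.show_on_tape_file_by_pool(pool_id="pool-1", file_id=42, async_req=True)
#     result = thread.get()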
| 51.829002 | 2,886 | 0.612755 | 27,288 | 223,383 | 4.752125 | 0.014292 | 0.062309 | 0.102995 | 0.034054 | 0.979665 | 0.978863 | 0.978369 | 0.978215 | 0.975932 | 0.974105 | 0 | 0.017172 | 0.308649 | 223,383 | 4,309 | 2,887 | 51.84103 | 0.822503 | 0.435964 | 0 | 0.842563 | 0 | 0 | 0.203287 | 0.044887 | 0 | 0 | 0 | 0 | 0 | 1 | 0.026087 | false | 0 | 0.012815 | 0 | 0.064989 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
45f714a2558cd6d20883773851cc5a3a9a7cbd7f | 6,235 | py | Python | src/ps_collector/SSLNodeInfo.py | djw8605/ps-collector | 34d5fe8bd3db231157140f3fbfee7dd46f35be2a | [
"Apache-2.0"
] | null | null | null | src/ps_collector/SSLNodeInfo.py | djw8605/ps-collector | 34d5fe8bd3db231157140f3fbfee7dd46f35be2a | [
"Apache-2.0"
] | null | null | null | src/ps_collector/SSLNodeInfo.py | djw8605/ps-collector | 34d5fe8bd3db231157140f3fbfee7dd46f35be2a | [
"Apache-2.0"
] | null | null | null | import json
import os
import copy
from esmond_client.perfsonar.query import EventType
from esmond_client.perfsonar.query import Metadata
from esmond_client.perfsonar.query import Summary
from esmond_client.perfsonar.query import QueryLimitException
import requests
requests.packages.urllib3.disable_warnings()
class EventTypeSSL(EventType):
def __init__(self, EventTypeParent, cert=None, key=None):
self.cert = cert
self.key = key
super(EventTypeSSL, self).__init__(EventTypeParent._data, EventTypeParent.api_url, EventTypeParent.filters)
def _query_with_limit(self, verify=False):
"""Internal method used by the get_data() methods in the EventType
and Summary sub-classes. Make a series of limited queries in a loop
and return the compiled results.
Meant to optimize pulls of large amounts of data."""
if self.filters.verbose: print ' * looping query for: {0}'.format(self.query_uri)
# XXX(mmg) - revisit this value?
LIMIT = 1000
q_params = copy.copy(self.filters.time_filters)
q_params['limit'] = LIMIT
data_payload = []
while 1:
if self.cert and self.key:
self.api_url = self.api_url.replace("http://", "https://", 1)
if self.filters.verbose: print 'Changed api url for: {0}'.format(self.api_url)
r = requests.get('{0}{1}'.format(self.api_url, self.query_uri),
params=q_params,
headers=self.request_headers,
verify=verify, cert=(self.cert,self.key))
else:
r = requests.get('{0}{1}'.format(self.api_url, self.query_uri),
params=q_params,
headers=self.request_headers)
self.inspect_request(r)
if r.status_code == 200 and \
r.headers['content-type'] == 'application/json':
data = json.loads(r.text)
data_payload += data
if self.filters.verbose: print ' ** got {0} results'.format(len(data))
if len(data) < LIMIT:
# got less than requested - done
break
else:
# reset start time to last ts + 1 and loop
q_params['time-start'] = data[-1].get('ts') + 1
# sanity check - this should not happen other than the unlikely
# scenario where the final request's result count is exactly == LIMIT
if q_params['time-start'] >= q_params['time-end']:
self.warn('time start >= time end - exiting query loop')
break
else:
print 'Problems with the connection'
self.http_alert(r)
raise QueryLimitException
if self.filters.verbose: print ' *** finished with {0} results'.format(len(data_payload))
return data_payload
class SummarySSL(Summary):
def __init__(self, SummaryParent, cert=None, key=None):
self.cert = cert
self.key = key
super(SummarySSL, self).__init__(SummaryParent._data, SummaryParent.api_url, SummaryParent.filters, SummaryParent._data_type)
def _query_with_limit(self, verify=False):
"""Internal method used by the get_data() methods in the EventType
and Summary sub-classes. Make a series of limited queries in a loop
and return the compiled results.
Meant to optimize pulls of large amounts of data."""
if self.filters.verbose: print ' * looping query for: {0}'.format(self.query_uri)
# XXX(mmg) - revisit this value?
LIMIT = 1000
q_params = copy.copy(self.filters.time_filters)
q_params['limit'] = LIMIT
data_payload = []
while 1:
if self.cert and self.key:
self.api_url = self.api_url.replace("http://", "https://", 1)
if self.filters.verbose: print 'Changed api url for: {0}'.format(self.api_url)
r = requests.get('{0}{1}'.format(self.api_url, self.query_uri),
params=q_params,
headers=self.request_headers,
verify=verify, cert=(self.cert,self.key))
else:
r = requests.get('{0}{1}'.format(self.api_url, self.query_uri),
params=q_params,
headers=self.request_headers)
self.inspect_request(r)
if r.status_code == 200 and \
r.headers['content-type'] == 'application/json':
data = json.loads(r.text)
data_payload += data
if self.filters.verbose: print ' ** got {0} results'.format(len(data))
if len(data) < LIMIT:
# got less than requested - done
break
else:
# reset start time to last ts + 1 and loop
q_params['time-start'] = data[-1].get('ts') + 1
# sanity check - this should not happen other than the unlikely
# scenario where the final request's result count is exactly == LIMIT
if q_params['time-start'] >= q_params['time-end']:
self.warn('time start >= time end - exiting query loop')
break
else:
self.http_alert(r)
raise QueryLimitException
if self.filters.verbose: print ' *** finished with {0} results'.format(len(data_payload))
return data_payload
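# Usage sketch (illustrative; assumes the standard esmond_client query flow
# that this module extends, and placeholder certificate paths): wrapping a
# plain EventType in its SSL-aware variant so get_data() goes over HTTPS with
# a client certificate.
#
#     from esmond_client.perfsonar.query import ApiConnect, ApiFilters
#     filters = ApiFilters()
#     conn = ApiConnect('http://ps.example.net', filters)
#     for md in conn.get_metadata():
#         for et in md.get_all_event_types():
#             ssl_et = EventTypeSSL(et, cert='/path/to/hostcert.pem', key='/path/to/hostkey.pem')
#             payload = ssl_et.get_data()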
| 46.185185 | 135 | 0.515156 | 671 | 6,235 | 4.660209 | 0.198212 | 0.026863 | 0.03198 | 0.051167 | 0.861529 | 0.861529 | 0.815478 | 0.815478 | 0.815478 | 0.815478 | 0 | 0.010872 | 0.395188 | 6,235 | 134 | 136 | 46.529851 | 0.818351 | 0.121893 | 0 | 0.826087 | 0 | 0 | 0.104593 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.086957 | null | null | 0.097826 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
341de993e980ccb52bdb1e9ef8cfff6add70933f | 180 | py | Python | recruiting/None.py | andrewt-cville/recruiting | 7bcc8994fe02a5538f20af6cd9846c9b7ed6f62c | [
"Apache-2.0"
] | null | null | null | recruiting/None.py | andrewt-cville/recruiting | 7bcc8994fe02a5538f20af6cd9846c9b7ed6f62c | [
"Apache-2.0"
] | 14 | 2021-02-21T00:34:48.000Z | 2021-03-05T21:16:40.000Z | recruiting/None.py | andrewt-cville/recruiting | 7bcc8994fe02a5538f20af6cd9846c9b7ed6f62c | [
"Apache-2.0"
] | 1 | 2021-03-05T00:51:14.000Z | 2021-03-05T00:51:14.000Z |
# Cell
from bs4 import BeautifulSoup
import requests
import lxml
import time
import json | 12 | 29 | 0.822222 | 26 | 180 | 5.692308 | 0.346154 | 0.108108 | 0.148649 | 0.22973 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0.013333 | 0.166667 | 180 | 15 | 30 | 12 | 0.973333 | 0.05 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 12 |
3436dc13c12e158e32da9c3fa862676f71120faf | 16,506 | py | Python | tests/elf/test_core.py | rafael-santiago/LIEF | f230094d5877dd63d40915dc944c53c2a4be5ed9 | [
"Apache-2.0"
] | 2,999 | 2017-04-03T13:43:23.000Z | 2022-03-31T15:24:27.000Z | tests/elf/test_core.py | rafael-santiago/LIEF | f230094d5877dd63d40915dc944c53c2a4be5ed9 | [
"Apache-2.0"
] | 626 | 2017-04-04T15:57:04.000Z | 2022-03-31T19:25:18.000Z | tests/elf/test_core.py | rafael-santiago/LIEF | f230094d5877dd63d40915dc944c53c2a4be5ed9 | [
"Apache-2.0"
] | 498 | 2017-04-04T14:18:25.000Z | 2022-03-29T19:31:38.000Z | #!/usr/bin/env python
import logging
import os
import tempfile
import unittest
from unittest import TestCase
import lief
from utils import get_sample
lief.logging.set_level(lief.logging.LOGGING_LEVEL.INFO)
class TestCore(TestCase):
LOGGER = logging.getLogger(__name__)
def setUp(self):
self.logger = logging.getLogger(__name__)
def test_core_arm(self):
core = lief.parse(get_sample('ELF/ELF32_ARM_core_hello.core'))
notes = core.notes
self.assertEqual(len(notes), 6)
# Check NT_PRPSINFO
# =================
prpsinfo = notes[0]
self.assertTrue(prpsinfo.is_core)
self.assertEqual(prpsinfo.type_core, lief.ELF.NOTE_TYPES_CORE.PRPSINFO)
# Check details
details = prpsinfo.details
self.assertIsInstance(details, lief.ELF.CorePrPsInfo)
self.assertEqual(details.file_name, "hello-exe")
self.assertEqual(details.uid, 2000)
self.assertEqual(details.gid, 2000)
self.assertEqual(details.pid, 8166)
self.assertEqual(details.ppid, 8163)
self.assertEqual(details.pgrp, 8166)
self.assertEqual(details.sid, 7997)
# Check NT_PRSTATUS
# =================
prstatus = notes[1]
self.assertTrue(prstatus.is_core)
self.assertEqual(prstatus.type_core, lief.ELF.NOTE_TYPES_CORE.PRSTATUS)
# Check details
details = prstatus.details
self.assertEqual(details.current_sig, 7)
self.assertEqual(details.sigpend, 0)
self.assertEqual(details.sighold, 0)
self.assertEqual(details.pid, 8166)
self.assertEqual(details.ppid, 0)
self.assertEqual(details.pgrp, 0)
self.assertEqual(details.sid, 0)
self.assertEqual(details.utime.sec, 0)
self.assertEqual(details.utime.usec, 0)
self.assertEqual(details.stime.sec, 0)
self.assertEqual(details.stime.usec, 0)
self.assertEqual(details.cutime.sec, 0)
self.assertEqual(details.cutime.usec, 0)
self.assertEqual(details.cstime.sec, 0)
self.assertEqual(details.cstime.usec, 0)
reg_ctx = details.register_context
self.assertEqual(len(reg_ctx), 17)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R0], 0xaad75074)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R1], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R2], 0xb)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R3], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R4], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R5], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R6], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R7], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R8], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R9], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R10], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R11], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R12], 0xA)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R13], 1)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R14], 0xf7728841)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.ARM_R15], 0xaad7507c)
self.assertEqual(details.get(lief.ELF.CorePrStatus.REGISTERS.ARM_CPSR), 0x60010010)
arm_vfp = notes[2]  # expected to be the VFP register note; not inspected by this test
# Check NT_NOTE
# =================
siginfo = notes[3]
self.assertTrue(siginfo.is_core)
self.assertEqual(siginfo.type_core, lief.ELF.NOTE_TYPES_CORE.SIGINFO)
# Check details
details = siginfo.details
self.assertEqual(details.signo, 7)
self.assertEqual(details.sigcode, 0)
self.assertEqual(details.sigerrno, 1)
# Check NT_AUXV
# =================
auxv = notes[4]
self.assertTrue(auxv.is_core)
self.assertEqual(auxv.type_core, lief.ELF.NOTE_TYPES_CORE.AUXV)
# Check details
details = auxv.details
self.assertEqual(len(details.values), 18)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PHDR], 0xaad74034)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PHENT], 0x20)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PHNUM], 0x9)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PAGESZ], 4096)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.BASE], 0xf7716000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.FLAGS], 0)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.ENTRY], 0xaad75074)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.UID], 2000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.EUID], 2000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.GID], 2000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.EGID], 2000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PLATFORM], 0xfffefb5c)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.HWCAP], 0x27b0d6)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.CKLTCK], 0x64)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.SECURE], 0)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.RANDOM], 0xfffefb4c)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.HWCAP2], 0x1f)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.EXECFN], 0xfffeffec)
# Check NT_FILE
# =================
note = notes[5]
self.assertTrue(note.is_core)
self.assertEqual(note.type_core, lief.ELF.NOTE_TYPES_CORE.FILE)
# Check details
details = note.details
files = details.files
self.assertEqual(len(files), len(details))
self.assertEqual(21, len(details))
self.assertEqual(files[0].start, 0xaad74000)
self.assertEqual(files[0].end, 0xaad78000)
self.assertEqual(files[0].file_ofs, 0)
self.assertEqual(files[0].path, "/data/local/tmp/hello-exe")
last = files.pop()
self.assertEqual(last.start, 0xf77a1000)
self.assertEqual(last.end, 0xf77a2000)
self.assertEqual(last.file_ofs, 0x8a000)
self.assertEqual(last.path, "/system/bin/linker")
self.assertTrue(all(len(c.path) > 0 for c in details))
def test_core_arm64(self):
core = lief.parse(get_sample('ELF/ELF64_AArch64_core_hello.core'))
notes = core.notes
self.assertEqual(len(notes), 6)
# Check NT_PRPSINFO
# =================
prpsinfo = notes[0]
self.assertTrue(prpsinfo.is_core)
self.assertEqual(prpsinfo.type_core, lief.ELF.NOTE_TYPES_CORE.PRPSINFO)
# Check details
details = prpsinfo.details
self.assertIsInstance(details, lief.ELF.CorePrPsInfo)
self.assertEqual(details.file_name, "hello-exe")
self.assertEqual(details.uid, 2000)
self.assertEqual(details.gid, 2000)
self.assertEqual(details.pid, 8104)
self.assertEqual(details.ppid, 8101)
self.assertEqual(details.pgrp, 8104)
self.assertEqual(details.sid, 7997)
# Check NT_PRSTATUS
# =================
prstatus = notes[1]
self.assertTrue(prstatus.is_core)
self.assertEqual(prstatus.type_core, lief.ELF.NOTE_TYPES_CORE.PRSTATUS)
# Check details
details = prstatus.details
self.assertEqual(details.current_sig, 5)
self.assertEqual(details.sigpend, 0)
self.assertEqual(details.sighold, 0)
self.assertEqual(details.pid, 8104)
self.assertEqual(details.ppid, 0)
self.assertEqual(details.pgrp, 0)
self.assertEqual(details.sid, 0)
self.assertEqual(details.utime.sec, 0)
self.assertEqual(details.utime.usec, 0)
self.assertEqual(details.stime.sec, 0)
self.assertEqual(details.stime.usec, 0)
self.assertEqual(details.cutime.sec, 0)
self.assertEqual(details.cutime.usec, 0)
self.assertEqual(details.cstime.sec, 0)
self.assertEqual(details.cstime.usec, 0)
reg_ctx = details.register_context
self.assertEqual(len(reg_ctx), 34)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X0], 0x5580b86f50)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X1], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X2], 0x1)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X3], 0x7fb7e2e160)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X4], 0x7fb7e83030)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X5], 0x4)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X6], 0x6f6c2f617461642f)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X7], 0x2f706d742f6c6163)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X8], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X9], 0xa)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X10], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X11], 0xA)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X12], 0x0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X13], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X14], 0x878ca62ae01a9a5)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X15], 0x7fb7e7a000)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X16], 0x7fb7c132c8)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X17], 0x7fb7bb0adc)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X18], 0x7fb7c1e000)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X19], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X20], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X21], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X22], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X23], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X24], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X25], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X26], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X27], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X28], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X29], 0)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X30], 0x7fb7eb6068)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_X31], 0x7ffffff950)
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.AARCH64_PC], 0x5580b86f50)
arm_vfp = notes[2]  # expected to be the VFP register note; not inspected by this test
# Check NT_NOTE
# =================
siginfo = notes[3]
self.assertTrue(siginfo.is_core)
self.assertEqual(siginfo.type_core, lief.ELF.NOTE_TYPES_CORE.SIGINFO)
# Check details
details = siginfo.details
self.assertEqual(details.signo, 5)
self.assertEqual(details.sigcode, 0)
self.assertEqual(details.sigerrno, 1)
# Check NT_AUXV
# =================
auxv = notes[4]
self.assertTrue(auxv.is_core)
self.assertEqual(auxv.type_core, lief.ELF.NOTE_TYPES_CORE.AUXV)
# Check details
details = auxv.details
self.assertEqual(len(details.values), 18)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PHDR], 0x5580b86040)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PHENT], 0x38)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PHNUM], 0x9)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PAGESZ], 4096)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.BASE], 0x7fb7e93000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.FLAGS], 0)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.ENTRY], 0x5580b86f50)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.UID], 2000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.EUID], 2000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.GID], 2000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.EGID], 2000)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.PLATFORM], 0x7ffffffb58)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.HWCAP], 0xff)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.CKLTCK], 0x64)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.SECURE], 0)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.RANDOM], 0x7ffffffb48)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.EXECFN], 0x7fffffffec)
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.SYSINFO_EHDR], 0x7fb7e91000)
# Check NT_FILE
# =================
note = notes[5]
self.assertTrue(note.is_core)
self.assertEqual(note.type_core, lief.ELF.NOTE_TYPES_CORE.FILE)
# Check details
details = note.details
files = details.files
self.assertEqual(len(files), len(details))
self.assertEqual(22, len(details))
self.assertEqual(files[0].start, 0x5580b86000)
self.assertEqual(files[0].end, 0x5580b88000)
self.assertEqual(files[0].file_ofs, 0)
self.assertEqual(files[0].path, "/data/local/tmp/hello-exe")
last = files.pop()
self.assertEqual(last.start, 0x7fb7f8c000)
self.assertEqual(last.end, 0x7fb7f8d000)
self.assertEqual(last.file_ofs, 0xf8000)
self.assertEqual(last.path, "/system/bin/linker64")
def test_core_write(self):
core = lief.parse(get_sample('ELF/ELF64_x86-64_core_hello.core'))
note = core.notes[1]
self.assertEqual(note.type_core, lief.ELF.NOTE_TYPES_CORE.PRSTATUS)
details = note.details
details[lief.ELF.CorePrStatus.REGISTERS.X86_64_RIP] = 0xBADC0DE
note = core.notes[5]
self.assertEqual(note.type_core, lief.ELF.NOTE_TYPES_CORE.AUXV)
details = note.details
details[lief.ELF.CoreAuxv.TYPES.ENTRY] = 0xBADC0DE
note = core.notes[4]
self.assertEqual(note.type_core, lief.ELF.NOTE_TYPES_CORE.SIGINFO)
orig_siginfo_len = len(note.description)
details = note.details
details.sigerrno = 0xCC
# Cannot re-open a file on Windows, so handle it by hand
with tempfile.NamedTemporaryFile(prefix="", suffix=".core", delete=False) as f:
tmpfilename = f.name
core.write(tmpfilename)
try:
with open(tmpfilename, 'rb') as f:
core_new = lief.parse(f.name)
self.assertIsNotNone(core_new)
note = core_new.notes[1]
self.assertEqual(note.type_core, lief.ELF.NOTE_TYPES_CORE.PRSTATUS)
details = note.details
self.assertEqual(details[lief.ELF.CorePrStatus.REGISTERS.X86_64_RIP], 0xBADC0DE)
note = core_new.notes[5]
self.assertEqual(note.type_core, lief.ELF.NOTE_TYPES_CORE.AUXV)
details = note.details
self.assertEqual(details[lief.ELF.CoreAuxv.TYPES.ENTRY], 0xBADC0DE)
note = core_new.notes[4]
self.assertEqual(note.type_core, lief.ELF.NOTE_TYPES_CORE.SIGINFO)
self.assertEqual(len(note.description), orig_siginfo_len)
details = note.details
self.assertEqual(details.sigerrno, 0xCC)
finally:
try:
os.remove(tmpfilename)
except OSError:
pass
if __name__ == '__main__':
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
root_logger.addHandler(ch)
unittest.main(verbosity=2)
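# Standalone sketch (outside the test harness; the core-dump path is a
# placeholder): pulling the PRSTATUS register context from a core file with
# the same LIEF API exercised above.
#
#     import lief
#     core = lief.parse("hello.core")
#     prstatus = next(n for n in core.notes
#                     if n.type_core == lief.ELF.NOTE_TYPES_CORE.PRSTATUS)
#     print(prstatus.details.register_context)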
| 41.265 | 98 | 0.675815 | 1,942 | 16,506 | 5.647271 | 0.147786 | 0.248929 | 0.278837 | 0.206255 | 0.844807 | 0.827027 | 0.813532 | 0.796663 | 0.649403 | 0.494757 | 0 | 0.052336 | 0.202411 | 16,506 | 399 | 99 | 41.368421 | 0.780706 | 0.033442 | 0 | 0.480565 | 0 | 0 | 0.013503 | 0.009044 | 0 | 0 | 0.035297 | 0 | 0.69258 | 1 | 0.014134 | false | 0.003534 | 0.042403 | 0 | 0.063604 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
347263c9aa033674b11c778f52ce38abe6c60718 | 387 | py | Python | oxe-api/utils/re.py | CybersecurityLuxembourg/openxeco | 8d4e5578bde6a07f5d6d569b16b4de224abf7bf0 | [
"BSD-2-Clause"
] | null | null | null | oxe-api/utils/re.py | CybersecurityLuxembourg/openxeco | 8d4e5578bde6a07f5d6d569b16b4de224abf7bf0 | [
"BSD-2-Clause"
] | null | null | null | oxe-api/utils/re.py | CybersecurityLuxembourg/openxeco | 8d4e5578bde6a07f5d6d569b16b4de224abf7bf0 | [
"BSD-2-Clause"
] | null | null | null | import re
def has_password_format(value):
return re.fullmatch(r'(?=.*[A-Z])(?=.*[a-z])(?=.*[0-9])(?=.*[@#$%^&+=!])[A-Za-z0-9@#$%^&+=!]{8,30}', value)
def has_mail_format(value):
return re.fullmatch(r'^[A-Za-z0-9.+_-]+@[A-Za-z0-9._-]+\.[a-zA-Z]{2,}$', value)
def has_date_format(value):
return re.fullmatch(r'^([12]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01]))$', value)
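# Usage sketch (illustrative): each helper returns a re.Match object when the
# value conforms and None otherwise, so the result can be used as a boolean.
#
#     assert has_mail_format("alice@example.com")
#     assert not has_mail_format("not-an-email")
#     assert has_password_format("Str0ng!Pass")  # upper, lower, digit, special, 8-30 chars
#     assert has_date_format("2023-02-28")
#     assert not has_date_format("2023-13-01")   # month out of range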
| 27.642857 | 111 | 0.524548 | 72 | 387 | 2.708333 | 0.361111 | 0.061538 | 0.261538 | 0.292308 | 0.564103 | 0.528205 | 0.307692 | 0 | 0 | 0 | 0 | 0.082386 | 0.090439 | 387 | 13 | 112 | 29.769231 | 0.471591 | 0 | 0 | 0 | 0 | 0.428571 | 0.452196 | 0.452196 | 0 | 0 | 0 | 0 | 0 | 1 | 0.428571 | false | 0.142857 | 0.142857 | 0.428571 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 9 |
34769bcfa68c501fb824480bc171c7a61fb5a5a7 | 2,325 | py | Python | tests/test_input_validation.py | mcstro/natural-neighbor-interpolation | 76ba7bb50c84aef35e993902c46824e5991df45d | [
"MIT"
] | 64 | 2017-09-17T00:37:20.000Z | 2022-02-03T20:16:54.000Z | tests/test_input_validation.py | mcstro/natural-neighbor-interpolation | 76ba7bb50c84aef35e993902c46824e5991df45d | [
"MIT"
] | 5 | 2018-07-27T16:31:35.000Z | 2020-06-15T02:53:48.000Z | tests/test_input_validation.py | mcstro/natural-neighbor-interpolation | 76ba7bb50c84aef35e993902c46824e5991df45d | [
"MIT"
] | 13 | 2018-06-06T18:51:50.000Z | 2021-12-26T02:47:05.000Z | import pytest
import numpy as np
from naturalneighbor import griddata
def test_invalid_known_points_shape():
num_points = 5
num_dimensions = 3
bad_third_dim = 2
known_points = np.random.rand(num_points, num_dimensions, bad_third_dim)
known_values = np.random.rand(num_points)
interp_ranges = [
[0, 1, 1],
[0, 1, 1],
[0, 1, 1],
]
with pytest.raises(ValueError):
griddata(known_points, known_values, interp_ranges)
def test_different_num_points_and_values():
num_points = 5
num_dimensions = 3
known_points = np.random.rand(num_points, num_dimensions)
known_values = np.random.rand(num_points + 1)
interp_ranges = [
[0, 1, 1],
[0, 1, 1],
[0, 1, 1],
]
with pytest.raises(ValueError):
griddata(known_points, known_values, interp_ranges)
def test_zero_step_size():
num_points = 5
num_dimensions = 3
known_points = np.random.rand(num_points, num_dimensions)
known_values = np.random.rand(num_points)
interp_ranges = [
[0, 1, 0],
[0, 1, 1],
[0, 1, 1],
]
with pytest.raises(ValueError):
griddata(known_points, known_values, interp_ranges)
def test_negative_step_size():
num_points = 5
num_dimensions = 3
known_points = np.random.rand(num_points, num_dimensions)
known_values = np.random.rand(num_points)
interp_ranges = [
[0, 1, -1],
[0, 1, 1],
[0, 1, 1],
]
with pytest.raises(ValueError):
griddata(known_points, known_values, interp_ranges)
def test_step_before_stop():
num_points = 5
num_dimensions = 3
known_points = np.random.rand(num_points, num_dimensions)
known_values = np.random.rand(num_points)
interp_ranges = [
[2, 1, 1],
[0, 1, 1],
[0, 1, 1],
]
with pytest.raises(ValueError):
griddata(known_points, known_values, interp_ranges)
def test_zero_length_complex_step():
num_points = 5
num_dimensions = 3
known_points = np.random.rand(num_points, num_dimensions)
known_values = np.random.rand(num_points)
interp_ranges = [
[2, 1, 1],
[0, 1, 1],
[0, 1, 0j],
]
with pytest.raises(ValueError):
griddata(known_points, known_values, interp_ranges)
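# For contrast with the failure cases above, a well-formed call (illustrative;
# the output grid shape follows the mgrid-style [start, stop, step] ranges):
#
#     known_points = np.random.rand(5, 3)
#     known_values = np.random.rand(5)
#     interp_ranges = [[0, 2, 1], [0, 2, 1], [0, 2, 1]]
#     grid = griddata(known_points, known_values, interp_ranges)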
| 24.21875 | 76 | 0.633118 | 316 | 2,325 | 4.367089 | 0.136076 | 0.123913 | 0.030435 | 0.130435 | 0.867391 | 0.867391 | 0.85 | 0.85 | 0.85 | 0.817391 | 0 | 0.039352 | 0.256774 | 2,325 | 95 | 77 | 24.473684 | 0.759259 | 0 | 0 | 0.723684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078947 | false | 0 | 0.039474 | 0 | 0.118421 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
caabd5c4559b24a5d7ce3dd97c8b851978214d9a | 35,605 | py | Python | samples/currencies/currency.py | zoho/zohocrm-python-sdk-2.0 | 3a93eb3b57fed4e08f26bd5b311e101cb2995411 | [
"Apache-2.0"
] | null | null | null | samples/currencies/currency.py | zoho/zohocrm-python-sdk-2.0 | 3a93eb3b57fed4e08f26bd5b311e101cb2995411 | [
"Apache-2.0"
] | null | null | null | samples/currencies/currency.py | zoho/zohocrm-python-sdk-2.0 | 3a93eb3b57fed4e08f26bd5b311e101cb2995411 | [
"Apache-2.0"
] | null | null | null | from zcrmsdk.src.com.zoho.crm.api.currencies import *
from zcrmsdk.src.com.zoho.crm.api.currencies import Currency as ZCRMCurrency
from zcrmsdk.src.com.zoho.crm.api.util import Choice
class Currency(object):
@staticmethod
def get_currencies():
"""
This method is used to get all the available currencies in your organization.
"""
# Get instance of CurrenciesOperations Class
currencies_operations = CurrenciesOperations()
# Call get_currencies method
response = currencies_operations.get_currencies()
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
if response.get_status_code() in [204, 304]:
print('No Content' if response.get_status_code() == 204 else 'Not Modified')
return
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected ResponseWrapper instance is received
if isinstance(response_object, ResponseWrapper):
# Get the list of Currency instances
currencies_list = response_object.get_currencies()
for currency in currencies_list:
# Get the Id of each currency
print("Currency Id: " + str(currency.get_id()))
# Get the IsoCode of each currency
print("Currency IsoCode: " + str(currency.get_iso_code()))
# Get the Symbol of each currency
print("Currency Symbol: " + str(currency.get_symbol()))
# Get the CreatedTime of each currency
print("Currency CreatedTime: " + str(currency.get_created_time()))
# Get if the currency is active
print("Currency IsActive: " + str(currency.get_is_active()))
# Get the ExchangeRate of each currency
print("Currency ExchangeRate: " + str(currency.get_exchange_rate()))
# Get the format instance of each currency
format = currency.get_format()
if format is not None:
# Get the DecimalSeparator of the Format
print("Currency Format DecimalSeparator: " + format.get_decimal_separator().get_value())
# Get the ThousandSeparator of the Format
print("Currency Format ThousandSeparator: " + format.get_thousand_separator().get_value())
# Get the DecimalPlaces of the Format
print("Currency Format DecimalPlaces: " + format.get_decimal_places().get_value())
# Get the createdBy User instance of each currency
created_by = currency.get_created_by()
# Check if created_by is not None
if created_by is not None:
# Get the Name of the created_by User
print("Currency Created By - Name: " + created_by.get_name())
# Get the ID of the created_by User
print("Currency Created By - ID: " + str(created_by.get_id()))
# Get the modified_by User instance of each currency
modified_by = currency.get_modified_by()
# Check if modified_by is not None
if modified_by is not None:
# Get the Name of the modifiedBy User
print("Currency Modified By - Name: " + modified_by.get_name())
# Get the ID of the modifiedBy User
print("Currency Modified By - ID: " + str(modified_by.get_id()))
# Get the PrefixSymbol of each currency
print("Currency PrefixSymbol: " + str(currency.get_prefix_symbol()))
# Get the IsBase of each currency
print("Currency IsBase: " + str(currency.get_is_base()))
# Get the ModifiedTime of each currency
print("Currency ModifiedTime: " + str(currency.get_modified_time()))
# Get the Name of each currency
print("Currency Name: " + currency.get_name())
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def get_currency(currency_id):
"""
This method is used to get the details of a specific currency.
:param currency_id: Specify the unique ID of the currency.
"""
"""
example
currency_id = 3409643000002293037
"""
# Get instance of CurrenciesOperations Class
currencies_operations = CurrenciesOperations()
# Call get_currency method that takes currency_id as parameter
response = currencies_operations.get_currency(currency_id)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
if response.get_status_code() in [204, 304]:
print('No Content' if response.get_status_code() == 204 else 'Not Modified')
return
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected ResponseWrapper instance is received
if isinstance(response_object, ResponseWrapper):
# Get the list of Currency instances
currencies_list = response_object.get_currencies()
for currency in currencies_list:
# Get the Id of each currency
print("Currency Id: " + str(currency.get_id()))
# Get the IsoCode of each currency
print("Currency IsoCode: " + str(currency.get_iso_code()))
# Get the Symbol of each currency
print("Currency Symbol: " + str(currency.get_symbol()))
# Get the CreatedTime of each currency
print("Currency CreatedTime: " + str(currency.get_created_time()))
# Get if the currency is active
print("Currency IsActive: " + str(currency.get_is_active()))
# Get the ExchangeRate of each currency
print("Currency ExchangeRate: " + str(currency.get_exchange_rate()))
# Get the format instance of each currency
format = currency.get_format()
if format is not None:
# Get the DecimalSeparator of the Format
print("Currency Format DecimalSeparator: " + format.get_decimal_separator().get_value())
# Get the ThousandSeparator of the Format
print("Currency Format ThousandSeparator: " + format.get_thousand_separator().get_value())
# Get the DecimalPlaces of the Format
print("Currency Format DecimalPlaces: " + format.get_decimal_places().get_value())
# Get the createdBy User instance of each currency
created_by = currency.get_created_by()
# Check if created_by is not None
if created_by is not None:
# Get the Name of the created_by User
print("Currency Created By - Name: " + created_by.get_name())
# Get the ID of the created_by User
print("Currency Created By - ID: " + str(created_by.get_id()))
# Get the modifiedBy User instance of each currency
modified_by = currency.get_modified_by()
# Check if modified_by is not None
if modified_by is not None:
# Get the Name of the modifiedBy User
print("Currency Modified By - Name: " + modified_by.get_name())
# Get the ID of the modifiedBy User
print("Currency Modified By - ID: " + str(modified_by.get_id()))
# Get the PrefixSymbol of each currency
print("Currency PrefixSymbol: " + str(currency.get_prefix_symbol()))
# Get the IsBase of each currency
print("Currency IsBase: " + str(currency.get_is_base()))
# Get the ModifiedTime of each currency
print("Currency ModifiedTime: " + str(currency.get_modified_time()))
# Get the Name of each currency
print("Currency Name: " + currency.get_name())
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def add_currencies():
"""
This method is used to add new currencies to your organization.
"""
# Get instance of CurrenciesOperations Class
currencies_operations = CurrenciesOperations()
# Get instance of BodyWrapper Class that will contain the request body
request = BodyWrapper()
# List to hold Currency instances
currencies_list = []
# Get instance of Currency Class
currency = ZCRMCurrency()
# To set the position of the ISO code in the currency.
# True: Display ISO code before the currency value.
# False: Display ISO code after the currency value.
currency.set_prefix_symbol(True)
# To set the name of the currency.
currency.set_name("Angolan Kwanza - AOA")
# To set the ISO code of the currency.
currency.set_iso_code("AOA")
# To set the symbol of the currency.
currency.set_symbol("Kz")
# To set the rate at which the currency has to be exchanged for home currency.
currency.set_exchange_rate("20.000000000")
# To set the status of the currency.
# True: The currency is active.
# False: The currency is inactive.
currency.set_is_active(True)
format = Format()
# It can be a Period or Comma, depending on the currency.
format.set_decimal_separator(Choice('Period'))
# It can be a Period, Comma, or Space, depending on the currency.
format.set_thousand_separator(Choice('Comma'))
# To set the number of decimal places allowed for the currency. It can be 0, 2, or 3.
format.set_decimal_places(Choice('2'))
# To set the format of the currency
currency.set_format(format)
currencies_list.append(currency)
# Set the list to Currency in BodyWrapper instance
request.set_currencies(currencies_list)
# Call add_currencies method that takes BodyWrapper instance as parameter
response = currencies_operations.add_currencies(request)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected ActionWrapper instance is received.
if isinstance(response_object, ActionWrapper):
# Get the obtained ActionResponse instances
action_response_list = response_object.get_currencies()
for action_response in action_response_list:
# Check if the request is successful
if isinstance(action_response, SuccessResponse):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(action_response, APIException):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def update_currencies():
"""
This method is used to update currency details.
"""
# Get instance of CurrenciesOperations Class
currencies_operations = CurrenciesOperations()
# Get instance of BodyWrapper Class that will contain the request body
request = BodyWrapper()
# List to hold Currency instances
currencies_list = []
# Get instance of Currency Class
currency = ZCRMCurrency()
# To set currency Id
currency.set_id(3409643000002293037)
# To set the position of the ISO code in the currency.
# True: Display ISO code before the currency value.
# False: Display ISO code after the currency value.
currency.set_prefix_symbol(True)
# To set the rate at which the currency has to be exchanged for home currency.
currency.set_exchange_rate("28.000000000")
# To set the status of the currency.
# True: The currency is active.
# False: The currency is inactive.
currency.set_is_active(True)
format = Format()
# It can be a Period or Comma, depending on the currency.
format.set_decimal_separator(Choice('Period'))
# It can be a Period, Comma, or Space, depending on the currency.
format.set_thousand_separator(Choice('Comma'))
# To set the number of decimal places allowed for the currency. It can be 0, 2, or 3.
format.set_decimal_places(Choice('2'))
# To set the format of the currency
currency.set_format(format)
currencies_list.append(currency)
# Set the list to Currency in BodyWrapper instance
request.set_currencies(currencies_list)
# Call update_currencies method that takes BodyWrapper instance as parameter
response = currencies_operations.update_currencies(request)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected ActionWrapper instance is received.
if isinstance(response_object, ActionWrapper):
# Get the obtained ActionResponse instances
action_response_list = response_object.get_currencies()
for action_response in action_response_list:
# Check if the request is successful
if isinstance(action_response, SuccessResponse):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(action_response, APIException):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def update_currency(currency_id):
"""
This method is used to update single currency details.
:param currency_id: Specify the unique ID of the currency.
"""
"""
example
currency_id = 3409643000002293037
"""
# Get instance of CurrenciesOperations Class
currencies_operations = CurrenciesOperations()
# Get instance of BodyWrapper Class that will contain the request body
request = BodyWrapper()
# List to hold Currency instances
currencies_list = []
# Get instance of Currency Class
currency = ZCRMCurrency()
# To set the position of the ISO code in the currency.
# True: Display ISO code before the currency value.
# False: Display ISO code after the currency value.
currency.set_prefix_symbol(True)
# To set the rate at which the currency has to be exchanged for home currency.
currency.set_exchange_rate("28.000000000")
# To set the status of the currency.
# True: The currency is active.
# False: The currency is inactive.
currency.set_is_active(True)
format = Format()
# It can be a Period or Comma, depending on the currency.
format.set_decimal_separator(Choice('Period'))
# It can be a Period, Comma, or Space, depending on the currency.
format.set_thousand_separator(Choice('Comma'))
# To set the number of decimal places allowed for the currency. It can be 0, 2, or 3.
format.set_decimal_places(Choice('2'))
# To set the format of the currency
currency.set_format(format)
currencies_list.append(currency)
# Set the list to Currency in BodyWrapper instance
request.set_currencies(currencies_list)
# Call update_currency method that takes BodyWrapper instance and currency_id as parameters
response = currencies_operations.update_currency(currency_id, request)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected ActionWrapper instance is received.
if isinstance(response_object, ActionWrapper):
# Get the obtained ActionResponse instances
action_response_list = response_object.get_currencies()
for action_response in action_response_list:
# Check if the request is successful
if isinstance(action_response, SuccessResponse):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(action_response, APIException):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def enable_multiple_currencies():
"""
This method is used to enable multiple currencies for your organization.
"""
# Get instance of CurrenciesOperations Class
currencies_operations = CurrenciesOperations()
# Get instance of BaseCurrencyWrapper Class that will contain the request body
request = BaseCurrencyWrapper()
# Get instance of Currency Class
currency = ZCRMCurrency()
# To set the position of the ISO code in the currency.
# True: Display ISO code before the currency value.
# False: Display ISO code after the currency value.
currency.set_prefix_symbol(True)
# To set the name of the currency.
currency.set_name("Algerian Dinar-ADN")
# To set the ISO code of the currency.
currency.set_iso_code("DZD")
# To set the symbol of the currency.
currency.set_symbol("Af")
# To set the rate at which the currency has to be exchanged for home currency.
currency.set_exchange_rate("1.000000000")
# To set the status of the currency.
# True: The currency is active.
# False: The currency is inactive.
currency.set_is_active(True)
format = Format()
# It can be a Period or Comma, depending on the currency.
format.set_decimal_separator(Choice('Period'))
# It can be a Period, Comma, or Space, depending on the currency.
format.set_thousand_separator(Choice('Comma'))
# To set the number of decimal places allowed for the currency. It can be 0, 2, or 3.
format.set_decimal_places(Choice('2'))
# To set the format of the currency
currency.set_format(format)
# Set the Currency in BodyWrapper instance
request.set_base_currency(currency)
# Call enable_multiple_currencies method that takes BaseCurrencyWrapper instance as parameter
response = currencies_operations.enable_multiple_currencies(request)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected BaseCurrencyActionWrapper instance is received.
if isinstance(response_object, BaseCurrencyActionWrapper):
# Get the obtained ActionResponse instances
action_response = response_object.get_base_currency()
# Check if the request is successful
if isinstance(action_response, SuccessResponse):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(action_response, APIException):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def update_base_currency():
"""
This method is used to update base currency details.
"""
# Get instance of CurrenciesOperations Class
currencies_operations = CurrenciesOperations()
# Get instance of BaseCurrencyWrapper Class that will contain the request body
request = BaseCurrencyWrapper()
# Get instance of Currency Class
currency = ZCRMCurrency()
# To set currency Id
currency.set_id(3409643000002293001)
# To set the position of the ISO code in the currency.
# True: Display ISO code before the currency value.
# False: Display ISO code after the currency value.
currency.set_prefix_symbol(True)
# To set the symbol of the currency.
currency.set_symbol("Af")
# To set the rate at which the currency has to be exchanged for home currency.
currency.set_exchange_rate("1.000000000")
# To set the status of the currency.
# True: The currency is active.
# False: The currency is inactive.
currency.set_is_active(True)
format = Format()
# It can be a Period or Comma, depending on the currency.
format.set_decimal_separator(Choice('Period'))
# It can be a Period, Comma, or Space, depending on the currency.
format.set_thousand_separator(Choice('Comma'))
# To set the number of decimal places allowed for the currency. It can be 0, 2, or 3.
format.set_decimal_places(Choice('3'))
# To set the format of the currency
currency.set_format(format)
# Set the Currency in BodyWrapper instance
request.set_base_currency(currency)
# Call update_base_currency method that takes BaseCurrencyWrapper instance as parameter
response = currencies_operations.update_base_currency(request)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected BaseCurrencyActionWrapper instance is received.
if isinstance(response_object, BaseCurrencyActionWrapper):
# Get the obtained ActionResponse instance
action_response = response_object.get_base_currency()
# Check if the request is successful
if isinstance(action_response, SuccessResponse):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(action_response, APIException):
# Get the Status
print("Status: " + action_response.get_status().get_value())
# Get the Code
print("Code: " + action_response.get_code().get_value())
print("Details")
# Get the details dict
details = action_response.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + action_response.get_message().get_value())
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
| 38.243824 | 118 | 0.54116 | 3,611 | 35,605 | 5.189421 | 0.048186 | 0.038743 | 0.02348 | 0.017183 | 0.96654 | 0.962645 | 0.954427 | 0.951865 | 0.947543 | 0.942953 | 0 | 0.007666 | 0.388176 | 35,605 | 930 | 119 | 38.284946 | 0.852552 | 0.274737 | 0 | 0.919881 | 0 | 0 | 0.065917 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.020772 | false | 0 | 0.008902 | 0 | 0.038576 | 0.379822 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1b0a61db0e114669fc458b17142167c3aa497578 | 18,487 | py | Python | test/test_bowtie2.py | UdoGi/dark-matter | 3d49e89fa5e81f83144119f6216c5774176d203b | [
"MIT"
] | 10 | 2016-03-09T09:43:14.000Z | 2021-04-03T21:46:12.000Z | test/test_bowtie2.py | terrycojones/dark-matter | 67d16f870db6b4239e17e542bc6e3f072dc29c75 | [
"MIT"
] | 332 | 2015-01-07T12:37:30.000Z | 2022-01-20T15:48:11.000Z | test/test_bowtie2.py | terrycojones/dark-matter | 67d16f870db6b4239e17e542bc6e3f072dc29c75 | [
"MIT"
] | 4 | 2016-03-08T14:56:39.000Z | 2021-01-27T08:11:27.000Z | from unittest import TestCase
from io import StringIO
try:
from unittest.mock import patch
except ImportError:
from mock import patch
from dark.bowtie2 import Bowtie2
from dark.process import Executor
class TestBowtie2(TestCase):
"""
Test the Bowtie2 class.
"""
def testIndexAccession(self):
"""
Making a Bowtie index from an accession number must result in the
expected commands being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('MN908947.3')
self.assertEqual(
"$ bowtie2-build --quiet '/tmp/xxx/MN908947.3.fasta' "
"'/tmp/xxx/index'",
e.log[-1])
log = fp.getvalue()
self.assertTrue(
log.startswith('Downloading FASTA for accession MN908947.3 '
'from NCBI.\n'))
@patch('os.path.exists')
def testIndexFromFile(self, existsMock):
"""
Making a Bowtie index from a file name must result in the
expected commands being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
self.assertEqual(
"$ bowtie2-build --quiet 'file.fasta' '/tmp/xxx/index'",
e.log[-1])
log = fp.getvalue()
self.assertTrue(
log.find("Building Bowtie2 index from file.fasta.\n") > -1)
@patch('os.path.exists')
def testIndexFromBowtie2File(self, existsMock):
"""
Making a Bowtie index from a file that is an existing Bowtie2 index
file must result in the expected commands being run and output being
produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.1.bt2')
self.assertEqual(-1, e.log[-1].find('bowtie2-build'))
log = fp.getvalue()
self.assertEqual("Using pre-existing Bowtie2 index 'file'.\n", log)
@patch('os.path.exists')
def testIndexFromBowtie2FilePrefix(self, existsMock):
"""
Making a Bowtie index from a file that is a prefix of an existing
Bowtie2 index file name must result in the expected commands being run
and output being produced.
"""
class SideEffect(object):
def __init__(self, test):
self.test = test
self.count = 0
def sideEffect(self, filename):
if self.count == 0:
self.test.assertEqual('idx-file', filename)
self.count += 1
return False
elif self.count == 1:
self.test.assertEqual('idx-file.1.bt2', filename)
self.count += 1
return True
else:
self.test.fail('Unexpected third call to exists.')
existsMock.side_effect = SideEffect(self).sideEffect
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('idx-file')
self.assertEqual(-1, e.log[-1].find('bowtie2-build'))
log = fp.getvalue()
self.assertEqual("Using pre-existing Bowtie2 index 'idx-file'.\n", log)
@patch('os.path.exists')
def testAlignOneFASTQ(self, existsMock):
"""
Making a Bowtie index from a file name and running an alignment with
one FASTQ file must result in the expected commands being run and
output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq', threads=4)
log = fp.getvalue()
self.assertTrue(log.endswith('\nAligning with Bowtie2.\n'))
self.assertEqual(
"$ bowtie2 --no-unal --threads 4 --rg-id 'orig' --rg 'SM:orig' "
"-x '/tmp/xxx/index' "
"-U 'file1.fastq' > '/tmp/xxx/result.sam'", e.log[-1])
@patch('os.path.exists')
def testAlignOneFASTQWithReadGroup(self, existsMock):
"""
Making a Bowtie index from a file name and running an alignment with
one FASTQ file and specifying a read group must result in the expected
commands being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq', threads=4, readGroup='xxx')
log = fp.getvalue()
self.assertTrue(log.endswith('\nAligning with Bowtie2.\n'))
self.assertEqual(
"$ bowtie2 --no-unal --threads 4 --rg-id 'xxx' --rg 'SM:orig' "
"-x '/tmp/xxx/index' "
"-U 'file1.fastq' > '/tmp/xxx/result.sam'", e.log[-1])
@patch('os.path.exists')
def testAlignOneFASTQWithSampleName(self, existsMock):
"""
Making a Bowtie index from a file name and running an alignment with
one FASTQ file and specifying a sample name must result in the expected
commands being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq', threads=4, sampleName='xxx')
log = fp.getvalue()
self.assertTrue(log.endswith('\nAligning with Bowtie2.\n'))
self.assertEqual(
"$ bowtie2 --no-unal --threads 4 --rg-id 'orig' --rg 'SM:xxx' "
"-x '/tmp/xxx/index' "
"-U 'file1.fastq' > '/tmp/xxx/result.sam'", e.log[-1])
@patch('os.path.exists')
def testAlignOneFASTQWithFlags(self, existsMock):
"""
Making a Bowtie index from a file name and running an alignment with
one FASTQ file and some (fictional) args must result in the expected
commands being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq', threads=4, bowtie2Args='--up --dn')
log = fp.getvalue()
self.assertTrue(log.endswith('\nAligning with Bowtie2.\n'))
self.assertEqual(
"$ bowtie2 --up --dn --threads 4 --rg-id 'orig' --rg 'SM:orig' -x "
"'/tmp/xxx/index' -U 'file1.fastq' > '/tmp/xxx/result.sam'",
e.log[-1])
@patch('os.path.exists')
def testAlignTwoFASTQs(self, existsMock):
"""
Making a Bowtie index from a file name and running an alignment with
two FASTQ files must result in the expected commands being run and
output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq', fastq2='file2.fastq', threads=4)
log = fp.getvalue()
self.assertTrue(log.endswith('\nAligning with Bowtie2.\n'))
self.assertEqual(
"$ bowtie2 --no-unal --threads 4 --rg-id 'orig' --rg 'SM:orig' -x "
"'/tmp/xxx/index' "
"-1 'file1.fastq' -2 'file2.fastq' > '/tmp/xxx/result.sam'",
e.log[-1])
@patch('os.path.exists')
def testAlignTwoFASTQsWithReadGroup(self, existsMock):
"""
Making a Bowtie index from a file name and running an alignment with
two FASTQ files and a read group must result in the expected commands
being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq', fastq2='file2.fastq', threads=4,
readGroup='xxx')
log = fp.getvalue()
self.assertTrue(log.endswith('\nAligning with Bowtie2.\n'))
self.assertEqual(
"$ bowtie2 --no-unal --threads 4 --rg-id 'xxx' --rg 'SM:orig' -x "
"'/tmp/xxx/index' "
"-1 'file1.fastq' -2 'file2.fastq' > '/tmp/xxx/result.sam'",
e.log[-1])
@patch('os.path.exists')
def testAlignTwoFASTQsWithSampleName(self, existsMock):
"""
Making a Bowtie index from a file name and running an alignment with
two FASTQ files and a sample name must result in the expected commands
being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq', fastq2='file2.fastq', threads=4,
sampleName='xxx')
log = fp.getvalue()
self.assertTrue(log.endswith('\nAligning with Bowtie2.\n'))
self.assertEqual(
"$ bowtie2 --no-unal --threads 4 --rg-id 'orig' --rg 'SM:xxx' -x "
"'/tmp/xxx/index' "
"-1 'file1.fastq' -2 'file2.fastq' > '/tmp/xxx/result.sam'",
e.log[-1])
@patch('os.path.exists')
def testMakeBAM(self, existsMock):
"""
Making a BAM file must result in the expected commands being run and
output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq')
bt.makeBAM()
log = fp.getvalue()
self.assertTrue(log.endswith('\nConverting SAM to BAM.\n'))
self.assertEqual(
"$ samtools view -b '/tmp/xxx/result.sam' > "
"'/tmp/xxx/result.bam'",
e.log[-1])
@patch('os.path.exists')
def testMakeIndexedBAM(self, existsMock):
"""
Making a BAM file and indexing it must result in the expected commands
being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq')
bt.makeBAM()
bt.indexBAM()
log = fp.getvalue()
self.assertTrue(log.endswith('\nIndexing BAM.\n'))
self.assertEqual("$ samtools index '/tmp/xxx/result.bam'", e.log[-1])
@patch('os.path.exists')
def testRemoveDuplicates(self, existsMock):
"""
Removing duplicates must result in the expected commands
being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq')
bt.removeDuplicates()
log = fp.getvalue()
self.assertTrue(log.endswith('\nRemoving marked duplicates.\n'))
self.assertEqual("$ samtools view -b -F 1024 '/tmp/xxx/result.sam' "
"> '/tmp/xxx/non-duplicates.sam'", e.log[-2])
self.assertEqual("$ mv '/tmp/xxx/non-duplicates.sam' "
"'/tmp/xxx/result.sam'", e.log[-1])
@patch('os.path.exists')
def testSortByCoord(self, existsMock):
"""
Sorting by coord (the default) must result in the expected commands
being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq')
bt.sort()
log = fp.getvalue()
self.assertTrue(log.endswith('\nSorting SAM (by coord).\n'))
self.assertEqual("$ samtools sort '/tmp/xxx/result.sam' > "
"'/tmp/xxx/result-sorted.sam'", e.log[-2])
self.assertEqual("$ mv '/tmp/xxx/result-sorted.sam' "
"'/tmp/xxx/result.sam'", e.log[-1])
@patch('os.path.exists')
def testSortByName(self, existsMock):
"""
Sorting by name must result in the expected commands
being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq')
bt.sort(byName=True)
log = fp.getvalue()
self.assertTrue(log.endswith('\nSorting SAM (by name).\n'))
self.assertEqual("$ samtools sort -n '/tmp/xxx/result.sam' > "
"'/tmp/xxx/result-sorted.sam'", e.log[-2])
self.assertEqual("$ mv '/tmp/xxx/result-sorted.sam' "
"'/tmp/xxx/result.sam'", e.log[-1])
@patch('os.path.exists')
def testOutputFileSAM(self, existsMock):
"""
The output file must be a SAM file if BAM hasn't been produced.
"""
class SideEffect(object):
def __init__(self, test):
self.test = test
self.count = 0
def sideEffect(self, filename):
if self.count == 0:
self.test.assertEqual('index', filename)
self.count += 1
return False
elif self.count == 1:
self.test.assertEqual('index.1.bt2', filename)
self.count += 1
return True
elif self.count == 2:
self.test.assertEqual('/tmp/xxx/result.bam', filename)
self.count += 1
return False
elif self.count == 3:
self.test.assertEqual('/tmp/xxx/result.sam', filename)
self.count += 1
return True
else:
self.test.fail(
'Unexpected 5th call to exists. Filename: %r.' %
filename)
existsMock.side_effect = SideEffect(self).sideEffect
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('index')
bt.align(fastq1='file1.fastq')
self.assertEqual('/tmp/xxx/result.sam', bt.outputFile())
@patch('os.path.exists')
def testOutputFileBAM(self, existsMock):
"""
The output file must be BAM if it has been produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq')
bt.makeBAM()
self.assertEqual('/tmp/xxx/result.bam', bt.outputFile())
@patch('os.path.exists')
def testMarkDuplicatesPicard(self, existsMock):
"""
Using Picard to mark duplicates must result in the expected commands
being run and output being produced.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq')
bt.makeBAM()
bt.indexBAM()
bt.markDuplicatesPicard('picard.jar')
log = fp.getvalue()
self.assertTrue(log.endswith('\nMarking duplicates with Picard.\n'))
self.assertEqual(
'$ java -Xmn2g -Xms2g -Xmx2g -jar picard.jar MarkDuplicates '
"I='/tmp/xxx/result.bam' O='/tmp/xxx/picard-duplicates.bam' "
"M=/dev/null >'/tmp/xxx/picard.errs' 2>&1",
e.log[-2])
self.assertEqual("$ mv '/tmp/xxx/picard-duplicates.bam' "
"'/tmp/xxx/result.bam'", e.log[-1])
@patch('os.path.exists')
def testMarkDuplicatesGATKBowtie2Threads(self, existsMock):
"""
Using GATK to mark duplicates must result in the expected commands
being run and output being produced. The number of threads is taken
from the Bowtie2 instance.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp, threads=4)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq')
bt.makeBAM()
bt.indexBAM()
bt.markDuplicatesGATK()
log = fp.getvalue()
self.assertTrue(log.endswith('\nMarking duplicates with GATK.\n'))
self.assertEqual(
"$ gatk MarkDuplicatesSpark -I '/tmp/xxx/result.bam' -O "
"'/tmp/xxx/gatk-duplicates.bam' --conf spark.executor.cores=4",
e.log[-2])
self.assertEqual("$ mv '/tmp/xxx/gatk-duplicates.bam' "
"'/tmp/xxx/result.bam'", e.log[-1])
@patch('os.path.exists')
def testMarkDuplicatesGATKThreadsInArg(self, existsMock):
"""
Using GATK to mark duplicates must result in the expected commands
being run and output being produced. The number of threads is given
in the call to markDuplicatesGATK and must override the number given
to the Bowtie2 instance.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp, threads=4)
bt.buildIndex('file.fasta')
bt.align(fastq1='file1.fastq')
bt.makeBAM()
bt.indexBAM()
bt.markDuplicatesGATK(threads=32)
log = fp.getvalue()
self.assertTrue(log.endswith('\nMarking duplicates with GATK.\n'))
self.assertEqual(
"$ gatk MarkDuplicatesSpark -I '/tmp/xxx/result.bam' -O "
"'/tmp/xxx/gatk-duplicates.bam' --conf spark.executor.cores=32",
e.log[-2])
self.assertEqual("$ mv '/tmp/xxx/gatk-duplicates.bam' "
"'/tmp/xxx/result.bam'", e.log[-1])
def testClose(self):
"""
Calling close() must result in the temporary directory being removed.
"""
e = Executor(dryRun=True)
fp = StringIO()
bt = Bowtie2(executor=e, dryRun=True, verboseFp=fp)
bt.close()
self.assertEqual("$ rm -r '/tmp/xxx'", e.log[-1])
| 39.502137 | 79 | 0.569535 | 2,179 | 18,487 | 4.827444 | 0.103258 | 0.02852 | 0.034224 | 0.039738 | 0.853218 | 0.816142 | 0.795798 | 0.771176 | 0.757582 | 0.750642 | 0 | 0.016393 | 0.300481 | 18,487 | 467 | 80 | 39.586724 | 0.797015 | 0.160545 | 0 | 0.702381 | 0 | 0.035714 | 0.24639 | 0.062038 | 0 | 0 | 0 | 0 | 0.157738 | 1 | 0.077381 | false | 0 | 0.020833 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1bc44c5626432215c2bc817916f59dcce597f07c | 28,992 | py | Python | data/projects/flutils/tests/unit/pathutils/test_directory_present.py | se2p/artifact-pynguin-ssbse2020 | 32b5f4d27ef1b81e5c541471e98fa6e50f5ce8a6 | [
"CC-BY-4.0"
] | 3 | 2020-08-20T10:27:13.000Z | 2021-11-02T20:28:16.000Z | data/projects/flutils/tests/unit/pathutils/test_directory_present.py | se2p/artifact-pynguin-ssbse2020 | 32b5f4d27ef1b81e5c541471e98fa6e50f5ce8a6 | [
"CC-BY-4.0"
] | null | null | null | data/projects/flutils/tests/unit/pathutils/test_directory_present.py | se2p/artifact-pynguin-ssbse2020 | 32b5f4d27ef1b81e5c541471e98fa6e50f5ce8a6 | [
"CC-BY-4.0"
] | null | null | null | import unittest
from unittest.mock import patch
from flutils.pathutils import directory_present
from ..mocks.pathlib import PosixPathMock
class TestDirectoryPresent(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── dir_one
# │
# └── dir_two
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_dir=True,
exists=True
)
self.dir_one = PosixPathMock(
'/home/test_user/tmp/dir_one',
is_dir=False,
exists=False,
parent=self.tmp,
)
self.dir_two = PosixPathMock(
'/home/test_user/tmp/dir_one/dir_two',
is_dir=False,
exists=False,
parent=self.dir_one
)
self.path = PosixPathMock(
'/home/test_user/tmp/dir_one/dir_two/path',
is_dir=False,
exists=False,
parent=self.dir_two
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'',
'',
'',
'directory'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_with_parents_default(self):
directory_present(self.path.as_posix())
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_called_with(mode=0o700)
self.dir_two.mkdir.assert_called_with(mode=0o700)
self.dir_one.mkdir.assert_called_with(mode=0o700)
self.tmp.mkdir.assert_not_called()
self.chown.assert_any_call(self.path, user=None, group=None)
self.chown.assert_any_call(self.dir_two, user=None, group=None)
self.chown.assert_any_call(self.dir_one, user=None, group=None)
self.chmod.assert_not_called()
def test_directory_present_with_parents_mode_user_group(self):
mode = 0o770
user = 'test_user'
group = 'test_group'
directory_present(
self.path.as_posix(),
mode=mode,
user=user,
group=group
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_called_with(mode=mode)
self.dir_two.mkdir.assert_called_with(mode=mode)
self.dir_one.mkdir.assert_called_with(mode=mode)
self.tmp.mkdir.assert_not_called()
self.chown.assert_any_call(self.path, user=user, group=group)
self.chown.assert_any_call(self.dir_two, user=user, group=group)
self.chown.assert_any_call(self.dir_one, user=user, group=group)
self.chmod.assert_not_called()
class TestDirectoryPresentExisting(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── dir_one
# │
# └── dir_two
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_dir=True,
exists=True
)
self.dir_one = PosixPathMock(
'/home/test_user/tmp/dir_one',
is_dir=True,
exists=True,
parent=self.tmp,
)
self.dir_two = PosixPathMock(
'/home/test_user/tmp/dir_one/dir_two',
is_dir=True,
exists=True,
parent=self.dir_one
)
self.path = PosixPathMock(
'/home/test_user/tmp/dir_one/dir_two/path',
is_dir=True,
exists=True,
parent=self.dir_two
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'directory',
'directory',
'directory',
'directory'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_exists(self):
directory_present(self.path.as_posix())
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.dir_two.mkdir.assert_not_called()
self.dir_one.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_called_once_with(self.path, mode_dir=0o700)
self.chown.assert_called_once_with(self.path, user=None, group=None)
class TestDirectoryPresentGlobError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── dir_one
# │
# └── dir_two
# │
# └── **
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_dir=True,
exists=True
)
self.dir_one = PosixPathMock(
'/home/test_user/tmp/dir_one',
is_dir=True,
exists=True,
parent=self.tmp,
)
self.dir_two = PosixPathMock(
'/home/test_user/tmp/dir_one/dir_two',
is_dir=True,
exists=True,
parent=self.dir_one
)
self.path = PosixPathMock(
'/home/test_user/tmp/dir_one/dir_two/**',
is_dir=False,
exists=False,
parent=self.dir_two
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'',
'directory',
'directory',
'directory'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_glob_error(self):
self.assertRaises(ValueError, directory_present, self.path.as_posix())
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.dir_two.mkdir.assert_not_called()
self.dir_one.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentAbsoluteError(unittest.TestCase):
def setUp(self):
self.path = PosixPathMock(
'home/test_user/tmp/dir_one/dir_two',
exists=False,
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'',
''
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_absolute_error(self):
self.assertRaises(ValueError, directory_present, self.path.as_posix())
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentFileError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_dir=True,
exists=True
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
is_file=True,
exists=True,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'file',
'directory'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_file_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentBlockDeviceError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_dir=True,
exists=True
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
is_block_device=True,
exists=False,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple values.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'block device',
'directory'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_block_device_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentCharDeviceError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_dir=True,
exists=True
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
is_char_device=True,
exists=False,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple values.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'char device',
'directory'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_char_device_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentFifoError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_dir=True,
exists=True
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
is_fifo=True,
exists=False,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple values.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'FIFO',
'directory'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_fifo_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentSocketError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_dir=True,
exists=True
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
is_socket=True,
exists=False,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple values.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'socket',
'directory'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_socket_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentParentFileError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_file=True,
exists=True
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
exists=False,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'',
'file'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_parent_file_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentParentBlockDeviceError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_block_device=True,
exists=False
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
exists=False,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'',
'block device'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_parent_block_device_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentParentCharDeviceError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_char_device=True,
exists=False
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
exists=False,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'',
'char device'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_parent_char_device_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentParentFifoError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_fifo=True,
exists=False
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
exists=False,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'',
'FIFO'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_parent_fifo_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
class TestDirectoryPresentParentSocketError(unittest.TestCase):
def setUp(self):
# /home/test_user/tmp
# │
# └── path
#
self.tmp = PosixPathMock(
'/home/test_user/tmp',
is_socket=True,
exists=False
)
self.path = PosixPathMock(
'/home/test_user/tmp/path',
exists=False,
parent=self.tmp
)
# patch the normalize_path() function to return self.path_one.
patcher = patch(
'flutils.pathutils.normalize_path',
return_value=self.path
)
self.normalize_path = patcher.start()
self.addCleanup(patcher.stop)
# patch the exists_as() function to return multiple results.
patcher = patch(
'flutils.pathutils.exists_as',
side_effect=[
'',
'socket'
]
)
self.exists_as = patcher.start()
self.addCleanup(patcher.stop)
# patch the chown() function.
patcher = patch(
'flutils.pathutils.chown',
return_value=None
)
self.chown = patcher.start()
self.addCleanup(patcher.stop)
# patch the chmod() function.
patcher = patch(
'flutils.pathutils.chmod',
return_value=None
)
self.chmod = patcher.start()
self.addCleanup(patcher.stop)
def test_directory_present_parent_socket_error(self):
self.assertRaises(
FileExistsError,
directory_present,
self.path.as_posix()
)
self.normalize_path.assert_called_with(self.path.as_posix())
self.path.mkdir.assert_not_called()
self.tmp.mkdir.assert_not_called()
self.chmod.assert_not_called()
self.chown.assert_not_called()
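# A minimal sketch (not in the original file): every setUp above repeats the same
# patcher.start()/addCleanup(patcher.stop) wiring, which could be shared as a
# helper; `patch` here is the unittest.mock.patch imported at the top of the file.
def start_patch(test_case, target, **kwargs):
    patcher = patch(target, **kwargs)
    mock = patcher.start()
    test_case.addCleanup(patcher.stop)
    return mock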
| 29.167002 | 78 | 0.55705 | 2,956 | 28,992 | 5.290257 | 0.031461 | 0.046553 | 0.054675 | 0.100269 | 0.95415 | 0.944622 | 0.933431 | 0.931833 | 0.920834 | 0.914951 | 0 | 0.001053 | 0.345061 | 28,992 | 993 | 79 | 29.196375 | 0.818517 | 0.105719 | 0 | 0.759894 | 0 | 0 | 0.097074 | 0.079364 | 0 | 0 | 0 | 0 | 0.129288 | 1 | 0.038259 | false | 0 | 0.005277 | 0 | 0.062005 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
849373917274a9549435871658c62eb4f5f0edb2 | 6,493 | py | Python | forum/tests/auth.py | kraft99/forum | 59b35bd102da3bdeb0d6bc104de77572158992b3 | [
"MIT"
] | 8 | 2015-12-25T06:33:20.000Z | 2021-01-04T22:37:56.000Z | forum/tests/auth.py | insin/forum | 59b35bd102da3bdeb0d6bc104de77572158992b3 | [
"MIT"
] | null | null | null | forum/tests/auth.py | insin/forum | 59b35bd102da3bdeb0d6bc104de77572158992b3 | [
"MIT"
] | 7 | 2015-07-18T17:52:52.000Z | 2020-04-09T10:46:47.000Z | from django.contrib.auth.models import User
from django.test import TestCase
from forum import auth
from forum.models import Post, Topic
class AuthTestCase(TestCase):
"""
Tests for the authorisation module.
"""
fixtures = ['testdata.json']
def setUp(self):
"""
Retrieves a user from each user group for convenience.
"""
self.admin = User.objects.get(pk=1)
self.moderator = User.objects.get(pk=2)
self.user = User.objects.get(pk=3)
def test_is_admin(self):
"""
Verifies the check for a user having Administrator privileges.
"""
self.assertTrue(auth.is_admin(self.admin))
self.assertFalse(auth.is_admin(self.moderator))
self.assertFalse(auth.is_admin(self.user))
def test_is_moderator(self):
"""
Verifies the check for a user having Moderator privileges.
"""
self.assertTrue(auth.is_moderator(self.admin))
self.assertTrue(auth.is_moderator(self.moderator))
self.assertFalse(auth.is_moderator(self.user))
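def check_permissions(self, predicate, subject, expected):
    """
    Hypothetical helper (not in the original tests) that table-drives the
    repeated admin/moderator/user checks made in the methods below.
    """
    for user, allowed in zip((self.admin, self.moderator, self.user), expected):
        self.assertEqual(allowed, predicate(user, subject))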
def test_user_can_edit_post(self):
"""
Verifies the check for a given user being able to edit a given
Post.
Members of the User group may only edit their own Posts, and only
while those Posts are in unlocked Topics.
"""
# Post by admin
post = Post.objects.get(pk=1)
topic = post.topic
self.assertTrue(auth.user_can_edit_post(self.admin, post))
self.assertTrue(auth.user_can_edit_post(self.moderator, post))
self.assertFalse(auth.user_can_edit_post(self.user, post))
self.assertTrue(auth.user_can_edit_post(self.admin, post, topic))
self.assertTrue(auth.user_can_edit_post(self.moderator, post, topic))
self.assertFalse(auth.user_can_edit_post(self.user, post, topic))
topic.locked = True
self.assertTrue(auth.user_can_edit_post(self.admin, post, topic))
self.assertTrue(auth.user_can_edit_post(self.moderator, post, topic))
self.assertFalse(auth.user_can_edit_post(self.user, post, topic))
# Post by moderator
post = Post.objects.get(pk=4)
topic = post.topic
self.assertTrue(auth.user_can_edit_post(self.admin, post))
self.assertTrue(auth.user_can_edit_post(self.moderator, post))
self.assertFalse(auth.user_can_edit_post(self.user, post))
self.assertTrue(auth.user_can_edit_post(self.admin, post, topic))
self.assertTrue(auth.user_can_edit_post(self.moderator, post, topic))
self.assertFalse(auth.user_can_edit_post(self.user, post, topic))
topic.locked = True
self.assertTrue(auth.user_can_edit_post(self.admin, post, topic))
self.assertTrue(auth.user_can_edit_post(self.moderator, post, topic))
self.assertFalse(auth.user_can_edit_post(self.user, post, topic))
# Post by user
post = Post.objects.get(pk=7)
topic = post.topic
self.assertTrue(auth.user_can_edit_post(self.admin, post))
self.assertTrue(auth.user_can_edit_post(self.moderator, post))
self.assertTrue(auth.user_can_edit_post(self.user, post))
self.assertTrue(auth.user_can_edit_post(self.admin, post, topic))
self.assertTrue(auth.user_can_edit_post(self.moderator, post, topic))
self.assertTrue(auth.user_can_edit_post(self.user, post, topic))
topic.locked = True
self.assertTrue(auth.user_can_edit_post(self.admin, post, topic))
self.assertTrue(auth.user_can_edit_post(self.moderator, post, topic))
self.assertFalse(auth.user_can_edit_post(self.user, post, topic))
def test_user_can_edit_topic(self):
"""
Verifies the check for a given user being able to edit a given
Topic.
Members of the User group may only edit their own Topics if they
are not locked.
"""
# Topic created by admin
topic = Topic.objects.get(pk=1)
self.assertTrue(auth.user_can_edit_topic(self.admin, topic))
self.assertTrue(auth.user_can_edit_topic(self.moderator, topic))
self.assertFalse(auth.user_can_edit_topic(self.user, topic))
topic.locked = True
self.assertTrue(auth.user_can_edit_topic(self.admin, topic))
self.assertTrue(auth.user_can_edit_topic(self.moderator, topic))
self.assertFalse(auth.user_can_edit_topic(self.user, topic))
# Topic created by moderator
topic = Topic.objects.get(pk=2)
self.assertTrue(auth.user_can_edit_topic(self.admin, topic))
self.assertTrue(auth.user_can_edit_topic(self.moderator, topic))
self.assertFalse(auth.user_can_edit_topic(self.user, topic))
topic.locked = True
self.assertTrue(auth.user_can_edit_topic(self.admin, topic))
self.assertTrue(auth.user_can_edit_topic(self.moderator, topic))
self.assertFalse(auth.user_can_edit_topic(self.user, topic))
# Topic created by user
topic = Topic.objects.get(pk=3)
self.assertTrue(auth.user_can_edit_topic(self.admin, topic))
self.assertTrue(auth.user_can_edit_topic(self.moderator, topic))
self.assertTrue(auth.user_can_edit_topic(self.user, topic))
topic.locked = True
self.assertTrue(auth.user_can_edit_topic(self.admin, topic))
self.assertTrue(auth.user_can_edit_topic(self.moderator, topic))
self.assertFalse(auth.user_can_edit_topic(self.user, topic))
def test_user_can_edit_user_profile(self):
"""
Verifies the check for a given user being able to edit another
given user's public ForumProfile.
Members of the User group may only edit their own ForumProfile.
"""
self.assertTrue(auth.user_can_edit_user_profile(self.admin, self.admin))
self.assertTrue(auth.user_can_edit_user_profile(self.moderator, self.admin))
self.assertFalse(auth.user_can_edit_user_profile(self.user, self.admin))
self.assertTrue(auth.user_can_edit_user_profile(self.admin, self.moderator))
self.assertTrue(auth.user_can_edit_user_profile(self.moderator, self.moderator))
self.assertFalse(auth.user_can_edit_user_profile(self.user, self.moderator))
self.assertTrue(auth.user_can_edit_user_profile(self.admin, self.user))
self.assertTrue(auth.user_can_edit_user_profile(self.moderator, self.user))
self.assertTrue(auth.user_can_edit_user_profile(self.user, self.user))
| 45.725352 | 88 | 0.694132 | 908 | 6,493 | 4.75 | 0.085903 | 0.092511 | 0.145374 | 0.187804 | 0.872247 | 0.814746 | 0.763506 | 0.763506 | 0.747739 | 0.743566 | 0 | 0.001739 | 0.202988 | 6,493 | 142 | 89 | 45.725352 | 0.831691 | 0.124288 | 0 | 0.566667 | 0 | 0 | 0.002379 | 0 | 0 | 0 | 0 | 0 | 0.666667 | 1 | 0.066667 | false | 0 | 0.044444 | 0 | 0.133333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8
ca12c42cd5a356e32cfa9a3f423c5210c59a6d73 | 710 | py | Python | rsa-wiener-attack-master/Wiener.py | HocRiser01/CTF | 558b8effd66f258e30a2c98a273a4a8dd22e6dd8 | [
"MIT"
] | null | null | null | rsa-wiener-attack-master/Wiener.py | HocRiser01/CTF | 558b8effd66f258e30a2c98a273a4a8dd22e6dd8 | [
"MIT"
] | null | null | null | rsa-wiener-attack-master/Wiener.py | HocRiser01/CTF | 558b8effd66f258e30a2c98a273a4a8dd22e6dd8 | [
"MIT"
] | null | null | null | from Crypto.Util.number import *
from RSAwienerHacker import *
e = 46731919563265721307105180410302518676676135509737992912625092976849075262192092549323082367518264378630543338219025744820916471913696072050291990620486581719410354385121760761374229374847695148230596005409978383369740305816082770283909611956355972181848077519920922059268376958811713365106925235218265173085
n = 101991809777553253470276751399264740131157682329252673501792154507006158434432009141995367241962525705950046253400188884658262496534706438791515071885860897552736656899566915731297225817250639873643376310103992170646906557242832893914902053581087502512787303322747780420210884852166586717636559058152544979471
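# Sketch (not in the original script): Wiener's attack recovers a small private
# exponent d (roughly d < n**0.25 / 3) from the continued-fraction convergents
# of e/n. hack_RSA below is assumed to return that d, so a round-trip check on
# the recovered key would look like this.
def verify_recovered_d(e, n, d, probe=2):
    # Encrypt a probe value with the public key, then decrypt with the recovered d.
    return pow(pow(probe, e, n), d, n) == probe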
print(hack_RSA(e, n))
| 142 | 313 | 0.964789 | 17 | 710 | 40.235294 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.886494 | 0.019718 | 710 | 5 | 314 | 142 | 0.096264 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.4 | 0.2 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 8
ca18c1e3d8fa65ef29453e94b2f053904ab517da | 30,644 | py | Python | tests/test_MarketObjects/test_Events/test_ordereventchain.py | AgalmicVentures/Buttonwood | f86098cd6d97b4767cbd3428e667b307255ba571 | [
"MIT"
] | 2 | 2020-01-04T05:28:20.000Z | 2021-07-21T21:19:43.000Z | tests/test_MarketObjects/test_Events/test_ordereventchain.py | AgalmicVentures/Buttonwood | f86098cd6d97b4767cbd3428e667b307255ba571 | [
"MIT"
] | 45 | 2019-07-04T15:13:34.000Z | 2020-06-30T19:41:45.000Z | tests/test_MarketObjects/test_Events/test_ordereventchain.py | AgalmicVentures/Buttonwood | f86098cd6d97b4767cbd3428e667b307255ba571 | [
"MIT"
] | 3 | 2020-04-18T16:49:14.000Z | 2021-07-21T21:19:52.000Z | """
This file is part of Buttonwood.
Buttonwood is a python software package created to help quickly create, (re)build, or
analyze markets, market structures, and market participants.
MIT License
Copyright (c) 2016-2019 Peter F. Nabicht
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import logging
import pytest
from buttonwood.MarketObjects import CancelReasons
from buttonwood.MarketObjects.Events.EventChains import Exposure
from buttonwood.MarketObjects.Events.EventChains import OrderEventChain
from buttonwood.MarketObjects.Events.OrderEventConstants import FAR, FAK, FOK
from buttonwood.MarketObjects.Events.OrderEvents import AcknowledgementReport
from buttonwood.MarketObjects.Events.OrderEvents import CancelCommand
from buttonwood.MarketObjects.Events.OrderEvents import CancelReplaceCommand
from buttonwood.MarketObjects.Events.OrderEvents import CancelReport
from buttonwood.MarketObjects.Events.OrderEvents import FullFillReport
from buttonwood.MarketObjects.Events.OrderEvents import NewOrderCommand
from buttonwood.MarketObjects.Events.OrderEvents import PartialFillReport
from buttonwood.MarketObjects.Endpoint import Endpoint
from buttonwood.MarketObjects.Market import Market
from buttonwood.MarketObjects.Price import Price
from buttonwood.MarketObjects.Price import PriceFactory
from buttonwood.MarketObjects.Product import Product
from buttonwood.MarketObjects.Side import BID_SIDE, ASK_SIDE
from buttonwood.utils.IDGenerators import MonotonicIntID
MARKET = Market(Product("MSFT", "Microsoft"), Endpoint("Nasdaq", "NSDQ"), PriceFactory("0.01"))
LOGGER = logging.getLogger()
def test_exposure():
e1 = Exposure(Price("1.1"), 2, 12345)
e2 = Exposure(Price("1.1"), 2, 12345)
assert e1 == e2
assert e1.equivalent_exposure(e2)
assert e1.equivalent_exposure(e1)
assert e2.equivalent_exposure(e1)
e3 = Exposure(Price("1.1"), 2, 6789)
assert e1 != e3
assert e1.price() == e2.price()
print(e1, e2)
assert e1.equivalent_exposure(e3)
assert e3.equivalent_exposure(e1)
e4 = Exposure(Price("1.1"), 3, 6789)
assert e1 != e4
assert e1.equivalent_exposure(e4) is False
e5 = Exposure(Price("1.2"), 2, 6789)
assert e1 != e5
assert e1.equivalent_exposure(e5) is False
def test_creation():
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
assert oec.side().is_bid()
assert oec.market() == MARKET
assert oec.time_in_force() == FAR
# no ack yet
assert oec.current_exposure() is None
assert len(oec.open_exposure_requests()) == 1
assert oec.most_recent_requested_exposure() == Exposure(Price("34.52"), 1000, 121234)
# visible qty should be nothing still
assert oec.visible_qty() == 0
def test_acknowledgement():
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# no ack yet
assert oec.most_recent_event() == n
# check exposure
assert len(oec.open_exposure_requests()) == 1
assert oec.most_recent_requested_exposure() == oec.open_exposure_requests()[-1]
assert oec.current_exposure() is None
assert oec.most_recent_requested_exposure() == Exposure(Price("34.52"), 1000, 121234)
# visible qty should be nothing still
assert oec.visible_qty() == 0
# now ack it
ack = AcknowledgementReport(121235, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, None)
oec.apply_acknowledgement_report(ack)
assert oec.most_recent_event() == ack
# check exposure
assert len(oec.open_exposure_requests()) == 0
assert oec.most_recent_requested_exposure() is None
assert oec.current_exposure() == Exposure(Price("34.52"), 1000, 121235)
# check visible qty
print(oec.visible_qty())
assert oec.visible_qty() == 1000
def test_new_iceberg_order_ack():
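    # the trailing 50 on the command is the iceberg peak: once acked, only 50 of the 1000 is visible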
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000, 50)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# no ack yet
assert oec.most_recent_event() == n
# check exposure
assert len(oec.open_exposure_requests()) == 1
assert oec.most_recent_requested_exposure() == oec.open_exposure_requests()[-1]
assert oec.current_exposure() is None
assert oec.most_recent_requested_exposure() == Exposure(Price("34.52"), 1000, 121234)
# visible qty should be nothing still
assert oec.visible_qty() == 0
# now ack it
ack = AcknowledgementReport(121235, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, 50)
oec.apply_acknowledgement_report(ack)
assert oec.most_recent_event() == ack
# check exposure
assert len(oec.open_exposure_requests()) == 0
assert oec.most_recent_requested_exposure() is None
assert oec.current_exposure() == Exposure(Price("34.52"), 1000, 121235)
# check visible qty
assert oec.visible_qty() == 50
def test_close_exposure_cancel_closes_all():
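    # a confirmed cancel must close every outstanding exposure request, not just the most recent one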
id_gen = MonotonicIntID(seed=23043, increment=1)
n = NewOrderCommand(id_gen.id(), 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# should have 1 open exposure
assert len(oec.open_exposure_requests()) == 1
assert oec.most_recent_requested_exposure() == Exposure(Price("34.52"), 1000, id_gen.last_id())
cr = CancelReplaceCommand(id_gen.id(), 1234235.863, 2342, "user_x", MARKET, BID_SIDE, Price("34.51"), 800)
oec.apply_cancel_replace_command(cr)
# now should have 2 open exposures
assert len(oec.open_exposure_requests()) == 2
assert oec.open_exposure_requests()[1] == Exposure(Price("34.51"), 800, id_gen.last_id())
assert oec.most_recent_requested_exposure() == Exposure(Price("34.51"), 800, id_gen.last_id())
cancel_command = CancelCommand(id_gen.id(), 1234274.663, 2342, "user_x", MARKET, CancelReasons.USER_CANCEL)
oec.apply_cancel_command(cancel_command)
# now should have 3 open exposures
assert len(oec.open_exposure_requests()) == 3
assert oec.open_exposure_requests()[2] == Exposure(None, 0, id_gen.last_id())
assert oec.most_recent_requested_exposure() == Exposure(None, 0, id_gen.last_id())
cancel_confirm = CancelReport(id_gen.id(), 1234278.663, 2342, "user_x", MARKET, cancel_command, CancelReasons.USER_CANCEL)
oec.apply_cancel_report(cancel_confirm)
# all exposures should be closed now
assert len(oec.open_exposure_requests()) == 0
assert oec.most_recent_requested_exposure() is None
assert oec.has_partial_fill() is False
def test_basic_partial_fill():
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(121235, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, 1000)
oec.apply_acknowledgement_report(ack)
aggressor = NewOrderCommand(1111, 1234237.123, 22222, "user_x", MARKET, ASK_SIDE, FAR, Price("34.52"), 44)
# now resting partial fill
pf = PartialFillReport(121236, 1234237.123, 2342, "user_x", MARKET, aggressor, 44, Price("34.52"),
BID_SIDE, 99999, 1000-44)
oec.apply_partial_fill_report(pf)
assert oec.open_exposure_requests() == []
assert oec.current_exposure().price() == Price("34.52")
assert oec.current_exposure().qty() == 1000-44
assert oec.visible_qty() == 1000-44
assert oec.iceberg_peak_qty() == 1000 # should not have changed
assert oec.has_partial_fill()
def test_basic_partial_fill_replenish_visible():
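    # iceberg orders replenish visible qty back to the peak after each fill, capped by the remaining qty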
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 100, 40)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(121235, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 100, 40)
oec.apply_acknowledgement_report(ack)
aggressor = NewOrderCommand(1111, 1234237.123, 22222, "user_y", MARKET, ASK_SIDE, FAR, Price("34.52"), 40)
# now resting partial fill
pf = PartialFillReport(121236, 1234237.123, 2342, "user_x", MARKET, aggressor, 40, Price("34.52"),
BID_SIDE, 99999, 100-40)
oec.apply_partial_fill_report(pf)
assert oec.open_exposure_requests() == []
assert oec.current_exposure().price() == Price("34.52")
assert oec.current_exposure().qty() == 100-40
assert oec.visible_qty() == 40 # should have replenished
assert oec.iceberg_peak_qty() == 40 # should not have changed
assert oec.has_partial_fill()
    # a second fill takes out 40 more, so visible qty drops to min(peak, remaining)
    aggressor2 = NewOrderCommand(1114, 1234237.123, 33333, "user_y", MARKET, ASK_SIDE, FAR, Price("34.52"), 40)
    # now resting partial fill (event id bumped to 121237 so it does not collide with the first fill's id)
    pf2 = PartialFillReport(121237, 1234237.123, 2342, "user_x", MARKET, aggressor2, 40, Price("34.52"),
                            BID_SIDE, 99999, 100-40-40)  # subtract out the size of two 40-lot fills
oec.apply_partial_fill_report(pf2)
assert oec.open_exposure_requests() == []
assert oec.current_exposure().price() == Price("34.52")
assert oec.current_exposure().qty() == 100-40-40
    assert oec.visible_qty() == 100-40-40  # replenished only to min(peak 40, remaining 20)
assert oec.iceberg_peak_qty() == 40 # should not have changed
assert oec.has_partial_fill()
def test_partial_fill_to_zero_closes_out_order():
    # when a partial fill closes an order out to zero the chain should balk (it should have been a full fill), but it is still allowed
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 100)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(121235, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 100, 100)
oec.apply_acknowledgement_report(ack)
aggressor = NewOrderCommand(1111, 1234237.123, 22222, "user_y", MARKET, ASK_SIDE, FAR, Price("34.52"), 100)
# now resting partial fill
pf = PartialFillReport(1212344, 1234237.123, 2342, "user_x", MARKET, aggressor, 100, Price("34.52"),
BID_SIDE, 99999, 0)
oec.apply_partial_fill_report(pf)
assert oec.open_exposure_requests() == []
assert oec.is_open() is False
assert oec.visible_qty() == 0
assert oec.current_exposure() == Exposure(None, 0, 1212344)
def test_partial_fill_on_unacked_order():
# when an unacked order is filled the requested exposure gets impacted
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 100)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
assert oec.current_exposure() is None
assert oec.most_recent_requested_exposure().qty() == 100
assert oec.most_recent_requested_exposure().price() == Price("34.52")
# now resting partial fill
pf = PartialFillReport(1212344, 1234237.123, 2342, "user_x", MARKET, n, 10, Price("34.52"),
BID_SIDE, 99999, 90)
oec.apply_partial_fill_report(pf)
assert oec.current_exposure() is None
assert oec.most_recent_requested_exposure().qty() == 90
assert oec.most_recent_requested_exposure().price() == Price("34.52")
def test_partial_fill_on_multiple_unacked_requests():
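    # with several unacked requests in flight, a fill should decrement only the request it executed against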
n = NewOrderCommand(1, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# should have 1 open exposure
assert len(oec.open_exposure_requests()) == 1
assert oec.most_recent_requested_exposure() == Exposure(Price("34.52"), 1000, 1)
cr1 = CancelReplaceCommand(2, 1234235.863, 2342, "user_x", MARKET, BID_SIDE, Price("34.51"), 800)
oec.apply_cancel_replace_command(cr1)
# now should have 2 open exposures
assert len(oec.open_exposure_requests()) == 2
assert oec.open_exposure_requests()[0] == Exposure(Price("34.52"), 1000, 1)
assert oec.open_exposure_requests()[1] == Exposure(Price("34.51"), 800, 2)
cr2 = CancelReplaceCommand(3, 1234236.842, 2342, "user_x", MARKET, BID_SIDE, Price("34.55"), 800)
oec.apply_cancel_replace_command(cr2)
    # now should have 3 open exposures
assert len(oec.open_exposure_requests()) == 3
assert oec.open_exposure_requests()[0] == Exposure(Price("34.52"), 1000, 1)
assert oec.open_exposure_requests()[1] == Exposure(Price("34.51"), 800, 2)
assert oec.open_exposure_requests()[2] == Exposure(Price("34.55"), 800, 3)
cr3 = CancelReplaceCommand(4, 1234236.842, 2342, "user_x", MARKET, BID_SIDE, Price("34.56"), 800)
oec.apply_cancel_replace_command(cr3)
    # now should have 4 open exposures
assert len(oec.open_exposure_requests()) == 4
assert oec.open_exposure_requests()[0] == Exposure(Price("34.52"), 1000, 1)
assert oec.open_exposure_requests()[1] == Exposure(Price("34.51"), 800, 2)
assert oec.open_exposure_requests()[2] == Exposure(Price("34.55"), 800, 3)
assert oec.open_exposure_requests()[3] == Exposure(Price("34.56"), 800, 4)
    # a partial fill should only impact the request it is for
    # partially filling order id 3 (cr2)
pf1 = PartialFillReport(5, 1234237.123, 2342, "user_x", MARKET, cr2, 10, Price("34.55"),
BID_SIDE, 999, 790)
oec.apply_partial_fill_report(pf1)
assert len(oec.open_exposure_requests()) == 4
assert oec.open_exposure_requests()[0] == Exposure(Price("34.52"), 1000, 1)
assert oec.open_exposure_requests()[1] == Exposure(Price("34.51"), 800, 2)
assert oec.open_exposure_requests()[2] == Exposure(Price("34.55"), 790, 3)
assert oec.open_exposure_requests()[3] == Exposure(Price("34.56"), 800, 4)
# and again
pf2 = PartialFillReport(6, 1234237.123, 2342, "user_x", MARKET, cr2, 10, Price("34.55"),
BID_SIDE, 1000, 780)
oec.apply_partial_fill_report(pf2)
assert len(oec.open_exposure_requests()) == 4
assert oec.open_exposure_requests()[0] == Exposure(Price("34.52"), 1000, 1)
assert oec.open_exposure_requests()[1] == Exposure(Price("34.51"), 800, 2)
assert oec.open_exposure_requests()[2] == Exposure(Price("34.55"), 780, 3)
assert oec.open_exposure_requests()[3] == Exposure(Price("34.56"), 800, 4)
    # and now I can fill order id 4 (cr3); event id bumped to 7 so it does not duplicate pf2's id
    pf3 = PartialFillReport(7, 1234237.123, 2342, "user_x", MARKET, cr3, 50, Price("34.56"),
                            BID_SIDE, 1001, 750)
oec.apply_partial_fill_report(pf3)
assert len(oec.open_exposure_requests()) == 4
assert oec.open_exposure_requests()[0] == Exposure(Price("34.52"), 1000, 1)
assert oec.open_exposure_requests()[1] == Exposure(Price("34.51"), 800, 2)
assert oec.open_exposure_requests()[2] == Exposure(Price("34.55"), 780, 3)
assert oec.open_exposure_requests()[3] == Exposure(Price("34.56"), 750, 4)
# now start acking them
ack1 = AcknowledgementReport(10, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, None)
oec.apply_acknowledgement_report(ack1)
assert len(oec.open_exposure_requests()) == 3
assert oec.open_exposure_requests()[0] == Exposure(Price("34.51"), 800, 2)
assert oec.open_exposure_requests()[1] == Exposure(Price("34.55"), 780, 3)
assert oec.open_exposure_requests()[2] == Exposure(Price("34.56"), 750, 4)
assert oec.current_exposure() == Exposure(Price("34.52"), 1000, 10)
ack2 = AcknowledgementReport(11, 1234235.123, 2342, "user_x", MARKET, cr1, Price("34.51"), 800, None)
oec.apply_acknowledgement_report(ack2)
assert len(oec.open_exposure_requests()) == 2
assert oec.open_exposure_requests()[0] == Exposure(Price("34.55"), 780, 3)
assert oec.open_exposure_requests()[1] == Exposure(Price("34.56"), 750, 4)
assert oec.current_exposure() == Exposure(Price("34.51"), 800, 11)
ack3 = AcknowledgementReport(12, 1234235.123, 2342, "user_x", MARKET, cr2, Price("34.55"), 780, None)
oec.apply_acknowledgement_report(ack3)
assert len(oec.open_exposure_requests()) == 1
print(oec.open_exposure_requests()[0])
assert oec.open_exposure_requests()[0] == Exposure(Price("34.56"), 750, 4)
assert oec.current_exposure() == Exposure(Price("34.55"), 780, 12)
ack4 = AcknowledgementReport(13, 1234235.123, 2342, "user_x", MARKET, cr3, Price("34.56"), 750, None)
oec.apply_acknowledgement_report(ack4)
assert len(oec.open_exposure_requests()) == 0
assert oec.current_exposure() == Exposure(Price("34.56"), 750, 13)
def test_basic_full_fill_on_acked_order():
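    # a full fill zeroes the exposure (price None, qty 0) and closes the chain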
n = NewOrderCommand(1, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(2, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, 1000)
oec.apply_acknowledgement_report(ack)
assert oec.visible_qty() == 1000
assert oec.current_exposure().qty() == 1000
assert oec.current_exposure().price() == Price("34.52")
assert oec.is_open()
aggressor = NewOrderCommand(1111, 1234237.123, 22222, "user_y", MARKET, ASK_SIDE, FAR, Price("34.52"), 1000)
full_fill = FullFillReport(3, 1234237.123, 2342, "user_x", MARKET, aggressor, 1000, Price('34.52'), BID_SIDE, 12345)
oec.apply_full_fill_report(full_fill)
assert oec.visible_qty() == 0
assert oec.current_exposure().price() is None
assert oec.current_exposure().qty() == 0
assert oec.current_exposure().causing_event_id() == 3
assert oec.is_open() is False
def test_basic_full_fill_on_unacked_order():
n = NewOrderCommand(1, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
assert oec.visible_qty() == 0
assert oec.current_exposure() is None
assert oec.most_recent_requested_exposure().price() == Price("34.52")
assert oec.most_recent_requested_exposure().qty() == 1000
assert oec.is_open()
full_fill = FullFillReport(3, 1234237.123, 2342, "user_x", MARKET, n, 1000, Price('34.52'), BID_SIDE, 12345)
oec.apply_full_fill_report(full_fill)
assert oec.visible_qty() == 0
assert oec.current_exposure().price() is None
assert oec.current_exposure().qty() == 0
assert oec.current_exposure().causing_event_id() == 3
assert len(oec.open_exposure_requests()) == 0
assert oec.is_open() is False
def test_full_fill_on_acked_order_with_unacked_cr_in_flight():
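    # a full fill arriving while a cancel/replace is still unacked must clear that open request too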
n = NewOrderCommand(1, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(2, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, 1000)
oec.apply_acknowledgement_report(ack)
assert oec.visible_qty() == 1000
assert oec.current_exposure().qty() == 1000
assert oec.current_exposure().price() == Price("34.52")
assert len(oec.open_exposure_requests()) == 0
assert oec.is_open()
    cr = CancelReplaceCommand(3, 1234236.842, 2342, "user_x", MARKET, BID_SIDE, Price("34.56"), 800)
    oec.apply_cancel_replace_command(cr)
    # now should have 1 open exposure request
    assert oec.visible_qty() == 1000
    assert oec.current_exposure().qty() == 1000
    assert oec.current_exposure().price() == Price("34.52")
    assert oec.is_open()
    assert len(oec.open_exposure_requests()) == 1
    assert oec.most_recent_requested_exposure() == Exposure(Price("34.56"), 800, 3)
aggressor = NewOrderCommand(1111, 1234237.123, 22222, "user_y", MARKET, ASK_SIDE, FAR, Price("34.52"), 1000)
    full_fill = FullFillReport(4, 1234237.123, 2342, "user_x", MARKET, aggressor, 1000, Price('34.52'), BID_SIDE, 12345)
oec.apply_full_fill_report(full_fill)
assert oec.visible_qty() == 0
assert oec.current_exposure().price() is None
assert oec.current_exposure().qty() == 0
    assert oec.current_exposure().causing_event_id() == 4
assert len(oec.open_exposure_requests()) == 0
assert oec.is_open() is False
def test_full_fill_on_unacked_cr_with_acked_new_order():
n = NewOrderCommand(1, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(2, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, 1000)
oec.apply_acknowledgement_report(ack)
assert oec.visible_qty() == 1000
assert oec.current_exposure().qty() == 1000
assert oec.current_exposure().price() == Price("34.52")
assert len(oec.open_exposure_requests()) == 0
assert oec.is_open()
cr = CancelReplaceCommand(3, 1234236.842, 2342, "user_x", MARKET, BID_SIDE, Price("34.56"), 800)
oec.apply_cancel_replace_command(cr)
    # now should have 1 open exposure request
assert oec.visible_qty() == 1000
assert oec.current_exposure().qty() == 1000
assert oec.current_exposure().price() == Price("34.52")
assert oec.is_open()
assert len(oec.open_exposure_requests()) == 1
assert oec.most_recent_requested_exposure() == Exposure(Price("34.56"), 800, 3)
full_fill = FullFillReport(4, 1234237.123, 2342, "user_x", MARKET, cr, 800, Price("34.56"), BID_SIDE, 12345)
oec.apply_full_fill_report(full_fill)
assert oec.visible_qty() == 0
assert oec.current_exposure().price() is None
assert oec.current_exposure().qty() == 0
assert oec.current_exposure().causing_event_id() == 4
assert len(oec.open_exposure_requests()) == 0
assert oec.is_open() is False
def test_full_fill_with_not_enough_size_on_acked_new_order():
# should balk but shouldn't keep it from working
n = NewOrderCommand(1, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(2, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, 1000)
oec.apply_acknowledgement_report(ack)
assert oec.visible_qty() == 1000
assert oec.current_exposure().qty() == 1000
assert oec.current_exposure().price() == Price("34.52")
assert oec.is_open()
aggressor = NewOrderCommand(1111, 1234237.123, 22222, "user_y", MARKET, ASK_SIDE, FAR, Price("34.52"), 1000)
full_fill = FullFillReport(3, 1234237.123, 2342, "user_x", MARKET, aggressor, 17, Price('34.52'), BID_SIDE, 12345)
oec.apply_full_fill_report(full_fill)
assert oec.visible_qty() == 0
assert oec.current_exposure().price() is None
assert oec.current_exposure().qty() == 0
assert oec.current_exposure().causing_event_id() == 3
assert oec.is_open() is False
def test_full_fill_with_too_much_size_on_acked_new_order():
# should balk but shouldn't keep it from working
n = NewOrderCommand(1, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(2, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, 1000)
oec.apply_acknowledgement_report(ack)
assert oec.visible_qty() == 1000
assert oec.current_exposure().qty() == 1000
assert oec.current_exposure().price() == Price("34.52")
assert oec.is_open()
aggressor = NewOrderCommand(1111, 1234237.123, 22222, "user_y", MARKET, ASK_SIDE, FAR, Price("34.52"), 1000)
full_fill = FullFillReport(3, 1234237.123, 2342, "user_x", MARKET, aggressor, 3400, Price('34.52'), BID_SIDE, 12345)
oec.apply_full_fill_report(full_fill)
assert oec.visible_qty() == 0
assert oec.current_exposure().price() is None
assert oec.current_exposure().qty() == 0
assert oec.current_exposure().causing_event_id() == 3
assert oec.is_open() is False
def test_full_fill_with_not_enough_size_on_unacked_new_order():
# should balk but shouldn't keep it from working
n = NewOrderCommand(1, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
assert oec.visible_qty() == 0
assert oec.current_exposure() is None
assert oec.most_recent_requested_exposure().price() == Price("34.52")
assert oec.most_recent_requested_exposure().qty() == 1000
assert oec.is_open()
full_fill = FullFillReport(3, 1234237.123, 2342, "user_x", MARKET, n, 17, Price('34.52'), BID_SIDE, 12345)
oec.apply_full_fill_report(full_fill)
assert oec.visible_qty() == 0
assert oec.current_exposure().price() is None
assert oec.current_exposure().qty() == 0
assert oec.current_exposure().causing_event_id() == 3
assert oec.is_open() is False
def test_full_fill_with_too_much_size_on_unacked_new_order():
# should balk but shouldn't keep it from working
n = NewOrderCommand(1, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
assert oec.visible_qty() == 0
assert oec.current_exposure() is None
assert oec.most_recent_requested_exposure().price() == Price("34.52")
assert oec.most_recent_requested_exposure().qty() == 1000
assert oec.is_open()
    full_fill = FullFillReport(3, 1234237.123, 2342, "user_x", MARKET, n, 3400, Price('34.52'), BID_SIDE, 12345)
oec.apply_full_fill_report(full_fill)
assert oec.visible_qty() == 0
assert oec.current_exposure().price() is None
assert oec.current_exposure().qty() == 0
assert oec.current_exposure().causing_event_id() == 3
assert oec.is_open() is False
def test_creation_without_neworder():
with pytest.raises(AssertionError):
cr = CancelReplaceCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, Price("43.01"), 234)
OrderEventChain(cr, LOGGER, MonotonicIntID())
def test_cancel_replace_not_allowed_on_fak_or_fok():
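    # FAK and FOK orders never rest on the book, so applying a cancel/replace is rejected outright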
# FAK
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAK, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
cr = CancelReplaceCommand(121235, 1234235.324, 2342, "user_x", MARKET, BID_SIDE, Price("43.01"), 234)
with pytest.raises(AssertionError):
oec.apply_cancel_replace_command(cr)
# FOK
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FOK, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
cr = CancelReplaceCommand(121235, 1234235.324, 2342, "user_x", MARKET, BID_SIDE, Price("43.01"), 234)
with pytest.raises(AssertionError):
oec.apply_cancel_replace_command(cr)
def test_subchain_str():
# pretty basic, just testing that it doesn't break
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(121235, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, None)
oec.apply_acknowledgement_report(ack)
assert oec.most_recent_event() == ack
# now check I can get a __str__ of the subchain no problem
str(oec.most_recent_subchain())
def test_subchain_to_json():
# pretty basic, just testing that it doesn't break
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(121235, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, None)
oec.apply_acknowledgement_report(ack)
assert oec.most_recent_event() == ack
# now check I can get a to_json of the subchain no problem
oec.most_recent_subchain().to_json()
def test_subchain_getters():
    # exercise the subchain getters across an open -> ack -> partial fill sequence
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now ack it
ack = AcknowledgementReport(121235, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000, None)
oec.apply_acknowledgement_report(ack)
# and partial fill
aggressor = NewOrderCommand(1111, 1234237.123, 22222, "user_y", MARKET, ASK_SIDE, FAR, Price("34.52"), 44)
# now resting partial fill
pf = PartialFillReport(121236, 1234237.123, 2342, "user_x", MARKET, aggressor, 44, Price("34.52"),
BID_SIDE, 99999, 1000 - 44)
oec.apply_partial_fill_report(pf)
subchain = oec.most_recent_subchain()
assert subchain.open_event() == n
assert subchain.first_execution_report() == ack
assert subchain.fills() == [pf]
assert subchain.last_event() == pf
def test_subchain_getters_partial_fill_before_ack():
    # a fill that arrives before the ack should still be recorded as the first execution report
n = NewOrderCommand(121234, 1234235.123, 2342, "user_x", MARKET, BID_SIDE, FAR, Price("34.52"), 1000)
oec = OrderEventChain(n, LOGGER, MonotonicIntID())
# now aggressive partial fill
pf = PartialFillReport(121236, 1234237.123, 2342, "user_x", MARKET, n, 44, Price("34.52"),
BID_SIDE, 99999, 1000 - 44)
oec.apply_partial_fill_report(pf)
# now ack it
ack = AcknowledgementReport(121235, 1234235.123, 2342, "user_x", MARKET, n, Price("34.52"), 1000-44, None)
oec.apply_acknowledgement_report(ack)
subchain = oec.most_recent_subchain()
assert subchain.open_event() == n
assert subchain.first_execution_report() == pf
assert subchain.fills() == [pf]
assert subchain.last_event() == ack
| 49.505654 | 142 | 0.704151 | 4,330 | 30,644 | 4.796998 | 0.081986 | 0.077127 | 0.038997 | 0.050551 | 0.824322 | 0.793029 | 0.746088 | 0.736219 | 0.725001 | 0.714458 | 0 | 0.113641 | 0.164371 | 30,644 | 618 | 143 | 49.585761 | 0.697505 | 0.10808 | 0 | 0.623318 | 0 | 0 | 0.043515 | 0 | 0 | 0 | 0 | 0 | 0.511211 | 1 | 0.053812 | false | 0 | 0.044843 | 0 | 0.098655 | 0.006726 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ca95ee390904620f6295db82ec40a7c39203b272 | 1,998 | py | Python | src/cnn_mod_cla.py | ndwuhuangwei/py-radio-autoencoder | 842cd1f14a17ee0798766dffcf132950a9e745bd | [
"CC0-1.0"
] | null | null | null | src/cnn_mod_cla.py | ndwuhuangwei/py-radio-autoencoder | 842cd1f14a17ee0798766dffcf132950a9e745bd | [
"CC0-1.0"
] | null | null | null | src/cnn_mod_cla.py | ndwuhuangwei/py-radio-autoencoder | 842cd1f14a17ee0798766dffcf132950a9e745bd | [
"CC0-1.0"
] | 1 | 2021-09-06T14:05:53.000Z | 2021-09-06T14:05:53.000Z | import tensorflow as tf
from tensorflow.keras import layers, Model, Sequential
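# Two small CNN classifiers (presumably for radio modulation recognition, given the
# repo name): a 2-D variant for image-like inputs and a 1-D variant for sampled sequences.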
def cnn_mod_cla_2d(in_shape, class_num):
    '''The 2 refers to (mod, snr).'''
input_tensor = layers.Input(shape=in_shape, dtype='float32')
x = layers.Conv2D(filters=128, kernel_size=8, padding='valid')(input_tensor)
x = layers.ReLU()(x)
x = layers.MaxPooling2D(pool_size=2, strides=2, padding='valid')(x)
x = layers.Conv2D(filters=64, kernel_size=16, padding='valid')(x)
x = layers.ReLU()(x)
x = layers.MaxPooling2D(pool_size=2, strides=2, padding='valid')(x)
x = layers.Flatten()(x)
x = layers.Dense(units=128)(x)
x = layers.ReLU()(x)
x = layers.Dense(units=64)(x)
x = layers.ReLU()(x)
x = layers.Dense(units=32)(x)
x = layers.ReLU()(x)
x = layers.Dense(units=class_num)(x)
output = layers.Softmax()(x)
    # after defining the layers, Model() must still be called to actually build the model
model = Model(inputs=input_tensor, outputs=output)
return model
def cnn_mod_cla_1d(in_shape, class_num):
    '''The 2 refers to (mod, snr).'''
input_tensor = layers.Input(shape=in_shape, dtype='float32')
x = layers.Conv1D(filters=128, kernel_size=8, padding='valid')(input_tensor)
x = layers.ReLU()(x)
x = layers.MaxPooling1D(pool_size=2, strides=2, padding='valid')(x)
x = layers.Conv1D(filters=64, kernel_size=16, padding='valid')(x)
x = layers.ReLU()(x)
x = layers.MaxPooling1D(pool_size=2, strides=2, padding='valid')(x)
x = layers.Flatten()(x)
x = layers.Dense(units=128)(x)
x = layers.ReLU()(x)
x = layers.Dense(units=64)(x)
x = layers.ReLU()(x)
x = layers.Dense(units=32)(x)
x = layers.ReLU()(x)
x = layers.Dense(units=class_num)(x)
output = layers.Softmax()(x)
    # after defining the layers, Model() must still be called to actually build the model
model = Model(inputs=input_tensor, outputs=output)
return model
if __name__ == '__main__':
model = cnn_mod_cla_1d(in_shape=(128, 2), class_num=11)
model.summary()
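    # A minimal sketch of preparing the model for training; the optimizer, loss,
    # and metric below are assumptions, not part of the original file.
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])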
| 27 | 80 | 0.62963 | 294 | 1,998 | 4.139456 | 0.204082 | 0.161052 | 0.157765 | 0.098603 | 0.878389 | 0.878389 | 0.85456 | 0.85456 | 0.85456 | 0.85456 | 0 | 0.038608 | 0.209209 | 1,998 | 73 | 81 | 27.369863 | 0.731646 | 0.046046 | 0 | 0.744186 | 0 | 0 | 0.032735 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.046512 | false | 0 | 0.046512 | 0 | 0.139535 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |