Schema:

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
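As a quick orientation, here is a minimal sketch of slicing such a dump with pandas. The shard file name and the Parquet storage format are assumptions for illustration; only the column names come from the schema above.

```python
import pandas as pd

# Load one shard of the dataset (file name is hypothetical).
df = pd.read_parquet("stack_shard.parquet")

# Keep Python files that parse (AST category is 1) and were not flagged
# as auto-generated by the quality-signal pipeline.
keep = df[
    (df["lang"] == "Python")
    & (df["qsc_codepython_cate_ast_quality_signal"] == 1)
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
]
print(keep[["max_stars_repo_name", "size", "hits"]].head())
```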
Row 1

| field | value |
|---|---|
| hexsha | c73f0ff0ef952ff0778e183e56d612b10b44afe9 |
| size | 209 |
| ext | py |
| lang | Python |
| max_stars_repo_path | summarus/models/__init__.py |
| max_stars_repo_name | IlyaGusev/summarus |
| max_stars_repo_head_hexsha | 84f5fa7efe1dd4baa3e131af686ec93deb36626b |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 139 |
| max_stars_repo_stars_event_min_datetime | 2019-03-20T18:43:40.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-29T19:45:10.000Z |
| max_issues_repo_path | summarus/models/__init__.py |
| max_issues_repo_name | IlyaGusev/summarus |
| max_issues_repo_head_hexsha | 84f5fa7efe1dd4baa3e131af686ec93deb36626b |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 8 |
| max_issues_repo_issues_event_min_datetime | 2019-04-26T06:40:41.000Z |
| max_issues_repo_issues_event_max_datetime | 2021-11-11T11:38:17.000Z |
| max_forks_repo_path | summarus/models/__init__.py |
| max_forks_repo_name | IlyaGusev/summarus |
| max_forks_repo_head_hexsha | 84f5fa7efe1dd4baa3e131af686ec93deb36626b |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 22 |
| max_forks_repo_forks_event_min_datetime | 2019-04-29T16:46:48.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-01-25T13:26:53.000Z |

content:

```python
from summarus.models.copynet import CustomCopyNetSeq2Seq
from summarus.models.seq2seq import Seq2Seq
from summarus.models.pgn import PointerGeneratorNetwork
from summarus.models.summarunner import SummaRuNNer
```

| field | value |
|---|---|
| avg_line_length | 41.8 |
| max_line_length | 56 |
| alphanum_fraction | 0.885167 |
| qsc_code_num_words_quality_signal | 24 |
| qsc_code_num_chars_quality_signal | 209 |
| qsc_code_mean_word_length_quality_signal | 7.708333 |
| qsc_code_frac_words_unique_quality_signal | 0.416667 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.259459 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.389189 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0.015544 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.076555 |
| qsc_code_size_file_byte_quality_signal | 209 |
| qsc_code_num_lines_quality_signal | 4 |
| qsc_code_num_chars_line_max_quality_signal | 57 |
| qsc_code_num_chars_line_mean_quality_signal | 52.25 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.943005 |
| qsc_code_frac_chars_comments_quality_signal | 0 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0 |
| qsc_codepython_cate_var_zero_quality_signal | true |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 1 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 1 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 1 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 1 |
| qsc_code_frac_chars_top_3grams | 1 |
| qsc_code_frac_chars_top_4grams | 0 |
| qsc_code_frac_chars_dupe_5grams | 0 |
| qsc_code_frac_chars_dupe_6grams | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 1 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 1 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 1 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 1 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 7 |
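The quality signals are not documented in the dump itself, but several of the simpler ones can be reproduced from the content cell. The sketch below recovers the stored values for this record under one assumption about the tokenizer: words are maximal alphanumeric runs, and uniqueness is computed case-insensitively.

```python
import re

# Record 1's content cell, verbatim.
code = (
    "from summarus.models.copynet import CustomCopyNetSeq2Seq\n"
    "from summarus.models.seq2seq import Seq2Seq\n"
    "from summarus.models.pgn import PointerGeneratorNetwork\n"
    "from summarus.models.summarunner import SummaRuNNer\n"
)

words = re.findall(r"[A-Za-z0-9]+", code)            # assumed tokenization
print(len(words))                                    # 24 (qsc_code_num_words_quality_signal)
print(sum(map(len, words)) / len(words))             # 7.708333... (mean word length)
print(len({w.lower() for w in words}) / len(words))  # 0.416666... (frac words unique)
print(sum(map(len, words)) / len(code))              # 0.885167... (alphanum_fraction)
print(max(len(line) for line in code.splitlines()))  # 56 (max_line_length)
```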
Row 2

| field | value |
|---|---|
| hexsha | c74cff030f4eb6d9d72538cc023431e1ecec5fa4 |
| size | 14,134 |
| ext | py |
| lang | Python |
| max_stars_repo_path | api/models-old.py |
| max_stars_repo_name | hackoregon/orestar-sync |
| max_stars_repo_head_hexsha | 95a400d40d7cbe93f60cc13fb017378972b6dd74 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 2 |
| max_stars_repo_stars_event_min_datetime | 2019-06-30T21:05:15.000Z |
| max_stars_repo_stars_event_max_datetime | 2019-07-02T03:25:05.000Z |
| max_issues_repo_path | api/models-old.py |
| max_issues_repo_name | radmosley/OreStar-API |
| max_issues_repo_head_hexsha | 90f8d0eb0f57b0e3e1843094c3c52dd1f92e6384 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 10 |
| max_issues_repo_issues_event_min_datetime | 2019-08-20T03:38:55.000Z |
| max_issues_repo_issues_event_max_datetime | 2021-06-10T21:52:34.000Z |
| max_forks_repo_path | api/models-old.py |
| max_forks_repo_name | hackoregon/orestar-sync |
| max_forks_repo_head_hexsha | 95a400d40d7cbe93f60cc13fb017378972b6dd74 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
# CopyManager is used by the Transactions model below but is not imported in the
# original file; it presumably comes from the django-postgres-copy package.
from postgres_copy import CopyManager
class Ballots(models.Model):
name = models.CharField(max_length=255, blank=True, null=True)
district = models.CharField(max_length=64, blank=True, null=True)
party = models.CharField(max_length=4, blank=True, null=True)
race = models.CharField(max_length=128, blank=True, null=True)
type = models.CharField(max_length=2, blank=True, null=True)
won = models.IntegerField()
writein = models.IntegerField()
year = models.CharField(max_length=4, blank=True, null=True)
votes = models.TextField(blank=True, null=True) # This field type is a guess.
class Meta:
managed = False
db_table = 'ballots'
class CommitteeHistory(models.Model):
committee_id = models.IntegerField(primary_key=True)
committee_name = models.CharField(max_length=255, blank=True, null=True)
committee_description = models.CharField(max_length=1024, blank=True, null=True)
effective = models.DateField(blank=True, null=True)
expiration = models.DateField(blank=True, null=True)
filing_type = models.CharField(max_length=32, blank=True, null=True)
class Meta:
managed = False
db_table = 'committee_history'
class CommitteesList(models.Model):
id = models.IntegerField(primary_key=True)
filer_name = models.CharField(max_length=255, blank=True, null=True)
filer_description = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'committees_list'
class Donor(models.Model):
donor_id = models.IntegerField(primary_key=True)
donor_name = models.CharField(max_length=255, blank=True, null=True)
donor_address = models.CharField(max_length=255, blank=True, null=True)
donor_type = models.CharField(max_length=32, blank=True, null=True)
donor_cats = models.TextField(blank=True, null=True) # This field type is a guess.
class Meta:
managed = False
db_table = 'donor'
class ElectionActivity(models.Model):
election = models.CharField(max_length=64, blank=True, null=True)
committee_id = models.IntegerField(primary_key=True)
active_date = models.DateField(blank=True, null=True)
status = models.CharField(max_length=8, blank=True, null=True)
active_reason = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'election_activity'
class Payee(models.Model):
payee_id = models.IntegerField(primary_key=True)
payee_name = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'payee'
class StatementOfOrg(models.Model):
committee_id = models.IntegerField(primary_key=True)
committee_name = models.CharField(max_length=255, blank=True, null=True)
candidate_address = models.CharField(max_length=255, blank=True, null=True)
committee_acronym = models.CharField(max_length=32, blank=True, null=True)
committee_address = models.CharField(max_length=255, blank=True, null=True)
committee_campaign_phone = models.CharField(max_length=32, blank=True, null=True)
committee_filing_effective_from = models.CharField(max_length=255, blank=True, null=True)
committee_filing_type = models.CharField(max_length=10, blank=True, null=True)
committee_pac_type = models.CharField(max_length=32, blank=True, null=True)
election_office = models.CharField(max_length=255, blank=True, null=True)
email_address = models.CharField(max_length=255, blank=True, null=True)
employer = models.CharField(max_length=255, blank=True, null=True)
fax = models.CharField(max_length=32, blank=True, null=True)
home_phone = models.CharField(max_length=32, blank=True, null=True)
mailing_address = models.CharField(max_length=255, blank=True, null=True)
name = models.CharField(max_length=255, blank=True, null=True)
occupation = models.CharField(max_length=255, blank=True, null=True)
party_affiliation = models.CharField(max_length=16, blank=True, null=True)
treasurer_email_address = models.CharField(max_length=255, blank=True, null=True)
treasurer_fax = models.CharField(max_length=32, blank=True, null=True)
treasurer_home_phone = models.CharField(max_length=32, blank=True, null=True)
treasurer_mailing_address = models.CharField(max_length=255, blank=True, null=True)
treasurer_name = models.CharField(max_length=255, blank=True, null=True)
treasurer_work_phone = models.CharField(max_length=32, blank=True, null=True)
work_phone = models.CharField(max_length=32, blank=True, null=True)
class Meta:
managed = False
db_table = 'statement_of_org'
class TransactionDetails(models.Model):
transaction_id = models.IntegerField(primary_key=True)
payee_id = models.IntegerField(blank=True, null=True)
donor_id = models.IntegerField(blank=True, null=True)
address = models.CharField(max_length=255, blank=True, null=True)
address_book_type = models.CharField(max_length=32, blank=True, null=True)
agent = models.CharField(max_length=64, blank=True, null=True)
aggregate = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
amount = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
associations = models.CharField(max_length=2048, blank=True, null=True)
description = models.CharField(max_length=255, blank=True, null=True)
due_date = models.DateTimeField(blank=True, null=True)
employer_name = models.CharField(max_length=255, blank=True, null=True)
filed_date = models.DateTimeField(blank=True, null=True)
name = models.CharField(max_length=255, blank=True, null=True)
occupation = models.CharField(max_length=255, blank=True, null=True)
occupation_letter_date = models.DateField(blank=True, null=True)
payer_of_personal_expenditure = models.CharField(max_length=255, blank=True, null=True)
payment_method = models.CharField(max_length=32, blank=True, null=True)
process_status = models.CharField(max_length=32, blank=True, null=True)
purpose = models.CharField(max_length=512, blank=True, null=True)
repayment_schedule = models.CharField(max_length=128, blank=True, null=True)
transaction_date = models.DateField(blank=True, null=True)
transaction_sub_type = models.CharField(max_length=255, blank=True, null=True)
transaction_type = models.CharField(max_length=32, blank=True, null=True)
@property
def total_amount(self):
if self.amount.is_nan():
return "not available"
return self.amount
@property
def total_aggregate(self):
if self.aggregate.is_nan():
return "not available"
return self.aggregate
class Meta:
managed = False
db_table = 'transaction_details'
class Transactions(models.Model):
transaction_id = models.IntegerField(primary_key=True)
committee_id = models.IntegerField(blank=True, null=True)
transaction_date = models.DateField(blank=True, null=True)
status = models.CharField(max_length=32, blank=True, null=True)
filer_committee = models.CharField(max_length=255, blank=True, null=True)
contributor_payee = models.CharField(max_length=255, blank=True, null=True)
transaction_subtype = models.CharField(max_length=255, blank=True, null=True)
amount = models.CharField(max_length=100)
objects = CopyManager()
@property
def total_amount(self):
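# NOTE: `amount` is declared above as a CharField, so `.is_nan()` only
# succeeds if the attribute actually holds a Decimal at runtime.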
if self.amount.is_nan():
return "not available"
return self.amount
class Meta:
managed = False
db_table = 'transactions'
class ContributorBreakdown(models.Model):
committee_id = models.IntegerField(primary_key=True)
sum = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
filer_name = models.CharField(max_length=255, blank=True, null=True)
donor_category = models.CharField(max_length=255, blank=True, null=True)
ratio = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'contributor_breakdown'
class TotalContributionsMonthly(models.Model):
committee_id = models.IntegerField(primary_key=True)
sum = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
filer_name = models.CharField(max_length=255, blank=True, null=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
month = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
class Meta:
managed = False
db_table = 'total_contributions_raw_month'
class TotalContributionsYearly(models.Model):
committee_id = models.IntegerField(primary_key=True)
sum = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
filer_name = models.CharField(max_length=255, blank=True, null=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
year = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
class Meta:
managed = False
db_table = 'total_contributions_raw_year'
class ElectionCycles(models.Model):
name = models.CharField(max_length=255, blank=True, null=True)
type = models.CharField(max_length=255, blank=True, null=True)
start_date = models.DateField()
end_date = models.DateField()
class Meta:
managed = False
db_table = 'election_cycles'
class TotalContributionsRaw(models.Model):
sum = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
filer_name = models.CharField(max_length=255, blank=True, null=True)
id = models.IntegerField(primary_key=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'total_contributions_raw'
class TotalContributionsRawInState(models.Model):
committee_id = models.IntegerField(primary_key=True)
sum = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
filer_name = models.CharField(max_length=255, blank=True, null=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'total_contributions_raw_in_state'
class TotalContributionsRawMonthTotal(models.Model):
id = models.IntegerField(primary_key=True)
sum = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
month = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
year = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
class Meta:
managed = False
db_table = 'total_contributions_raw_month_total'
class TotalContributionsRawMonthRaceType(models.Model):
id = models.IntegerField(primary_key=True)
sum = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
race_type = models.CharField(max_length=255, blank=True, null=True)
month = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
year = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
class Meta:
managed = False
db_table = 'total_contributions_raw_month_race_type'
class SpendingBreakdown(models.Model):
sum = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
filer_name = models.CharField(max_length=255, blank=True, null=True)
committee_id = models.IntegerField(primary_key=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
spending_category = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'spending_breakdown'
class CommitteeContributors(models.Model):
sum = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
contributor_payee = models.CharField(max_length=255, blank=True, null=True)
filer_name = models.CharField(max_length=255, blank=True, null=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
committee_id = models.IntegerField(primary_key=True)
class Meta:
managed = False
db_table = 'committee_contributions'
class ContributorGraph(models.Model):
class Meta:
managed = False
class VoterAcquisitionCost(models.Model):
voter_acquisition_cost = models.DecimalField(max_digits=5, decimal_places=4, blank=True, null=True)
year = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
votes = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True)
type = models.CharField(max_length=1, blank=True, null=True)
election_cycle = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'voter_acquisition_cost'
class CommitteeElectionCycle(models.Model):
committee_name = models.CharField(max_length=255, blank=True, null=True)
name = models.CharField(max_length=255, blank=True, null=True)
committee_id = models.IntegerField(primary_key=True)
class Meta:
managed = False
db_table = 'committee_election_cycle'
```

| field | value |
|---|---|
| avg_line_length | 44.30721 |
| max_line_length | 104 |
| alphanum_fraction | 0.73631 |
| qsc_code_num_words_quality_signal | 1,896 |
| qsc_code_num_chars_quality_signal | 14,134 |
| qsc_code_mean_word_length_quality_signal | 5.320148 |
| qsc_code_frac_words_unique_quality_signal | 0.110232 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.108853 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.157232 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.205611 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.818678 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0.798354 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0.775751 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0.74373 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0.738773 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0.669575 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0.025212 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.158129 |
| qsc_code_size_file_byte_quality_signal | 14,134 |
| qsc_code_num_lines_quality_signal | 318 |
| qsc_code_num_chars_line_max_quality_signal | 105 |
| qsc_code_num_chars_line_mean_quality_signal | 44.446541 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.822506 |
| qsc_code_frac_chars_comments_quality_signal | 0.036012 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0.481928 |
| qsc_code_cate_autogen_quality_signal | 1 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0.03386 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0.020272 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0.012048 |
| qsc_codepython_cate_var_zero_quality_signal | false |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.004016 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.803213 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 0 |
| qsc_code_frac_chars_top_4grams | 1 |
| qsc_code_frac_chars_dupe_5grams | 1 |
| qsc_code_frac_chars_dupe_6grams | 1 |
| qsc_code_frac_chars_dupe_7grams | 1 |
| qsc_code_frac_chars_dupe_8grams | 1 |
| qsc_code_frac_chars_dupe_9grams | 1 |
| qsc_code_frac_chars_dupe_10grams | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 0 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 1 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 8 |
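Because every model in this record sets `managed = False`, Django will neither create nor migrate these tables; they are thin wrappers over an existing database. A hedged usage sketch, assuming the models live in an importable module (the original file name `models-old.py` is not importable as-is) inside a configured Django project:

```python
# Query the ten most recently effective committee filings.
# `api.models` is a hypothetical import path for the models above.
from api.models import CommitteeHistory

recent = (
    CommitteeHistory.objects
    .order_by("-effective")
    .values("committee_id", "committee_name", "effective")[:10]
)
for row in recent:
    print(row)
```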
Row 3

| field | value |
|---|---|
| hexsha | c7bf3cdd1bc164a99c8f4991aa18c829d01d6bb6 |
| size | 30,519 |
| ext | py |
| lang | Python |
| max_stars_repo_path | com/vmware/vmc/draas_client.py |
| max_stars_repo_name | adammillerio/vsphere-automation-sdk-python |
| max_stars_repo_head_hexsha | c07e1be98615201139b26c28db3aa584c4254b66 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | com/vmware/vmc/draas_client.py |
| max_issues_repo_name | adammillerio/vsphere-automation-sdk-python |
| max_issues_repo_head_hexsha | c07e1be98615201139b26c28db3aa584c4254b66 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | com/vmware/vmc/draas_client.py |
| max_forks_repo_name | adammillerio/vsphere-automation-sdk-python |
| max_forks_repo_head_hexsha | c07e1be98615201139b26c28db3aa584c4254b66 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.vmc.draas.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class ReplicaDiskCollections(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.vmc.draas.replica_disk_collections'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _ReplicaDiskCollectionsStub)
self._VAPI_OPERATION_IDS = {}
def get(self,
org,
sddc,
datastore_mo_id=None,
):
"""
Query replica disk collections
:type org: :class:`str`
:param org: Organization identifier (required)
:type sddc: :class:`str`
:param sddc: sddc identifier (required)
:type datastore_mo_id: :class:`str` or ``None``
:param datastore_mo_id: Represents the datastore moref id to search. (optional)
:rtype: :class:`list` of :class:`com.vmware.vmc.draas.model_client.ReplicaDiskCollection`
:return:
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
Unauthorized
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not found
"""
return self._invoke('get',
{
'org': org,
'sddc': sddc,
'datastore_mo_id': datastore_mo_id,
})
class SiteRecovery(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.vmc.draas.site_recovery'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _SiteRecoveryStub)
self._VAPI_OPERATION_IDS = {}
def delete(self,
org,
sddc,
force=None,
deactivate_hcx=None,
):
"""
Deactivate site recovery for the specified sddc
:type org: :class:`str`
:param org: Organization identifier (required)
:type sddc: :class:`str`
:param sddc: sddc identifier (required)
:type force: :class:`bool` or ``None``
:param force: If = 'true', will deactivate site recovery forcefully. (optional)
:type deactivate_hcx: :class:`bool` or ``None``
:param deactivate_hcx: If = 'true', will deactivate HCX. (optional)
:rtype: :class:`com.vmware.vmc.draas.model_client.Task`
:return: com.vmware.vmc.draas.model.Task
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
Unauthorized
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Invalid action or bad argument
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Cannot find site recovery configuration for sddc identifier
"""
return self._invoke('delete',
{
'org': org,
'sddc': sddc,
'force': force,
'deactivate_hcx': deactivate_hcx,
})
def get(self,
org,
sddc,
):
"""
Query site recovery configuration for the specified sddc
:type org: :class:`str`
:param org: Organization identifier (required)
:type sddc: :class:`str`
:param sddc: sddc identifier (required)
:rtype: :class:`com.vmware.vmc.draas.model_client.SiteRecovery`
:return: com.vmware.vmc.draas.model.SiteRecovery
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
Unauthorized
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Invalid action or bad argument
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
"""
return self._invoke('get',
{
'org': org,
'sddc': sddc,
})
def post(self,
org,
sddc,
activate_site_recovery_config=None,
):
"""
Activate site recovery for the specified sddc
:type org: :class:`str`
:param org: Organization identifier (required)
:type sddc: :class:`str`
:param sddc: sddc identifier (required)
:type activate_site_recovery_config: :class:`com.vmware.vmc.draas.model_client.ActivateSiteRecoveryConfig` or ``None``
:param activate_site_recovery_config: Customization, for example can specify custom extension key suffix
for SRM. (optional)
:rtype: :class:`com.vmware.vmc.draas.model_client.Task`
:return: com.vmware.vmc.draas.model.Task
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
Unauthorized
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Invalid action or bad argument
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Cannot find site recovery configuration for sddc identifier
"""
return self._invoke('post',
{
'org': org,
'sddc': sddc,
'activate_site_recovery_config': activate_site_recovery_config,
})
class SiteRecoverySrmNodes(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.vmc.draas.site_recovery_srm_nodes'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _SiteRecoverySrmNodesStub)
self._VAPI_OPERATION_IDS = {}
def delete(self,
org,
sddc,
srm_node,
):
"""
Delete a SRM server.
:type org: :class:`str`
:param org: Organization identifier (required)
:type sddc: :class:`str`
:param sddc: sddc identifier (required)
:type srm_node: :class:`str`
:param srm_node: SRM node identifier (required)
:rtype: :class:`com.vmware.vmc.draas.model_client.Task`
:return: com.vmware.vmc.draas.model.Task
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
Unauthorized
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Invalid action or bad argument
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Cannot find SDDC or SRM node
"""
return self._invoke('delete',
{
'org': org,
'sddc': sddc,
'srm_node': srm_node,
})
def post(self,
org,
sddc,
provision_srm_config=None,
):
"""
Provision an additional SRM server.
:type org: :class:`str`
:param org: Organization identifier (required)
:type sddc: :class:`str`
:param sddc: sddc identifier (required)
:type provision_srm_config: :class:`com.vmware.vmc.draas.model_client.ProvisionSrmConfig` or ``None``
:param provision_srm_config: Customization, for example can specify custom extension key suffix
for SRM. (optional)
:rtype: :class:`com.vmware.vmc.draas.model_client.Task`
:return: com.vmware.vmc.draas.model.Task
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
Unauthorized
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Invalid action or bad argument
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Cannot find site recovery configuration for sddc identifier
"""
return self._invoke('post',
{
'org': org,
'sddc': sddc,
'provision_srm_config': provision_srm_config,
})
class SiteRecoveryVersions(VapiInterface):
"""
"""
GET_VERSION_SOURCE_VAMICLI = "vamicli"
"""
Possible value for ``versionSource`` of method
:func:`SiteRecoveryVersions.get`.
"""
GET_VERSION_SOURCE_LS = "ls"
"""
Possible value for ``versionSource`` of method
:func:`SiteRecoveryVersions.get`.
"""
_VAPI_SERVICE_ID = 'com.vmware.vmc.draas.site_recovery_versions'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _SiteRecoveryVersionsStub)
self._VAPI_OPERATION_IDS = {}
def get(self,
org,
sddc,
version_source=None,
):
"""
Query site recovery versions for the specified sddc
:type org: :class:`str`
:param org: Organization identifier (required)
:type sddc: :class:`str`
:param sddc: sddc identifier (required)
:type version_source: :class:`str` or ``None``
:param version_source: Represents the source for getting the version from. (optional)
:rtype: :class:`com.vmware.vmc.draas.model_client.SiteRecoveryVersions`
:return: com.vmware.vmc.draas.model.SiteRecoveryVersions
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
Unauthorized
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Invalid action or bad argument
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Cannot find site recovery versions for sddc identifier
"""
return self._invoke('get',
{
'org': org,
'sddc': sddc,
'version_source': version_source,
})
class Task(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.vmc.draas.task'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _TaskStub)
self._VAPI_OPERATION_IDS = {}
def get(self,
org,
task,
):
"""
Retrieve details of a task.
:type org: :class:`str`
:param org: Organization identifier (required)
:type task: :class:`str`
:param task: task identifier (required)
:rtype: :class:`com.vmware.vmc.draas.model_client.Task`
:return: com.vmware.vmc.draas.model.Task
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthenticated`
Unauthorized
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Cannot find the task with given identifier
"""
return self._invoke('get',
{
'org': org,
'task': task,
})
class _ReplicaDiskCollectionsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'org': type.StringType(),
'sddc': type.StringType(),
'datastore_mo_id': type.OptionalType(type.StringType()),
})
get_error_dict = {
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/vmc/draas/api/orgs/{org}/sddcs/{sddc}/site-recovery/replica-disk-collections',
path_variables={
'org': 'org',
'sddc': 'sddc',
},
query_parameters={
'datastore_mo_id': 'datastore_mo_id',
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ListType(type.ReferenceType('com.vmware.vmc.draas.model_client', 'ReplicaDiskCollection')),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.vmc.draas.replica_disk_collections',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _SiteRecoveryStub(ApiInterfaceStub):
def __init__(self, config):
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'org': type.StringType(),
'sddc': type.StringType(),
'force': type.OptionalType(type.BooleanType()),
'deactivate_hcx': type.OptionalType(type.BooleanType()),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/vmc/draas/api/orgs/{org}/sddcs/{sddc}/site-recovery',
path_variables={
'org': 'org',
'sddc': 'sddc',
},
query_parameters={
'force': 'force',
'deactivate_hcx': 'deactivate_hcx',
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'org': type.StringType(),
'sddc': type.StringType(),
})
get_error_dict = {
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/vmc/draas/api/orgs/{org}/sddcs/{sddc}/site-recovery',
path_variables={
'org': 'org',
'sddc': 'sddc',
},
query_parameters={
},
content_type='application/json'
)
# properties for post operation
post_input_type = type.StructType('operation-input', {
'org': type.StringType(),
'sddc': type.StringType(),
'activate_site_recovery_config': type.OptionalType(type.ReferenceType('com.vmware.vmc.draas.model_client', 'ActivateSiteRecoveryConfig')),
})
post_error_dict = {
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
post_input_value_validator_list = [
]
post_output_validator_list = [
]
post_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/vmc/draas/api/orgs/{org}/sddcs/{sddc}/site-recovery',
request_body_parameter='activate_site_recovery_config',
path_variables={
'org': 'org',
'sddc': 'sddc',
},
query_parameters={
},
content_type='application/json'
)
operations = {
'delete': {
'input_type': delete_input_type,
'output_type': type.ReferenceType('com.vmware.vmc.draas.model_client', 'Task'),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.vmc.draas.model_client', 'SiteRecovery'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'post': {
'input_type': post_input_type,
'output_type': type.ReferenceType('com.vmware.vmc.draas.model_client', 'Task'),
'errors': post_error_dict,
'input_value_validator_list': post_input_value_validator_list,
'output_validator_list': post_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'delete': delete_rest_metadata,
'get': get_rest_metadata,
'post': post_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.vmc.draas.site_recovery',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _SiteRecoverySrmNodesStub(ApiInterfaceStub):
def __init__(self, config):
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'org': type.StringType(),
'sddc': type.StringType(),
'srm_node': type.StringType(),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/vmc/draas/api/orgs/{org}/sddcs/{sddc}/site-recovery/srm-nodes/{srmNode}',
path_variables={
'org': 'org',
'sddc': 'sddc',
'srm_node': 'srmNode',
},
query_parameters={
},
content_type='application/json'
)
# properties for post operation
post_input_type = type.StructType('operation-input', {
'org': type.StringType(),
'sddc': type.StringType(),
'provision_srm_config': type.OptionalType(type.ReferenceType('com.vmware.vmc.draas.model_client', 'ProvisionSrmConfig')),
})
post_error_dict = {
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
post_input_value_validator_list = [
]
post_output_validator_list = [
]
post_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/vmc/draas/api/orgs/{org}/sddcs/{sddc}/site-recovery/srm-nodes',
request_body_parameter='provision_srm_config',
path_variables={
'org': 'org',
'sddc': 'sddc',
},
query_parameters={
},
content_type='application/json'
)
operations = {
'delete': {
'input_type': delete_input_type,
'output_type': type.ReferenceType('com.vmware.vmc.draas.model_client', 'Task'),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'post': {
'input_type': post_input_type,
'output_type': type.ReferenceType('com.vmware.vmc.draas.model_client', 'Task'),
'errors': post_error_dict,
'input_value_validator_list': post_input_value_validator_list,
'output_validator_list': post_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'delete': delete_rest_metadata,
'post': post_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.vmc.draas.site_recovery_srm_nodes',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _SiteRecoveryVersionsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'org': type.StringType(),
'sddc': type.StringType(),
'version_source': type.OptionalType(type.StringType()),
})
get_error_dict = {
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/vmc/draas/api/orgs/{org}/sddcs/{sddc}/site-recovery/versions',
path_variables={
'org': 'org',
'sddc': 'sddc',
},
query_parameters={
'version_source': 'version_source',
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.vmc.draas.model_client', 'SiteRecoveryVersions'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.vmc.draas.site_recovery_versions',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _TaskStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {
'org': type.StringType(),
'task': type.StringType(),
})
get_error_dict = {
'com.vmware.vapi.std.errors.unauthenticated':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthenticated'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/vmc/draas/api/orgs/{org}/tasks/{task}',
path_variables={
'org': 'org',
'task': 'task',
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.vmc.draas.model_client', 'Task'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.vmc.draas.task',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class StubFactory(StubFactoryBase):
_attrs = {
'ReplicaDiskCollections': ReplicaDiskCollections,
'SiteRecovery': SiteRecovery,
'SiteRecoverySrmNodes': SiteRecoverySrmNodes,
'SiteRecoveryVersions': SiteRecoveryVersions,
'Task': Task,
'model': 'com.vmware.vmc.draas.model_client.StubFactory',
}
```

| field | value |
|---|---|
| avg_line_length | 39.076825 |
| max_line_length | 150 |
| alphanum_fraction | 0.574953 |
| qsc_code_num_words_quality_signal | 2,939 |
| qsc_code_num_chars_quality_signal | 30,519 |
| qsc_code_mean_word_length_quality_signal | 5.741409 |
| qsc_code_frac_words_unique_quality_signal | 0.071113 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.067204 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.067026 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.082494 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.857117 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0.842776 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0.825708 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0.805144 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0.77569 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0.753763 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0.000237 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.30961 |
| qsc_code_size_file_byte_quality_signal | 30,519 |
| qsc_code_num_lines_quality_signal | 780 |
| qsc_code_num_chars_line_max_quality_signal | 151 |
| qsc_code_num_chars_line_mean_quality_signal | 39.126923 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.800617 |
| qsc_code_frac_chars_comments_quality_signal | 0.237131 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0.669339 |
| qsc_code_cate_autogen_quality_signal | 1 |
| qsc_code_frac_lines_long_string_quality_signal | 0.006012 |
| qsc_code_frac_chars_string_length_quality_signal | 0.260393 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0.180899 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0.036072 |
| qsc_codepython_cate_var_zero_quality_signal | false |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.024048 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.114228 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 0 |
| qsc_code_frac_chars_top_4grams | 0 |
| qsc_code_frac_chars_dupe_5grams | 1 |
| qsc_code_frac_chars_dupe_6grams | 1 |
| qsc_code_frac_chars_dupe_7grams | 1 |
| qsc_code_frac_chars_dupe_8grams | 1 |
| qsc_code_frac_chars_dupe_9grams | 1 |
| qsc_code_frac_chars_dupe_10grams | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 0 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 1 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 7 |
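Each stub class in the record above pairs a vAPI operation with an `OperationRestMetadata` entry; the URL template plus the path-variable and query-parameter maps fully determine the HTTP request. A standalone illustration (plain Python string handling, not SDK code) of how `SiteRecoveryVersions.get` resolves to a request line, with sample identifier values:

```python
# Resolve the rest metadata of SiteRecoveryVersions.get into a request line.
url_template = "/vmc/draas/api/orgs/{org}/sddcs/{sddc}/site-recovery/versions"
path_variables = {"org": "my-org-id", "sddc": "my-sddc-id"}  # sample values
query_parameters = {"version_source": "vamicli"}             # GET_VERSION_SOURCE_VAMICLI

url = url_template.format(**path_variables)
query = "&".join(f"{key}={value}" for key, value in query_parameters.items())
print(f"GET {url}?{query}")
# GET /vmc/draas/api/orgs/my-org-id/sddcs/my-sddc-id/site-recovery/versions?version_source=vamicli
```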
Row 4

| field | value |
|---|---|
| hexsha | c7c6f5a43046226ea0673fc5d674130ffb9acbae |
| size | 20,840 |
| ext | py |
| lang | Python |
| max_stars_repo_path | openconcept/analysis/performance/mission_profiles.py |
| max_stars_repo_name | bbrelje/openconcept |
| max_stars_repo_head_hexsha | b41bc831ed6aa2742ec35cacd3249395ee4527db |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | openconcept/analysis/performance/mission_profiles.py |
| max_issues_repo_name | bbrelje/openconcept |
| max_issues_repo_head_hexsha | b41bc831ed6aa2742ec35cacd3249395ee4527db |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 6 |
| max_issues_repo_issues_event_min_datetime | 2018-06-28T18:54:36.000Z |
| max_issues_repo_issues_event_max_datetime | 2021-12-17T16:18:20.000Z |
| max_forks_repo_path | openconcept/analysis/performance/mission_profiles.py |
| max_forks_repo_name | bbrelje/openconcept |
| max_forks_repo_head_hexsha | b41bc831ed6aa2742ec35cacd3249395ee4527db |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |
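The content cell below defines OpenConcept's mission-analysis groups. As a usage illustration, here is a sketch of wiring `FullMissionAnalysis` into an OpenMDAO problem; the aircraft model class `MyAircraft` and the chosen `extra_states` value are assumptions for illustration, not taken from the repository.

```python
from openmdao.api import Problem
from openconcept.analysis.performance.mission_profiles import FullMissionAnalysis
from my_package.aircraft import MyAircraft  # hypothetical OpenConcept-compliant model

prob = Problem()
prob.model = FullMissionAnalysis(
    num_nodes=11,               # must be 2N + 1 for Simpson's rule
    aircraft_model=MyAircraft,
    # ('state_var_name', ('segments', 'to', 'connect', 'across')) per the
    # docstring below; the state name 'fuel_used' is an assumption.
    extra_states=(("fuel_used",
                   ("v0v1", "v1vr", "rotate", "climb", "cruise", "descent")),),
)
prob.setup()
prob.run_model()
```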
content:

```python
from openmdao.api import IndepVarComp, Group, BalanceComp
from openconcept.analysis.performance.solver_phases import BFLImplicitSolve, GroundRollPhase, RotationPhase, RobustRotationPhase, SteadyFlightPhase, ClimbAnglePhase
class ThreePhaseMissionOnly(Group):
"""
This analysis group is set up to compute all the major parameters
of a fixed wing mission, including climb, cruise, and descent.
To use this analysis, pass in an aircraft model following the OpenConcept interface.
Namely, the model should consume the following:
- flight conditions (fltcond|q/rho/p/T/Utrue/Ueas/...)
- aircraft design parameters (ac|*)
- lift coefficient (fltcond|CL; either solved from steady flight or assumed during ground roll)
- throttle
- propulsor_failed (value 0 when failed, 1 when not failed)
and produce top-level outputs:
- thrust
- drag
- weight
the following parameters need to either be defined as design variables or
given as top-level analysis outputs from the airplane model:
- ac|geom|S_ref
- ac|aero|CL_max_flaps30
- ac|weights|MTOW
Inputs
------
ac|* : various
All relevant airplane design variables to pass to the airplane model
takeoff|h : float
Takeoff obstacle clearance height (default 50 ft)
cruise|h0 : float
Initial cruise altitude (default 28000 ft)
payload : float
Mission payload (default 1000 lbm)
mission_range : float
Design range (default 1250 NM)
Options
-------
aircraft_model : class
An aircraft model class with the standard OpenConcept interfaces promoted correctly
num_nodes : int
Number of analysis points per segment. Higher is more accurate but more expensive
extra_states : tuple
Any extra integrated states to connect across the model.
Format is ('state_var_name', ('segments','to','connect','across'))
"""
def initialize(self):
self.options.declare('num_nodes', default=9, desc="Number of points per segment. Needs to be 2N + 1 due to Simpson's rule")
self.options.declare('aircraft_model', default=None, desc="OpenConcept-compliant airplane model")
self.options.declare('extra_states', default=None, desc="Extra states to connect across mission phases")
def setup(self):
nn = self.options['num_nodes']
acmodelclass = self.options['aircraft_model']
mp = self.add_subsystem('missionparams',IndepVarComp(),promotes_outputs=['*'])
mp.add_output('takeoff|h',val=0.,units='ft')
mp.add_output('cruise|h0',val=28000.,units='ft')
mp.add_output('mission_range',val=1250.,units='NM')
mp.add_output('payload',val=1000.,units='lbm')
# add the climb, cruise, and descent segments
self.add_subsystem('climb',SteadyFlightPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='climb'),promotes_inputs=['ac|*'])
# set the climb time such that the specified initial cruise altitude is exactly reached
self.add_subsystem('climbdt',BalanceComp(name='duration',units='s',eq_units='m',val=120,upper=2000,lower=0,rhs_name='cruise|h0',lhs_name='fltcond|h_final'),promotes_inputs=['cruise|h0'])
self.connect('climb.fltcond|h_final','climbdt.fltcond|h_final')
self.connect('climbdt.duration','climb.duration')
self.add_subsystem('cruise',SteadyFlightPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='cruise'),promotes_inputs=['ac|*'])
# set the cruise time such that the desired design range is flown by the end of the mission
self.add_subsystem('cruisedt',BalanceComp(name='duration',units='s',eq_units='m',val=120, upper=25000, lower=0,rhs_name='mission_range',lhs_name='range_final'),promotes_inputs=['mission_range'])
self.connect('cruisedt.duration','cruise.duration')
self.add_subsystem('descent',SteadyFlightPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='descent'),promotes_inputs=['ac|*'])
# set the descent time so that the final altitude is sea level again
self.add_subsystem('descentdt',BalanceComp(name='duration',units='s',eq_units='m', val=120, upper=2000, lower=0,rhs_name='takeoff|h',lhs_name='fltcond|h_final'),promotes_inputs=['takeoff|h'])
self.connect('descent.range_final','cruisedt.range_final')
self.connect('descent.fltcond|h_final','descentdt.fltcond|h_final')
self.connect('descentdt.duration','descent.duration')
# connect range, fuel burn, and altitude from the end of each segment to the beginning of the next, in order
extra_states = self.options['extra_states']
for extra_state in extra_states:
state_name = extra_state[0]
phases = extra_state[1]
for i in range(len(phases) - 1):
from_phase = phases[i]
to_phase = phases[i+1]
self.connect(from_phase+'.'+state_name+'_final',to_phase+'.'+state_name+'_initial')
class FullMissionAnalysis(Group):
"""
This analysis group is set up to compute all the major parameters
of a fixed wing mission, including balanced-field takeoff, climb, cruise, and descent.
To use this analysis, pass in an aircraft model following the OpenConcept interface.
Namely, the model should consume the following:
- flight conditions (fltcond|q/rho/p/T/Utrue/Ueas/...)
- aircraft design parameters (ac|*)
- lift coefficient (fltcond|CL; either solved from steady flight or assumed during ground roll)
- throttle
- propulsor_failed (value 0 when failed, 1 when not failed)
and produce top-level outputs:
- thrust
- drag
- weight
the following parameters need to either be defined as design variables or
given as top-level analysis outputs from the airplane model:
- ac|geom|S_ref
- ac|aero|CL_max_flaps30
- ac|weights|MTOW
Inputs
------
ac|* : various
All relevant airplane design variables to pass to the airplane model
takeoff|h : float
Takeoff obstacle clearance height (default 50 ft)
cruise|h0 : float
Initial cruise altitude (default 28000 ft)
payload : float
Mission payload (default 1000 lbm)
mission_range : float
Design range (default 1250 NM)
Outputs
-------
takeoff|v1 : float
Decision speed
Options
-------
aircraft_model : class
An aircraft model class with the standard OpenConcept interfaces promoted correctly
num_nodes : int
Number of analysis points per segment. Higher is more accurate but more expensive
extra_states : tuple
Any extra integrated states to connect across the model.
Format is ('state_var_name', ('segments','to','connect','across'))
transition_method : str
Analysis method to compute distance, altitude, and time during transition
Default "simplified" is the Raymer circular arc method and is more robust
Option "ode" is a 2DOF ODE integration method which is arguably just as inaccurate and less robust
"""
def initialize(self):
self.options.declare('num_nodes', default=9, desc="Number of points per segment. Needs to be 2N + 1 due to Simpson's rule")
self.options.declare('aircraft_model', default=None, desc="OpenConcept-compliant airplane model")
self.options.declare('extra_states', default=None, desc="Extra states to connect across mission phases")
self.options.declare('transition_method', default='simplified', desc="Method to use for computing transition")
def setup(self):
nn = self.options['num_nodes']
acmodelclass = self.options['aircraft_model']
transition_method = self.options['transition_method']
# add the four balanced field length takeoff segments and the implicit v1 solver
# v0v1 - from a rolling start to v1 speed
# v1vr - from the decision speed to rotation
# rotate - in the air following rotation in 2DOF
# v1v0 - emergency stopping from v1 to a stop.
mp = self.add_subsystem('missionparams',IndepVarComp(),promotes_outputs=['*'])
mp.add_output('takeoff|h',val=0.,units='ft')
mp.add_output('cruise|h0',val=28000.,units='ft')
mp.add_output('mission_range',val=1250.,units='NM')
mp.add_output('payload',val=1000.,units='lbm')
self.add_subsystem('bfl', BFLImplicitSolve(), promotes_outputs=['takeoff|v1'])
self.add_subsystem('v0v1', GroundRollPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='v0v1'), promotes_inputs=['ac|*','takeoff|v1'])
self.add_subsystem('v1vr', GroundRollPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='v1vr'), promotes_inputs=['ac|*'])
self.connect('takeoff|v1','v1vr.fltcond|Utrue_initial')
self.connect('v0v1.range_final','v1vr.range_initial')
if transition_method == 'simplified':
self.add_subsystem('rotate',RobustRotationPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='rotate'),promotes_inputs=['ac|*'])
elif transition_method == 'ode':
self.add_subsystem('rotate',RotationPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='rotate'),promotes_inputs=['ac|*'])
self.connect('v1vr.fltcond|Utrue_final','rotate.fltcond|Utrue_initial')
else:
raise IOError('Invalid option for transition method')
self.connect('v1vr.range_final','rotate.range_initial')
self.connect('rotate.range_final','bfl.distance_continue')
self.connect('v1vr.takeoff|vr','bfl.takeoff|vr')
self.add_subsystem('v1v0',GroundRollPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='v1v0'), promotes_inputs=['ac|*','takeoff|v1'])
self.connect('v0v1.range_final','v1v0.range_initial')
self.connect('v1v0.range_final','bfl.distance_abort')
self.add_subsystem('engineoutclimb',ClimbAnglePhase(num_nodes=1, aircraft_model=acmodelclass, flight_phase='EngineOutClimbAngle'), promotes_inputs=['ac|*'])
# add the climb, cruise, and descent segments
self.add_subsystem('climb',SteadyFlightPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='climb'),promotes_inputs=['ac|*'])
# set the climb time such that the specified initial cruise altitude is exactly reached
self.add_subsystem('climbdt',BalanceComp(name='duration',units='s',eq_units='m',val=120,lower=0,upper=3000,rhs_name='cruise|h0',lhs_name='fltcond|h_final'),promotes_inputs=['cruise|h0'])
self.connect('climb.fltcond|h_final','climbdt.fltcond|h_final')
self.connect('climbdt.duration','climb.duration')
self.add_subsystem('cruise',SteadyFlightPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='cruise'),promotes_inputs=['ac|*'])
# set the cruise time such that the desired design range is flown by the end of the mission
self.add_subsystem('cruisedt',BalanceComp(name='duration',units='s',eq_units='km',val=120, lower=0,upper=30000,rhs_name='mission_range',lhs_name='range_final'),promotes_inputs=['mission_range'])
self.connect('cruisedt.duration','cruise.duration')
self.add_subsystem('descent',SteadyFlightPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='descent'),promotes_inputs=['ac|*'])
# set the descent time so that the final altitude is sea level again
self.add_subsystem('descentdt',BalanceComp(name='duration',units='s',eq_units='m', val=120, lower=0,upper=3000,rhs_name='takeoff|h',lhs_name='fltcond|h_final'),promotes_inputs=['takeoff|h'])
self.connect('descent.range_final','cruisedt.range_final')
self.connect('descent.fltcond|h_final','descentdt.fltcond|h_final')
self.connect('descentdt.duration','descent.duration')
# connect range, fuel burn, and altitude from the end of each segment to the beginning of the next, in order
extra_states = self.options['extra_states']
for extra_state in extra_states:
state_name = extra_state[0]
phases = extra_state[1]
for i in range(len(phases) - 1):
from_phase = phases[i]
to_phase = phases[i+1]
self.connect(from_phase+'.'+state_name+'_final',to_phase+'.'+state_name+'_initial')
class CruiseOnly(Group):
"""
This analysis group is set up to compute a single steady cruise phase
of a fixed wing mission (no takeoff, climb, or descent segments).
To use this analysis, pass in an aircraft model following the OpenConcept interface.
Namely, the model should consume the following:
- flight conditions (fltcond|q/rho/p/T/Utrue/Ueas/...)
- aircraft design parameters (ac|*)
- lift coefficient (fltcond|CL; either solved from steady flight or assumed during ground roll)
- throttle
- propulsor_failed (value 0 when failed, 1 when not failed)
and produce top-level outputs:
- thrust
- drag
- weight
the following parameters need to either be defined as design variables or
given as top-level analysis outputs from the airplane model:
- ac|geom|S_ref
- ac|aero|CL_max_flaps30
- ac|weights|MTOW
Inputs
------
ac|* : various
All relevant airplane design variables to pass to the airplane model
takeoff|h : float
Takeoff obstacle clearance height (default 50 ft)
cruise|h0 : float
Initial cruise altitude (default 28000 ft)
payload : float
Mission payload (default 1000 lbm)
mission_range : float
Design range (default 1250 NM)
Outputs
-------
takeoff|v1 : float
Decision speed
Options
-------
aircraft_model : class
An aircraft model class with the standard OpenConcept interfaces promoted correctly
num_nodes : int
Number of analysis points per segment. Higher is more accurate but more expensive
extra_states : tuple
Any extra integrated states to connect across the model.
Format is ('state_var_name', ('segments','to','connect','across'))
"""
def initialize(self):
self.options.declare('num_nodes', default=9, desc="Number of points per segment. Needs to be 2N + 1 due to Simpson's rule")
self.options.declare('aircraft_model', default=None, desc="OpenConcept-compliant airplane model")
self.options.declare('extra_states', default=None, desc="Extra states to connect across mission phases")
def setup(self):
nn = self.options['num_nodes']
acmodelclass = self.options['aircraft_model']
# set the mission parameters directly; this group flies a single cruise segment
# with a specified altitude and duration (no takeoff, climb, or descent)
mp = self.add_subsystem('missionparams',IndepVarComp(),promotes_outputs=['*'])
mp.add_output('takeoff|h',val=0.,units='ft')
mp.add_output('cruise|h0',val=28000.,units='ft')
mp.add_output('mission_range',val=1250.,units='NM')
mp.add_output('payload',val=1000.,units='lbm')
mp.add_output('cruise|duration',val=1.,units='h')
self.add_subsystem('cruise',SteadyFlightPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='cruise'),promotes_inputs=['ac|*'])
# the cruise duration is specified directly via the 'cruise|duration' parameter rather than solved from a design range
self.connect('cruise|duration','cruise.duration')
self.connect('cruise|h0','cruise.fltcond|h_initial')
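A sketch of how CruiseOnly is intended to be driven, assuming an OpenConcept-compliant aircraft model class is available; MyAircraftModel and the range_final output access below are hypothetical placeholders, not part of this file.
```python
import openmdao.api as om
# from some_package import MyAircraftModel  # hypothetical OpenConcept-compliant model

prob = om.Problem()
prob.model = CruiseOnly(num_nodes=9, aircraft_model=MyAircraftModel)
prob.setup()
prob.set_val('cruise|h0', 28000.0, units='ft')   # overrides the 28,000 ft default
prob.set_val('cruise|duration', 1.5, units='h')  # cruise for 90 minutes
prob.run_model()
print(prob.get_val('cruise.range_final', units='NM'))  # distance flown during cruise
```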
class BalancedFieldTakeoff(Group):
"""
This analysis group is set up to compute the balanced-field takeoff only.
To use this analysis, pass in an aircraft model following the OpenConcept interface.
Namely, the model should consume the following:
- flight conditions (fltcond|q/rho/p/T/Utrue/Ueas/...)
- aircraft design parameters (ac|*)
- lift coefficient (fltcond|CL; either solved from steady flight or assumed during ground roll)
- throttle
- propulsor_failed (0 when the propulsor has failed, 1 otherwise)
and produce top-level outputs:
- thrust
- drag
- weight
The following parameters need to either be defined as design variables or
given as top-level analysis outputs from the airplane model:
- ac|geom|S_ref
- ac|aero|CL_max_flaps30
- ac|weights|MTOW
Inputs
------
ac|* : various
All relevant airplane design variables to pass to the airplane model
takeoff|h : float
Takeoff field elevation (default 0 ft, i.e. sea level)
Outputs
-------
takeoff|v1 : float
Decision speed
Options
-------
aircraft_model : class
An aircraft model class with the standard OpenConcept interfaces promoted correctly
num_nodes : int
Number of analysis points per segment. Higher is more accurate but more expensive
extra_states : tuple
Any extra integrated states to connect across the model.
Format is ('state_var_name', ('segments','to','connect','across'))
"""
def initialize(self):
self.options.declare('num_nodes', default=9, desc="Number of points per segment. Needs to be 2N + 1 due to Simpson's rule")
self.options.declare('aircraft_model', default=None, desc="OpenConcept-compliant airplane model")
self.options.declare('extra_states', default=None, desc="Extra states to connect across mission phases")
def setup(self):
nn = self.options['num_nodes']
acmodelclass = self.options['aircraft_model']
# add the four balanced field length takeoff segments and the implicit v1 solver
# v0v1 - from a rolling start to v1 speed
# v1vr - from the decision speed to rotation
# rotate - in the air following rotation in 2DOF
# v1v0 - emergency stopping from v1 to a stop.
mp = self.add_subsystem('missionparams',IndepVarComp(),promotes_outputs=['*'])
mp.add_output('takeoff|h',val=0.,units='ft')
self.add_subsystem('bfl', BFLImplicitSolve(), promotes_outputs=['takeoff|v1'])
self.add_subsystem('v0v1', GroundRollPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='v0v1'), promotes_inputs=['ac|*','takeoff|v1'])
self.add_subsystem('v1vr', GroundRollPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='v1vr'), promotes_inputs=['ac|*'])
self.connect('takeoff|v1','v1vr.fltcond|Utrue_initial')
self.connect('v0v1.range_final','v1vr.range_initial')
self.connect('v0v1.fuel_used_final','v1vr.fuel_used_initial')
self.add_subsystem('rotate',RotationPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='rotate'),promotes_inputs=['ac|*'])
self.connect('v1vr.range_final','rotate.range_initial')
self.connect('v1vr.fltcond|Utrue_final','rotate.fltcond|Utrue_initial')
self.connect('v1vr.fuel_used_final','rotate.fuel_used_initial')
self.connect('rotate.range_final','bfl.distance_continue')
self.connect('v1vr.takeoff|vr','bfl.takeoff|vr')
self.add_subsystem('v1v0',GroundRollPhase(num_nodes=nn, aircraft_model=acmodelclass, flight_phase='v1v0'), promotes_inputs=['ac|*','takeoff|v1'])
self.connect('v0v1.range_final','v1v0.range_initial')
self.connect('v1v0.range_final','bfl.distance_abort')
# connect range, fuel burn, and altitude from the end of each segment to the beginning of the next, in order
extra_states = self.options['extra_states']
if extra_states is not None:
for extra_state in extra_states:
state_name = extra_state[0]
phases = extra_state[1]
for i in range(len(phases) - 1):
from_phase = phases[i]
to_phase = phases[i+1]
self.connect(from_phase+'.'+state_name+'_final',to_phase+'.'+state_name+'_initial')
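Both mission groups chain extra integrated states with the same connect loop: each phase's `<state>_final` output feeds the next phase's `<state>_initial` input. Here is a self-contained sketch of that pattern with two stand-in phases, assuming OpenMDAO; the ExecComp components are illustrative, not real mission phases.
```python
import openmdao.api as om

model = om.Group()
# stand-ins for two mission phases that each accumulate a 'fuel_used' state
model.add_subsystem('climb', om.ExecComp('fuel_used_final = fuel_used_initial + 100.0'))
model.add_subsystem('cruise', om.ExecComp('fuel_used_final = fuel_used_initial + 400.0'))

extra_states = (('fuel_used', ('climb', 'cruise')),)
for state_name, phases in extra_states:
    for i in range(len(phases) - 1):
        model.connect(phases[i] + '.' + state_name + '_final',
                      phases[i + 1] + '.' + state_name + '_initial')

prob = om.Problem(model)
prob.setup()
prob.run_model()
print(prob.get_val('cruise.fuel_used_final'))  # [500.] -- climb fuel carried into cruise
```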
| 51.078431
| 206
| 0.671833
| 2,651
| 20,840
| 5.162957
| 0.103734
| 0.035143
| 0.033901
| 0.038504
| 0.918974
| 0.914298
| 0.914298
| 0.914298
| 0.914298
| 0.911522
| 0
| 0.018057
| 0.221401
| 20,840
| 408
| 207
| 51.078431
| 0.825465
| 0.393426
| 0
| 0.814286
| 0
| 0
| 0.262135
| 0.045091
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0
| 0.014286
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| c7d7ab8b23ea3bcac13dcf34075cfefe821ebdb1
| 8,499
| py
| Python
| src/srvup/api_curl_tests.py
| codingforentrepreneurs/srvup-rest-framework
| 62a71fd083e41f723fe36441cb736b17fb71db6d
| ["Apache-2.0"] | 46
| 2015-02-28T12:43:27.000Z
| 2022-02-03T05:13:23.000Z
| src/srvup/api_curl_tests.py
| belal-bh/srvup-rest-framework
| 62a71fd083e41f723fe36441cb736b17fb71db6d
| ["Apache-2.0"] | null | null | null | src/srvup/api_curl_tests.py
| belal-bh/srvup-rest-framework
| 62a71fd083e41f723fe36441cb736b17fb71db6d
| ["Apache-2.0"] | 30
| 2015-06-28T21:24:42.000Z
| 2021-08-24T11:59:56.000Z
|
curl -X POST -d "username=jmitchel3&password=123" http://127.0.0.1:8000/api/auth/token/
{"active":true,
"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjQ5NDI4MjV9.9H5fKOiOzN7-kCOjYaaYeLn0MvI0HrkujCwyb5l-R5E",
"user":"jmitchel3"}
curl -X POST -d "text='Some text'" http://127.0.0.1:8000/api/comments/.json
{"detail":"Authentication credentials were not provided."}
curl -X POST -d "text='Some text'" http://127.0.0.1:8000/api/comments/.json -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjQ5NDI4MjV9.9H5fKOiOzN7-kCOjYaaYeLn0MvI0HrkujCwyb5l-R5E"
{"user":["This field is required."]}
curl -X POST -d "text='Some text'&user=jmitchel3" http://127.0.0.1:8000/api/comments/.json -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjQ5NDI4MjV9.9H5fKOiOzN7-kCOjYaaYeLn0MvI0HrkujCwyb5l-R5E"
{"user":["Incorrect type. Expected pk value, received unicode."]}
curl -X POST -d "text='Some text'&user=1" http://127.0.0.1:8000/api/comments/.json -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjQ5NDI4MjV9.9H5fKOiOzN7-kCOjYaaYeLn0MvI0HrkujCwyb5l-R5E"
{"url":"http://127.0.0.1:8000/api/comments/133/.json","id":133,"children":[],"user":1,"text":"'Some text'"}
curl -X DELETE http://127.0.0.1:8000/api/comments/133/.json -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjQ5NDI4MjV9.9H5fKOiOzN7-kCOjYaaYeLn0MvI0HrkujCwyb5l-R5E"
#return nil
#GET
curl http://127.0.0.1:8000/api/comments/133/.json -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjQ5NDI4MjV9.9H5fKOiOzN7-kCOjYaaYeLn0MvI0HrkujCwyb5l-R5E"
{"detail":"Not found"}
curl -X POST -d "text='Some text'&user=1&video=24" http://127.0.0.1:8000/api/comments/.json -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjQ5NDI4MjV9.9H5fKOiOzN7-kCOjYaaYeLn0MvI0HrkujCwyb5l-R5E"
{"video":["Invalid hyperlink - No URL match"]}
curl -X POST -d "text=This is some great video AGAIN&user=1&video=http://127.0.0.1:8000/api/videos/24/" http://127.0.0.1:8000/api/comments/.json -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjQ5NDI4MjV9.9H5fKOiOzN7-kCOjYaaYeLn0MvI0HrkujCwyb5l-R5E"
{"url":"http://127.0.0.1:8000/api/comments/135/.json","id":135,"children":[],"user":1,"video":"http://127.0.0.1:8000/api/videos/24/.json","text":"This is some great video AGAIN"}
curl -X POST -d "text=Even newer&user=1&parent=229" http://127.0.0.1:8000/api/comments/.json -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MzUwOTExMTV9.ZKrCIMvcj32RNrV9f26A7Zsz15bfC6tePErjfMXGeYk"
{"url":"http://127.0.0.1:8000/api/comments/136/.json","id":136,"children":[],"user":1,"video":null,"text":"'Some text'"}
curl -X POST -d "text='NEW CHILD COMMENT'&user=1&parent=http://127.0.0.1:8000/api/comments/135/" http://127.0.0.1:8000/api/comments/.json -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjQ5NDI4MjV9.9H5fKOiOzN7-kCOjYaaYeLn0MvI0HrkujCwyb5l-R5E"
{"url":"http://127.0.0.1:8000/api/comments/139/.json","id":139,"children":[],"parent":"http://127.0.0.1:8000/api/comments/135/.json","user":1,"video":null,"text":"'NEW CHILD COMMENT'"}
#API 2 with CBViews
curl -X POST -d "text='NEW CHILD COMMENT'&user=1&parent=http://127.0.0.1:8000/api2/comment/135/" http://127.0.0.1:8000/api2/comment/133/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjUwMjExNjR9.3OtWG20Dx1hl4vSaznokkuW9fBee4MBYfM742b1G2vA"
{"detail":"Method 'POST' not allowed."}
curl -X DELETE -d "text='NEW CHILD COMMENT'&user=1&parent=http://127.0.0.1:8000/api/comments/135/" http://127.0.0.1:8000/api2/projects/djangogap/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjUwMjExNjR9.3OtWG20Dx1hl4vSaznokkuW9fBee4MBYfM742b1G2vA"
{"detail":"Method 'DELETE' not allowed."}
curl -X PUT -d "text='NEW CHILD COMMENT'&user=1&parent=http://127.0.0.1:8000/api/comments/135/" http://127.0.0.1:8000/api2/projects/djangogap/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjUwMjExNjR9.3OtWG20Dx1hl4vSaznokkuW9fBee4MBYfM742b1G2vA"
{"detail":"Method 'PUT' not allowed."}
curl http://127.0.0.1:8000/api2/projects/djangogap/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjUwMjExNjR9.3OtWG20Dx1hl4vSaznokkuW9fBee4MBYfM742b1G2vA"
{"url":"http://127.0.0.1:8000/api2/projects/djangogap/","id":2,"slug":"djangogap","title":"DjangoGap","description":"","image":"http://127.0.0.1:8000/media/images/djangogap.png"}
curl http://127.0.0.1:8000/api2/projects/djangogap/
{"detail":"Authentication credentials were not provided."}
curl -X PUT -d "text=YET ANOTHER AWESOME NEW COMMENT" http://127.0.0.1:8000/api2/comment/135/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjU0NTc1ODV9.5aA5mTmyuoMI0BhMYPL03qI-vwnE-pSzWe-14yDMnS8"
{"id":135,"user":"jmitchel3","text":"'YET ANOTHER AWESOME NEW COMMENT'"}
curl -X DELETE http://127.0.0.1:8000/api2/comment/143/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjU0NTc1ODV9.5aA5mTmyuoMI0BhMYPL03qI-vwnE-pSzWe-14yDMnS8"
{"detail":"You do not have permission to perform this action."}
curl -X DELETE http://127.0.0.1:8000/api2/comment/142/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjU0NTc1ODV9.5aA5mTmyuoMI0BhMYPL03qI-vwnE-pSzWe-14yDMnS8"
#returned 204
curl -X POST -d "text='NEW COMMENT'&user=1&parent=229" http://127.0.0.1:8000/api2/comment/create/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MzUwOTExMTV9.ZKrCIMvcj32RNrV9f26A7Zsz15bfC6tePErjfMXGeYk"
{"text":"'NEW COMMENT'","user":1,"video":null,"parent":146}
curl -X PUT -d "text=YET ANOTHER AWESOME NEW COMMENT" http://127.0.0.1:8000/api2/comment/147/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjU0NTc1ODV9.5aA5mTmyuoMI0BhMYPL03qI-vwnE-pSzWe-14yDMnS8"
curl -X DELETE http://127.0.0.1:8000/api2/comment/147/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjU0NTc1ODV9.5aA5mTmyuoMI0BhMYPL03qI-vwnE-pSzWe-14yDMnS8"
curl -X POST -d "text=YEAHH IT's working&user=1&video=23" http://127.0.0.1:8000/api2/comment/create/ -H "Authorization: JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImptaXRjaGVsMyIsInVzZXJfaWQiOjEsImVtYWlsIjoiY29kaW5nZm9yZW50cmVwcmVuZXVyc0BnbWFpbC5jb20iLCJleHAiOjE0MjU0NTc1ODV9.5aA5mTmyuoMI0BhMYPL03qI-vwnE-pSzWe-14yDMnS8"
| 87.618557
| 379
| 0.835392
| 819
| 8,499
| 8.669109
| 0.151404
| 0.035493
| 0.040563
| 0.045634
| 0.919577
| 0.902254
| 0.815915
| 0.804366
| 0.750986
| 0.613521
| 0
| 0.134038
| 0.043182
| 8,499
| 96
| 380
| 88.53125
| 0.739056
| 0.005059
| 0
| 0.046512
| 0
| 0.116279
| 0.777936
| 0.534209
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.023256
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 1be5344462a22f58f8693adeed04e2bcf815f0e3
| 16,663
| py
| Python
| fasttest/drivers/macaca/driver_macaca.py
| xinxi1990/fasttest
| 51c807f038e9b03ae31b658815ca1d1b422d41a7
| ["MIT"] | null | null | null | fasttest/drivers/macaca/driver_macaca.py
| xinxi1990/fasttest
| 51c807f038e9b03ae31b658815ca1d1b422d41a7
| ["MIT"] | null | null | null | fasttest/drivers/macaca/driver_macaca.py
| xinxi1990/fasttest
| 51c807f038e9b03ae31b658815ca1d1b422d41a7
| ["MIT"] | 1
| 2020-12-15T03:42:41.000Z
| 2020-12-15T03:42:41.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import traceback
import subprocess
from fasttest.common import *
class AndroidDriver(object):
@staticmethod
def adb_shell(cmd):
'''
Run an adb command against the device identified by Var.udid.
:param cmd: adb subcommand string, e.g. "shell input keyevent 111"
:return:
'''
try:
log_info('adb {}'.format(cmd))
if cmd.startswith('shell'):
cmd = ["adb", "-s", Var.udid, "shell", "{}".format(cmd.lstrip('shell').strip())]
pipe = subprocess.Popen(cmd, stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
out = pipe.communicate()
else:
cmd = ["adb", "-s", Var.udid, "{}".format(cmd)]
os.system(' '.join(cmd))
except:
raise Exception(traceback.format_exc())
@staticmethod
def install_app(app_path):
'''
install app
:param app_path:
:return:
'''
try:
AndroidDriver.adb_shell('install -r {}'.format(app_path))
except Exception as e:
raise e
@staticmethod
def uninstall_app(package_info):
'''
uninstall app
:param package_info: Android(package) or iOS(bundleId)
:return:
'''
try:
AndroidDriver.adb_shell('uninstall {}'.format(package_info))
except Exception as e:
raise e
@staticmethod
def launch_app(package_info):
'''
launch app
:param package_info: Android(package/activity) or iOS(bundleId)
:return:
'''
try:
if not package_info:
AndroidDriver.adb_shell('shell am start -W {}/{}'.format(Var.package, Var.activity))
else:
AndroidDriver.adb_shell('shell am start -W {}'.format(package_info))
except Exception as e:
raise e
@staticmethod
def close_app(package_info):
'''
close app
:param package_info: Android(package) or iOS(bundleId)
:return:
'''
try:
if not package_info:
AndroidDriver.adb_shell('shell am force-stop {}'.format(Var.package))
else:
AndroidDriver.adb_shell('shell am force-stop {}'.format(package_info))
except Exception as e:
raise e
@staticmethod
def tap(x, y):
'''
:param x:
:param y:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
if x <= 1.0:
x = x * width
if y <= 1.0:
y = y * height
Var.instance.touch('tap', {'x': x, 'y': y})
except Exception as e:
raise e
@staticmethod
def double_tap(x, y):
'''
:param x:
:param y:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
if x <= 1.0:
x = x * width
if y <= 1.0:
y = y * height
Var.instance.touch('doubleTap', {'x': x, 'y': y})
except Exception as e:
raise e
@staticmethod
def press(x, y, duration=2):
'''
:param x:
:param y:
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
if x <= 1.0:
x = x * width
if y <= 1.0:
y = y * height
Var.instance.touch('press', {'x': x, 'y': y, 'duration': duration})
except Exception as e:
raise e
# renamed from a duplicate 'press' definition, which would otherwise shadow the coordinate-based press() above
@staticmethod
def press_element(element, duration=2):
'''
:param element:
:param duration:
:return:
'''
try:
element.touch('press', {'duration': duration})
except Exception as e:
raise e
@staticmethod
def swipe_up(duration=2):
'''
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
AndroidDriver.swipe(width / 2, height * 3 / 4, width / 2, height / 4, duration)
except Exception as e:
raise e
@staticmethod
def swipe_down(duration=2):
'''
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
AndroidDriver.swipe(width / 2, height / 4, width / 2, height * 3 / 4, duration)
except Exception as e:
raise e
@staticmethod
def swipe_left(duration=2):
'''
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
AndroidDriver.swipe(width * 3 / 4, height / 2, width / 4, height / 2, duration)
except Exception as e:
raise e
@staticmethod
def swipe_right(duration=2):
'''
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
AndroidDriver.swipe(width / 4, height / 2, width * 3 / 4, height / 2, duration)
except Exception as e:
raise e
@staticmethod
def swipe(from_x, from_y, to_x, to_y, duration=2):
'''
:param from_x:
:param from_y:
:param to_x:
:param to_y:
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
if from_x <= 1.0:
from_x = from_x * width
if from_y <= 1.0:
from_y = from_y * height
if to_x <= 1.0:
to_x = to_x * width
if to_y <= 1.0:
to_y = to_y * height
AndroidDriver.adb_shell('shell input swipe {} {} {} {} {}'.format(from_x, from_y, to_x, to_y, duration * 100))
except Exception as e:
raise e
@staticmethod
def input(element, text, clear=True, hide_keyboard=True):
'''
:param element:
:param text:
:param clear:
:param hide_keyboard:
:return:
'''
try:
# if clear:
# AndroidDriver.clear()
# if hide_keyboard:
# AndroidDriver.hide_keyboard()
# element.click()
element.send_keys(text)
except Exception as e:
raise e
@staticmethod
def get_text(element):
'''
:param element:
:return:
'''
try:
text = element.text
return text
except Exception as e:
raise e
@staticmethod
def clear():
'''
:return:
'''
try:
Var.instance.clear()
except:
traceback.print_exc()
@staticmethod
def hide_keyboard():
'''
:return:
'''
try:
AndroidDriver.adb_shell('shell input keyevent 111')
except:
traceback.print_exc()
@staticmethod
def wait_for_elements_by_id(id, timeout=10, interval=1):
'''
:param id:
:return:
'''
try:
elements = Var.instance.wait_for_elements_by_id(id,int(timeout)*1000,int(interval)*1000)
return elements
except:
return None
@staticmethod
def wait_for_elements_by_name(name, timeout=10, interval=1):
'''
:param name:
:return:
'''
try:
elements = Var.instance.wait_for_elements_by_name(name,int(timeout)*1000,int(interval)*1000)
return elements
except:
return None
@staticmethod
def wait_for_elements_by_xpath(xpath, timeout=10, interval=1):
'''
:param xpath:
:return:
'''
try:
elements = Var.instance.wait_for_elements_by_xpath(xpath,int(timeout)*1000,int(interval)*1000)
return elements
except:
return None
@staticmethod
def wait_for_elements_by_classname(classname, timeout=10, interval=1):
'''
:param classname:
:return:
'''
try:
elements = Var.instance.wait_for_elements_by_class_name(classname,int(timeout)*1000,int(interval)*1000)
return elements
except:
return None
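A short usage sketch of the coordinate convention above: tap/press/swipe treat values of 1.0 or less as fractions of the screen and larger values as raw pixels. This assumes fasttest has already bound a running Macaca session to Var.instance and a device serial to Var.udid.
```python
AndroidDriver.tap(0.5, 0.5)     # ratios <= 1.0 are scaled by the window size
AndroidDriver.tap(540, 960)     # values > 1.0 are used as raw pixel coordinates
AndroidDriver.swipe(0.8, 0.5, 0.2, 0.5, duration=2)  # right-to-left swipe
AndroidDriver.adb_shell('shell input keyevent 4')    # send the BACK key via adb
```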
class iOSDriver(object):
@staticmethod
def install_app(app_path):
'''
install app
:param app_path:
:return:
'''
try:
os.system('ideviceinstaller -u {} -i {}'.format(Var.udid, app_path))
except Exception as e:
raise e
@staticmethod
def uninstall_app(package_info):
'''
uninstall app
:param package_info: Android(package) or iOS(bundleId)
:return:
'''
try:
os.system('ideviceinstaller -u {} -U {}'.format(Var.udid, package_info))
except Exception as e:
raise e
@staticmethod
def launch_app(package_info):
'''
launch app
:param package_info: Android(package/activity) or iOS(bundleId)
:return:
'''
try:
pass # TODO: not yet implemented
except Exception as e:
raise e
@staticmethod
def close_app(package_info):
'''
close app
:param package_info: Android(package) or iOS(bundleId)
:return:
'''
try:
pass # TODO: not yet implemented
except Exception as e:
raise e
@staticmethod
def tap(x, y):
'''
:param x:
:param y:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
if x <= 1.0:
x = x * width
if y <= 1.0:
y = y * height
Var.instance.touch('tap', {'x': x, 'y': y})
except Exception as e:
raise e
@staticmethod
def double_tap(x, y):
'''
:param x:
:param y:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
if x <= 1.0:
x = x * width
if y <= 1.0:
y = y * height
Var.instance.touch('doubleTap', {'x': x, 'y': y})
except Exception as e:
raise e
@staticmethod
def press(x, y, duration=2):
'''
:param x:
:param y:
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
if x <= 1.0:
x = x * width
if y <= 1.0:
y = y * height
Var.instance.touch('press', {'x': x, 'y': y, 'duration': duration})
except Exception as e:
raise e
# renamed from a duplicate 'press' definition, which would otherwise shadow the coordinate-based press() above
@staticmethod
def press_element(element, duration=2):
'''
:param element:
:param duration:
:return:
'''
try:
element.touch('press', {'duration': duration})
except Exception as e:
raise e
@staticmethod
def swipe_up(duration=2):
'''
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
iOSDriver.swipe(width / 2, height * 3 / 4, width / 2, height / 4, duration)
except Exception as e:
raise e
@staticmethod
def swipe_down(duration=2):
'''
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
iOSDriver.swipe(width / 2, height / 4, width / 2, height * 3 / 4, duration)
except Exception as e:
raise e
@staticmethod
def swipe_left(duration=2):
'''
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
iOSDriver.swipe(width * 3 / 4, height / 2, width / 4, height / 2, duration)
except Exception as e:
raise e
@staticmethod
def swipe_right(duration=2):
'''
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
iOSDriver.swipe(width / 4, height / 2, width * 3 / 4, height / 2, duration)
except Exception as e:
raise e
@staticmethod
def swipe(from_x, from_y, to_x, to_y, duration=2):
'''
:param from_x:
:param from_y:
:param to_x:
:param to_y:
:param duration:
:return:
'''
try:
width = Var.instance.get_window_size()['width']
height = Var.instance.get_window_size()['height']
if from_x <= 1.0:
from_x = from_x * width
if from_y <= 1.0:
from_y = from_y * height
if to_x <= 1.0:
to_x = to_x * width
if to_y <= 1.0:
to_y = to_y * height
Var.instance.touch('drag', { 'fromX': from_x, 'fromY': from_y, 'toX': to_x, 'toY': to_y, 'duration': duration})
except Exception as e:
raise e
@staticmethod
def input(element, text, clear=True, hide_keyboard=True):
'''
:param element:
:param text:
:param clear:
:param hide_keyboard:
:return:
'''
try:
# if clear:
# iOSDriver.clear()
# if hide_keyboard:
# iOSDriver.hide_keyboard()
# element.click()
element.send_keys(text)
except Exception as e:
raise e
@staticmethod
def get_text(element):
'''
:param element:
:return:
'''
try:
text = element.text
return text
except Exception as e:
raise e
@staticmethod
def clear():
'''
:return:
'''
try:
Var.instance.clear()
except:
traceback.print_exc()
@staticmethod
def hide_keyboard():
'''
:return:
'''
try:
pass # TODO: not yet implemented
except:
traceback.print_exc()
@staticmethod
def wait_for_elements_by_id(id, timeout=10, interval=1):
'''
:param id:
:return:
'''
try:
elements = Var.instance.wait_for_elements_by_id(id,int(timeout)*1000,int(interval)*1000)
return elements
except:
return None
@staticmethod
def wait_for_elements_by_name(name, timeout=10, interval=1):
'''
:param name:
:return:
'''
try:
elements = Var.instance.wait_for_elements_by_name(name,int(timeout)*1000,int(interval)*1000)
return elements
except:
return None
@staticmethod
def wait_for_elements_by_xpath(xpath, timeout=10, interval=1):
'''
:param xpath:
:return:
'''
try:
elements = Var.instance.wait_for_elements_by_xpath(xpath,int(timeout)*1000,int(interval)*1000)
return elements
except:
return None
@staticmethod
def wait_for_elements_by_classname(classname, timeout=10, interval=1):
'''
:param classname:
:return:
'''
try:
elements = Var.instance.wait_for_elements_by_class_name(classname,int(timeout)*1000,int(interval)*1000)
return elements
except:
return None
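The wait_for_elements_* helpers on both drivers convert second-based timeouts to the milliseconds the Macaca client expects and return None on timeout instead of raising, so callers can branch on the result. A small sketch, with an illustrative element id:
```python
elements = AndroidDriver.wait_for_elements_by_id('com.example:id/login',
                                                 timeout=10, interval=1)
if elements is None:
    print('login field never appeared within 10 s')
else:
    AndroidDriver.input(elements[0], 'hello')  # uses element.send_keys under the hood
```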
| 26.875806
| 123
| 0.495049
| 1,759
| 16,663
| 4.548039
| 0.076748
| 0.067375
| 0.056
| 0.08
| 0.923125
| 0.904625
| 0.896
| 0.896
| 0.88375
| 0.88075
| 0
| 0.018712
| 0.390626
| 16,663
| 620
| 124
| 26.875806
| 0.769155
| 0.115525
| 0
| 0.897222
| 0
| 0
| 0.041923
| 0
| 0
| 0
| 0
| 0.004839
| 0
| 1
| 0.119444
| false
| 0.008333
| 0.011111
| 0
| 0.186111
| 0.011111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 1be8f70c43853eaa607d79ffa9df9fb35b9223b0
| 52,010
| py
| Python
| sdk/python/pulumi_azure/keyvault/key_vault.py
| ScriptBox99/pulumi-azure
| 1b8c6d5479ccabc39094741eac25a8ca44c8833a
| ["ECL-2.0", "Apache-2.0"] | 109
| 2018-06-18T00:19:44.000Z
| 2022-02-20T05:32:57.000Z
| sdk/python/pulumi_azure/keyvault/key_vault.py
| ScriptBox99/pulumi-azure
| 1b8c6d5479ccabc39094741eac25a8ca44c8833a
| ["ECL-2.0", "Apache-2.0"] | 663
| 2018-06-18T21:08:46.000Z
| 2022-03-31T20:10:11.000Z
| sdk/python/pulumi_azure/keyvault/key_vault.py
| ScriptBox99/pulumi-azure
| 1b8c6d5479ccabc39094741eac25a8ca44c8833a
| ["ECL-2.0", "Apache-2.0"] | 41
| 2018-07-19T22:37:38.000Z
| 2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['KeyVaultArgs', 'KeyVault']
@pulumi.input_type
class KeyVaultArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
sku_name: pulumi.Input[str],
tenant_id: pulumi.Input[str],
access_policies: Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultAccessPolicyArgs']]]] = None,
contacts: Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultContactArgs']]]] = None,
enable_rbac_authorization: Optional[pulumi.Input[bool]] = None,
enabled_for_deployment: Optional[pulumi.Input[bool]] = None,
enabled_for_disk_encryption: Optional[pulumi.Input[bool]] = None,
enabled_for_template_deployment: Optional[pulumi.Input[bool]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_acls: Optional[pulumi.Input['KeyVaultNetworkAclsArgs']] = None,
purge_protection_enabled: Optional[pulumi.Input[bool]] = None,
soft_delete_enabled: Optional[pulumi.Input[bool]] = None,
soft_delete_retention_days: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a KeyVault resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Key Vault. Changing this forces a new resource to be created.
:param pulumi.Input[str] sku_name: The Name of the SKU used for this Key Vault. Possible values are `standard` and `premium`.
:param pulumi.Input[str] tenant_id: The Azure Active Directory tenant ID that should be used for authenticating requests to the key vault.
:param pulumi.Input[Sequence[pulumi.Input['KeyVaultAccessPolicyArgs']]] access_policies: A list of up to 16 objects describing access policies, as described below.
:param pulumi.Input[Sequence[pulumi.Input['KeyVaultContactArgs']]] contacts: One or more `contact` block as defined below.
:param pulumi.Input[bool] enable_rbac_authorization: Boolean flag to specify whether Azure Key Vault uses Role Based Access Control (RBAC) for authorization of data actions. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_deployment: Boolean flag to specify whether Azure Virtual Machines are permitted to retrieve certificates stored as secrets from the key vault. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_disk_encryption: Boolean flag to specify whether Azure Disk Encryption is permitted to retrieve secrets from the vault and unwrap keys. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_template_deployment: Boolean flag to specify whether Azure Resource Manager is permitted to retrieve secrets from the key vault. Defaults to `false`.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the Key Vault. Changing this forces a new resource to be created.
:param pulumi.Input['KeyVaultNetworkAclsArgs'] network_acls: A `network_acls` block as defined below.
:param pulumi.Input[bool] purge_protection_enabled: Is Purge Protection enabled for this Key Vault? Defaults to `false`.
:param pulumi.Input[int] soft_delete_retention_days: The number of days that items should be retained for once soft-deleted. This value can be between `7` and `90` (the default) days.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "sku_name", sku_name)
pulumi.set(__self__, "tenant_id", tenant_id)
if access_policies is not None:
pulumi.set(__self__, "access_policies", access_policies)
if contacts is not None:
pulumi.set(__self__, "contacts", contacts)
if enable_rbac_authorization is not None:
pulumi.set(__self__, "enable_rbac_authorization", enable_rbac_authorization)
if enabled_for_deployment is not None:
pulumi.set(__self__, "enabled_for_deployment", enabled_for_deployment)
if enabled_for_disk_encryption is not None:
pulumi.set(__self__, "enabled_for_disk_encryption", enabled_for_disk_encryption)
if enabled_for_template_deployment is not None:
pulumi.set(__self__, "enabled_for_template_deployment", enabled_for_template_deployment)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if network_acls is not None:
pulumi.set(__self__, "network_acls", network_acls)
if purge_protection_enabled is not None:
pulumi.set(__self__, "purge_protection_enabled", purge_protection_enabled)
if soft_delete_enabled is not None:
warnings.warn("""Azure has removed support for disabling Soft Delete as of 2020-12-15, as such this field is no longer configurable and can be safely removed. This field will be removed in version 3.0 of the Azure Provider.""", DeprecationWarning)
pulumi.log.warn("""soft_delete_enabled is deprecated: Azure has removed support for disabling Soft Delete as of 2020-12-15, as such this field is no longer configurable and can be safely removed. This field will be removed in version 3.0 of the Azure Provider.""")
if soft_delete_enabled is not None:
pulumi.set(__self__, "soft_delete_enabled", soft_delete_enabled)
if soft_delete_retention_days is not None:
pulumi.set(__self__, "soft_delete_retention_days", soft_delete_retention_days)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group in which to create the Key Vault. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="skuName")
def sku_name(self) -> pulumi.Input[str]:
"""
The Name of the SKU used for this Key Vault. Possible values are `standard` and `premium`.
"""
return pulumi.get(self, "sku_name")
@sku_name.setter
def sku_name(self, value: pulumi.Input[str]):
pulumi.set(self, "sku_name", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> pulumi.Input[str]:
"""
The Azure Active Directory tenant ID that should be used for authenticating requests to the key vault.
"""
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: pulumi.Input[str]):
pulumi.set(self, "tenant_id", value)
@property
@pulumi.getter(name="accessPolicies")
def access_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultAccessPolicyArgs']]]]:
"""
A list of up to 16 objects describing access policies, as described below.
"""
return pulumi.get(self, "access_policies")
@access_policies.setter
def access_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultAccessPolicyArgs']]]]):
pulumi.set(self, "access_policies", value)
@property
@pulumi.getter
def contacts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultContactArgs']]]]:
"""
One or more `contact` block as defined below.
"""
return pulumi.get(self, "contacts")
@contacts.setter
def contacts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultContactArgs']]]]):
pulumi.set(self, "contacts", value)
@property
@pulumi.getter(name="enableRbacAuthorization")
def enable_rbac_authorization(self) -> Optional[pulumi.Input[bool]]:
"""
Boolean flag to specify whether Azure Key Vault uses Role Based Access Control (RBAC) for authorization of data actions. Defaults to `false`.
"""
return pulumi.get(self, "enable_rbac_authorization")
@enable_rbac_authorization.setter
def enable_rbac_authorization(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_rbac_authorization", value)
@property
@pulumi.getter(name="enabledForDeployment")
def enabled_for_deployment(self) -> Optional[pulumi.Input[bool]]:
"""
Boolean flag to specify whether Azure Virtual Machines are permitted to retrieve certificates stored as secrets from the key vault. Defaults to `false`.
"""
return pulumi.get(self, "enabled_for_deployment")
@enabled_for_deployment.setter
def enabled_for_deployment(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled_for_deployment", value)
@property
@pulumi.getter(name="enabledForDiskEncryption")
def enabled_for_disk_encryption(self) -> Optional[pulumi.Input[bool]]:
"""
Boolean flag to specify whether Azure Disk Encryption is permitted to retrieve secrets from the vault and unwrap keys. Defaults to `false`.
"""
return pulumi.get(self, "enabled_for_disk_encryption")
@enabled_for_disk_encryption.setter
def enabled_for_disk_encryption(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled_for_disk_encryption", value)
@property
@pulumi.getter(name="enabledForTemplateDeployment")
def enabled_for_template_deployment(self) -> Optional[pulumi.Input[bool]]:
"""
Boolean flag to specify whether Azure Resource Manager is permitted to retrieve secrets from the key vault. Defaults to `false`.
"""
return pulumi.get(self, "enabled_for_template_deployment")
@enabled_for_template_deployment.setter
def enabled_for_template_deployment(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled_for_template_deployment", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the Key Vault. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkAcls")
def network_acls(self) -> Optional[pulumi.Input['KeyVaultNetworkAclsArgs']]:
"""
A `network_acls` block as defined below.
"""
return pulumi.get(self, "network_acls")
@network_acls.setter
def network_acls(self, value: Optional[pulumi.Input['KeyVaultNetworkAclsArgs']]):
pulumi.set(self, "network_acls", value)
@property
@pulumi.getter(name="purgeProtectionEnabled")
def purge_protection_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Is Purge Protection enabled for this Key Vault? Defaults to `false`.
"""
return pulumi.get(self, "purge_protection_enabled")
@purge_protection_enabled.setter
def purge_protection_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "purge_protection_enabled", value)
@property
@pulumi.getter(name="softDeleteEnabled")
def soft_delete_enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "soft_delete_enabled")
@soft_delete_enabled.setter
def soft_delete_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "soft_delete_enabled", value)
@property
@pulumi.getter(name="softDeleteRetentionDays")
def soft_delete_retention_days(self) -> Optional[pulumi.Input[int]]:
"""
The number of days that items should be retained for once soft-deleted. This value can be between `7` and `90` (the default) days.
"""
return pulumi.get(self, "soft_delete_retention_days")
@soft_delete_retention_days.setter
def soft_delete_retention_days(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "soft_delete_retention_days", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _KeyVaultState:
def __init__(__self__, *,
access_policies: Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultAccessPolicyArgs']]]] = None,
contacts: Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultContactArgs']]]] = None,
enable_rbac_authorization: Optional[pulumi.Input[bool]] = None,
enabled_for_deployment: Optional[pulumi.Input[bool]] = None,
enabled_for_disk_encryption: Optional[pulumi.Input[bool]] = None,
enabled_for_template_deployment: Optional[pulumi.Input[bool]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_acls: Optional[pulumi.Input['KeyVaultNetworkAclsArgs']] = None,
purge_protection_enabled: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku_name: Optional[pulumi.Input[str]] = None,
soft_delete_enabled: Optional[pulumi.Input[bool]] = None,
soft_delete_retention_days: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
vault_uri: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering KeyVault resources.
:param pulumi.Input[Sequence[pulumi.Input['KeyVaultAccessPolicyArgs']]] access_policies: A list of up to 16 objects describing access policies, as described below.
:param pulumi.Input[Sequence[pulumi.Input['KeyVaultContactArgs']]] contacts: One or more `contact` block as defined below.
:param pulumi.Input[bool] enable_rbac_authorization: Boolean flag to specify whether Azure Key Vault uses Role Based Access Control (RBAC) for authorization of data actions. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_deployment: Boolean flag to specify whether Azure Virtual Machines are permitted to retrieve certificates stored as secrets from the key vault. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_disk_encryption: Boolean flag to specify whether Azure Disk Encryption is permitted to retrieve secrets from the vault and unwrap keys. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_template_deployment: Boolean flag to specify whether Azure Resource Manager is permitted to retrieve secrets from the key vault. Defaults to `false`.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the Key Vault. Changing this forces a new resource to be created.
:param pulumi.Input['KeyVaultNetworkAclsArgs'] network_acls: A `network_acls` block as defined below.
:param pulumi.Input[bool] purge_protection_enabled: Is Purge Protection enabled for this Key Vault? Defaults to `false`.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Key Vault. Changing this forces a new resource to be created.
:param pulumi.Input[str] sku_name: The Name of the SKU used for this Key Vault. Possible values are `standard` and `premium`.
:param pulumi.Input[int] soft_delete_retention_days: The number of days that items should be retained for once soft-deleted. This value can be between `7` and `90` (the default) days.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[str] tenant_id: The Azure Active Directory tenant ID that should be used for authenticating requests to the key vault.
:param pulumi.Input[str] vault_uri: The URI of the Key Vault, used for performing operations on keys and secrets.
"""
if access_policies is not None:
pulumi.set(__self__, "access_policies", access_policies)
if contacts is not None:
pulumi.set(__self__, "contacts", contacts)
if enable_rbac_authorization is not None:
pulumi.set(__self__, "enable_rbac_authorization", enable_rbac_authorization)
if enabled_for_deployment is not None:
pulumi.set(__self__, "enabled_for_deployment", enabled_for_deployment)
if enabled_for_disk_encryption is not None:
pulumi.set(__self__, "enabled_for_disk_encryption", enabled_for_disk_encryption)
if enabled_for_template_deployment is not None:
pulumi.set(__self__, "enabled_for_template_deployment", enabled_for_template_deployment)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if network_acls is not None:
pulumi.set(__self__, "network_acls", network_acls)
if purge_protection_enabled is not None:
pulumi.set(__self__, "purge_protection_enabled", purge_protection_enabled)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if sku_name is not None:
pulumi.set(__self__, "sku_name", sku_name)
if soft_delete_enabled is not None:
warnings.warn("""Azure has removed support for disabling Soft Delete as of 2020-12-15, as such this field is no longer configurable and can be safely removed. This field will be removed in version 3.0 of the Azure Provider.""", DeprecationWarning)
pulumi.log.warn("""soft_delete_enabled is deprecated: Azure has removed support for disabling Soft Delete as of 2020-12-15, as such this field is no longer configurable and can be safely removed. This field will be removed in version 3.0 of the Azure Provider.""")
if soft_delete_enabled is not None:
pulumi.set(__self__, "soft_delete_enabled", soft_delete_enabled)
if soft_delete_retention_days is not None:
pulumi.set(__self__, "soft_delete_retention_days", soft_delete_retention_days)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
if vault_uri is not None:
pulumi.set(__self__, "vault_uri", vault_uri)
@property
@pulumi.getter(name="accessPolicies")
def access_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultAccessPolicyArgs']]]]:
"""
A list of up to 16 objects describing access policies, as described below.
"""
return pulumi.get(self, "access_policies")
@access_policies.setter
def access_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultAccessPolicyArgs']]]]):
pulumi.set(self, "access_policies", value)
@property
@pulumi.getter
def contacts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultContactArgs']]]]:
"""
One or more `contact` block as defined below.
"""
return pulumi.get(self, "contacts")
@contacts.setter
def contacts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KeyVaultContactArgs']]]]):
pulumi.set(self, "contacts", value)
@property
@pulumi.getter(name="enableRbacAuthorization")
def enable_rbac_authorization(self) -> Optional[pulumi.Input[bool]]:
"""
Boolean flag to specify whether Azure Key Vault uses Role Based Access Control (RBAC) for authorization of data actions. Defaults to `false`.
"""
return pulumi.get(self, "enable_rbac_authorization")
@enable_rbac_authorization.setter
def enable_rbac_authorization(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_rbac_authorization", value)
@property
@pulumi.getter(name="enabledForDeployment")
def enabled_for_deployment(self) -> Optional[pulumi.Input[bool]]:
"""
Boolean flag to specify whether Azure Virtual Machines are permitted to retrieve certificates stored as secrets from the key vault. Defaults to `false`.
"""
return pulumi.get(self, "enabled_for_deployment")
@enabled_for_deployment.setter
def enabled_for_deployment(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled_for_deployment", value)
@property
@pulumi.getter(name="enabledForDiskEncryption")
def enabled_for_disk_encryption(self) -> Optional[pulumi.Input[bool]]:
"""
Boolean flag to specify whether Azure Disk Encryption is permitted to retrieve secrets from the vault and unwrap keys. Defaults to `false`.
"""
return pulumi.get(self, "enabled_for_disk_encryption")
@enabled_for_disk_encryption.setter
def enabled_for_disk_encryption(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled_for_disk_encryption", value)
@property
@pulumi.getter(name="enabledForTemplateDeployment")
def enabled_for_template_deployment(self) -> Optional[pulumi.Input[bool]]:
"""
Boolean flag to specify whether Azure Resource Manager is permitted to retrieve secrets from the key vault. Defaults to `false`.
"""
return pulumi.get(self, "enabled_for_template_deployment")
@enabled_for_template_deployment.setter
def enabled_for_template_deployment(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled_for_template_deployment", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the Key Vault. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkAcls")
def network_acls(self) -> Optional[pulumi.Input['KeyVaultNetworkAclsArgs']]:
"""
A `network_acls` block as defined below.
"""
return pulumi.get(self, "network_acls")
@network_acls.setter
def network_acls(self, value: Optional[pulumi.Input['KeyVaultNetworkAclsArgs']]):
pulumi.set(self, "network_acls", value)
@property
@pulumi.getter(name="purgeProtectionEnabled")
def purge_protection_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Is Purge Protection enabled for this Key Vault? Defaults to `false`.
"""
return pulumi.get(self, "purge_protection_enabled")
@purge_protection_enabled.setter
def purge_protection_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "purge_protection_enabled", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource group in which to create the Key Vault. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="skuName")
def sku_name(self) -> Optional[pulumi.Input[str]]:
"""
The Name of the SKU used for this Key Vault. Possible values are `standard` and `premium`.
"""
return pulumi.get(self, "sku_name")
@sku_name.setter
def sku_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sku_name", value)
@property
@pulumi.getter(name="softDeleteEnabled")
def soft_delete_enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "soft_delete_enabled")
@soft_delete_enabled.setter
def soft_delete_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "soft_delete_enabled", value)
@property
@pulumi.getter(name="softDeleteRetentionDays")
def soft_delete_retention_days(self) -> Optional[pulumi.Input[int]]:
"""
The number of days that items should be retained for once soft-deleted. This value can be between `7` and `90` (the default) days.
"""
return pulumi.get(self, "soft_delete_retention_days")
@soft_delete_retention_days.setter
def soft_delete_retention_days(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "soft_delete_retention_days", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[pulumi.Input[str]]:
"""
The Azure Active Directory tenant ID that should be used for authenticating requests to the key vault.
"""
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tenant_id", value)
@property
@pulumi.getter(name="vaultUri")
def vault_uri(self) -> Optional[pulumi.Input[str]]:
"""
The URI of the Key Vault, used for performing operations on keys and secrets.
"""
return pulumi.get(self, "vault_uri")
@vault_uri.setter
def vault_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vault_uri", value)
class KeyVault(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultAccessPolicyArgs']]]]] = None,
contacts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultContactArgs']]]]] = None,
enable_rbac_authorization: Optional[pulumi.Input[bool]] = None,
enabled_for_deployment: Optional[pulumi.Input[bool]] = None,
enabled_for_disk_encryption: Optional[pulumi.Input[bool]] = None,
enabled_for_template_deployment: Optional[pulumi.Input[bool]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_acls: Optional[pulumi.Input[pulumi.InputType['KeyVaultNetworkAclsArgs']]] = None,
purge_protection_enabled: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku_name: Optional[pulumi.Input[str]] = None,
soft_delete_enabled: Optional[pulumi.Input[bool]] = None,
soft_delete_retention_days: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a Key Vault.
## Disclaimers
> **Note:** It's possible to define Key Vault Access Policies both within the `keyvault.KeyVault` resource via the `access_policy` block and by using the `keyvault.AccessPolicy` resource. However it's not possible to use both methods to manage Access Policies within a KeyVault, since there'll be conflicts.
> **Note:** This provider will automatically recover a soft-deleted Key Vault during Creation if one is found - you can opt out of this using the `features` configuration within the Provider configuration block.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
current = azure.core.get_client_config()
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_key_vault = azure.keyvault.KeyVault("exampleKeyVault",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
enabled_for_disk_encryption=True,
tenant_id=current.tenant_id,
soft_delete_retention_days=7,
purge_protection_enabled=False,
sku_name="standard",
access_policies=[azure.keyvault.KeyVaultAccessPolicyArgs(
tenant_id=current.tenant_id,
object_id=current.object_id,
key_permissions=["Get"],
secret_permissions=["Get"],
storage_permissions=["Get"],
)])
```
## Import
Key Vault's can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:keyvault/keyVault:KeyVault example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.KeyVault/vaults/vault1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultAccessPolicyArgs']]]] access_policies: A list of up to 16 objects describing access policies, as described below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultContactArgs']]]] contacts: One or more `contact` block as defined below.
:param pulumi.Input[bool] enable_rbac_authorization: Boolean flag to specify whether Azure Key Vault uses Role Based Access Control (RBAC) for authorization of data actions. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_deployment: Boolean flag to specify whether Azure Virtual Machines are permitted to retrieve certificates stored as secrets from the key vault. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_disk_encryption: Boolean flag to specify whether Azure Disk Encryption is permitted to retrieve secrets from the vault and unwrap keys. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_template_deployment: Boolean flag to specify whether Azure Resource Manager is permitted to retrieve secrets from the key vault. Defaults to `false`.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the Key Vault. Changing this forces a new resource to be created.
:param pulumi.Input[pulumi.InputType['KeyVaultNetworkAclsArgs']] network_acls: A `network_acls` block as defined below.
:param pulumi.Input[bool] purge_protection_enabled: Is Purge Protection enabled for this Key Vault? Defaults to `false`.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Key Vault. Changing this forces a new resource to be created.
        :param pulumi.Input[str] sku_name: The name of the SKU used for this Key Vault. Possible values are `standard` and `premium`.
        :param pulumi.Input[int] soft_delete_retention_days: The number of days that items should be retained for once soft-deleted. This value can be between `7` and `90` days; it defaults to `90`.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[str] tenant_id: The Azure Active Directory tenant ID that should be used for authenticating requests to the key vault.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: KeyVaultArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Key Vault.
## Disclaimers
        > **Note:** It's possible to define Key Vault Access Policies both within the `keyvault.KeyVault` resource via the `access_policy` block and by using the `keyvault.AccessPolicy` resource. However, it's not possible to use both methods to manage Access Policies within a Key Vault, since the two will conflict.
> **Note:** This provider will automatically recover a soft-deleted Key Vault during Creation if one is found - you can opt out of this using the `features` configuration within the Provider configuration block.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
current = azure.core.get_client_config()
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_key_vault = azure.keyvault.KeyVault("exampleKeyVault",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
enabled_for_disk_encryption=True,
tenant_id=current.tenant_id,
soft_delete_retention_days=7,
purge_protection_enabled=False,
sku_name="standard",
access_policies=[azure.keyvault.KeyVaultAccessPolicyArgs(
tenant_id=current.tenant_id,
object_id=current.object_id,
key_permissions=["Get"],
secret_permissions=["Get"],
storage_permissions=["Get"],
)])
```
## Import
        Key Vaults can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:keyvault/keyVault:KeyVault example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.KeyVault/vaults/vault1
```
:param str resource_name: The name of the resource.
:param KeyVaultArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(KeyVaultArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultAccessPolicyArgs']]]]] = None,
contacts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultContactArgs']]]]] = None,
enable_rbac_authorization: Optional[pulumi.Input[bool]] = None,
enabled_for_deployment: Optional[pulumi.Input[bool]] = None,
enabled_for_disk_encryption: Optional[pulumi.Input[bool]] = None,
enabled_for_template_deployment: Optional[pulumi.Input[bool]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_acls: Optional[pulumi.Input[pulumi.InputType['KeyVaultNetworkAclsArgs']]] = None,
purge_protection_enabled: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku_name: Optional[pulumi.Input[str]] = None,
soft_delete_enabled: Optional[pulumi.Input[bool]] = None,
soft_delete_retention_days: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = KeyVaultArgs.__new__(KeyVaultArgs)
__props__.__dict__["access_policies"] = access_policies
__props__.__dict__["contacts"] = contacts
__props__.__dict__["enable_rbac_authorization"] = enable_rbac_authorization
__props__.__dict__["enabled_for_deployment"] = enabled_for_deployment
__props__.__dict__["enabled_for_disk_encryption"] = enabled_for_disk_encryption
__props__.__dict__["enabled_for_template_deployment"] = enabled_for_template_deployment
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["network_acls"] = network_acls
__props__.__dict__["purge_protection_enabled"] = purge_protection_enabled
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if sku_name is None and not opts.urn:
raise TypeError("Missing required property 'sku_name'")
__props__.__dict__["sku_name"] = sku_name
if soft_delete_enabled is not None and not opts.urn:
warnings.warn("""Azure has removed support for disabling Soft Delete as of 2020-12-15, as such this field is no longer configurable and can be safely removed. This field will be removed in version 3.0 of the Azure Provider.""", DeprecationWarning)
pulumi.log.warn("""soft_delete_enabled is deprecated: Azure has removed support for disabling Soft Delete as of 2020-12-15, as such this field is no longer configurable and can be safely removed. This field will be removed in version 3.0 of the Azure Provider.""")
__props__.__dict__["soft_delete_enabled"] = soft_delete_enabled
__props__.__dict__["soft_delete_retention_days"] = soft_delete_retention_days
__props__.__dict__["tags"] = tags
if tenant_id is None and not opts.urn:
raise TypeError("Missing required property 'tenant_id'")
__props__.__dict__["tenant_id"] = tenant_id
__props__.__dict__["vault_uri"] = None
super(KeyVault, __self__).__init__(
'azure:keyvault/keyVault:KeyVault',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
access_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultAccessPolicyArgs']]]]] = None,
contacts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultContactArgs']]]]] = None,
enable_rbac_authorization: Optional[pulumi.Input[bool]] = None,
enabled_for_deployment: Optional[pulumi.Input[bool]] = None,
enabled_for_disk_encryption: Optional[pulumi.Input[bool]] = None,
enabled_for_template_deployment: Optional[pulumi.Input[bool]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_acls: Optional[pulumi.Input[pulumi.InputType['KeyVaultNetworkAclsArgs']]] = None,
purge_protection_enabled: Optional[pulumi.Input[bool]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku_name: Optional[pulumi.Input[str]] = None,
soft_delete_enabled: Optional[pulumi.Input[bool]] = None,
soft_delete_retention_days: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
vault_uri: Optional[pulumi.Input[str]] = None) -> 'KeyVault':
"""
Get an existing KeyVault resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultAccessPolicyArgs']]]] access_policies: A list of up to 16 objects describing access policies, as described below.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KeyVaultContactArgs']]]] contacts: One or more `contact` blocks as defined below.
:param pulumi.Input[bool] enable_rbac_authorization: Boolean flag to specify whether Azure Key Vault uses Role Based Access Control (RBAC) for authorization of data actions. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_deployment: Boolean flag to specify whether Azure Virtual Machines are permitted to retrieve certificates stored as secrets from the key vault. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_disk_encryption: Boolean flag to specify whether Azure Disk Encryption is permitted to retrieve secrets from the vault and unwrap keys. Defaults to `false`.
:param pulumi.Input[bool] enabled_for_template_deployment: Boolean flag to specify whether Azure Resource Manager is permitted to retrieve secrets from the key vault. Defaults to `false`.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the Key Vault. Changing this forces a new resource to be created.
:param pulumi.Input[pulumi.InputType['KeyVaultNetworkAclsArgs']] network_acls: A `network_acls` block as defined below.
:param pulumi.Input[bool] purge_protection_enabled: Is Purge Protection enabled for this Key Vault? Defaults to `false`.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Key Vault. Changing this forces a new resource to be created.
        :param pulumi.Input[str] sku_name: The name of the SKU used for this Key Vault. Possible values are `standard` and `premium`.
        :param pulumi.Input[int] soft_delete_retention_days: The number of days that items should be retained for once soft-deleted. This value can be between `7` and `90` days; it defaults to `90`.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[str] tenant_id: The Azure Active Directory tenant ID that should be used for authenticating requests to the key vault.
:param pulumi.Input[str] vault_uri: The URI of the Key Vault, used for performing operations on keys and secrets.
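
        A minimal lookup sketch (the subscription ID below is the same placeholder used in the import example):

        ```python
        existing = azure.keyvault.KeyVault.get("existing",
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.KeyVault/vaults/vault1")
        ```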
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _KeyVaultState.__new__(_KeyVaultState)
__props__.__dict__["access_policies"] = access_policies
__props__.__dict__["contacts"] = contacts
__props__.__dict__["enable_rbac_authorization"] = enable_rbac_authorization
__props__.__dict__["enabled_for_deployment"] = enabled_for_deployment
__props__.__dict__["enabled_for_disk_encryption"] = enabled_for_disk_encryption
__props__.__dict__["enabled_for_template_deployment"] = enabled_for_template_deployment
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["network_acls"] = network_acls
__props__.__dict__["purge_protection_enabled"] = purge_protection_enabled
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["sku_name"] = sku_name
__props__.__dict__["soft_delete_enabled"] = soft_delete_enabled
__props__.__dict__["soft_delete_retention_days"] = soft_delete_retention_days
__props__.__dict__["tags"] = tags
__props__.__dict__["tenant_id"] = tenant_id
__props__.__dict__["vault_uri"] = vault_uri
return KeyVault(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="accessPolicies")
def access_policies(self) -> pulumi.Output[Sequence['outputs.KeyVaultAccessPolicy']]:
"""
A list of up to 16 objects describing access policies, as described below.
"""
return pulumi.get(self, "access_policies")
@property
@pulumi.getter
def contacts(self) -> pulumi.Output[Optional[Sequence['outputs.KeyVaultContact']]]:
"""
        One or more `contact` blocks as defined below.
"""
return pulumi.get(self, "contacts")
@property
@pulumi.getter(name="enableRbacAuthorization")
def enable_rbac_authorization(self) -> pulumi.Output[Optional[bool]]:
"""
Boolean flag to specify whether Azure Key Vault uses Role Based Access Control (RBAC) for authorization of data actions. Defaults to `false`.
"""
return pulumi.get(self, "enable_rbac_authorization")
@property
@pulumi.getter(name="enabledForDeployment")
def enabled_for_deployment(self) -> pulumi.Output[Optional[bool]]:
"""
Boolean flag to specify whether Azure Virtual Machines are permitted to retrieve certificates stored as secrets from the key vault. Defaults to `false`.
"""
return pulumi.get(self, "enabled_for_deployment")
@property
@pulumi.getter(name="enabledForDiskEncryption")
def enabled_for_disk_encryption(self) -> pulumi.Output[Optional[bool]]:
"""
Boolean flag to specify whether Azure Disk Encryption is permitted to retrieve secrets from the vault and unwrap keys. Defaults to `false`.
"""
return pulumi.get(self, "enabled_for_disk_encryption")
@property
@pulumi.getter(name="enabledForTemplateDeployment")
def enabled_for_template_deployment(self) -> pulumi.Output[Optional[bool]]:
"""
Boolean flag to specify whether Azure Resource Manager is permitted to retrieve secrets from the key vault. Defaults to `false`.
"""
return pulumi.get(self, "enabled_for_template_deployment")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Specifies the name of the Key Vault. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkAcls")
def network_acls(self) -> pulumi.Output['outputs.KeyVaultNetworkAcls']:
"""
A `network_acls` block as defined below.
"""
return pulumi.get(self, "network_acls")
@property
@pulumi.getter(name="purgeProtectionEnabled")
def purge_protection_enabled(self) -> pulumi.Output[Optional[bool]]:
"""
Is Purge Protection enabled for this Key Vault? Defaults to `false`.
"""
return pulumi.get(self, "purge_protection_enabled")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the resource group in which to create the Key Vault. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="skuName")
def sku_name(self) -> pulumi.Output[str]:
"""
        The name of the SKU used for this Key Vault. Possible values are `standard` and `premium`.
"""
return pulumi.get(self, "sku_name")
@property
@pulumi.getter(name="softDeleteEnabled")
def soft_delete_enabled(self) -> pulumi.Output[bool]:
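        """
        Deprecated. Azure removed support for disabling Soft Delete as of 2020-12-15, so this field is no longer configurable and will be removed in version 3.0 of the Azure Provider.
        """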
return pulumi.get(self, "soft_delete_enabled")
@property
@pulumi.getter(name="softDeleteRetentionDays")
def soft_delete_retention_days(self) -> pulumi.Output[Optional[int]]:
"""
        The number of days that items should be retained for once soft-deleted. This value can be between `7` and `90` days; it defaults to `90`.
"""
return pulumi.get(self, "soft_delete_retention_days")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> pulumi.Output[str]:
"""
The Azure Active Directory tenant ID that should be used for authenticating requests to the key vault.
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter(name="vaultUri")
def vault_uri(self) -> pulumi.Output[str]:
"""
The URI of the Key Vault, used for performing operations on keys and secrets.
"""
return pulumi.get(self, "vault_uri")
| 408e1e1446d7d1f07daccf82d45c4ea5d3d13750 | 4,594 | py | Python | tests/test_gpg.py | wearefair/modelmapper | 7f420ceca0312373fdb0ab81fa3f6a3f8fb86086 | ["MIT"] | stars: 5 (2018-06-29 to 2019-03-15) | issues: 13 (2018-06-29 to 2020-12-15) | forks: 1 (2019-08-05) |
import logging
import base64
from modelmapper.gpg import GPGMixin
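# The two constants below are base64-encoded, ASCII-armored PGP key blocks for a
# disposable test key pair (testgpguser@mydomain.com); they contain no real secrets.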
PRIVATE_KEY = 'LS0tLS1CRUdJTiBQR1AgUFJJVkFURSBLRVkgQkxPQ0stLS0tLQpsUVBHQkY3cjcwOEJDQUM1bjZBZDFGdW51NkNjbU9VcjdiQjIxYVY1d2RhMjVhaUtKOUlaOENGc0k5dXpHT1hlCnh2TU9VMGE1WlN6Wkh6bWpuazFpaTlFYW9DeXhHZW5TcG1RdFIwSUJsSzhxbEJRdTI1Y0dzNVhENTdKdTZaNlQKVkJubTFLUGhWbXp2UTV5eTVTOElZa0tYM0hMd0xNRW91T051MUN6dXJ1ZDQyblRpZ3JNWFAxekxmTU9oQmtsdwo2VmdtNzhwQXdObFFRbDE1Mnp2RzhsUmlOS3FTUmpvakMrUFY3WTFoZ0NucXVETU5VZ01nbU95NkFFd3lWcS81CjBrM0JrZ0dUY1B4TU0xVFpaWHhCY3NXbm5ielc5bmZDKzV5WFJyUkp0M2tkbEZGWFdLVFc2ci9VOVZkblp0WlIKcUtXTDJ4ck9COUZKY3dPbTFiaElNZUVWYzd6ZDhyTjBVem01QUJFQkFBSCtCd01DdmphUHV6ZlR0WmJuaGJDVwpjUHZrdnBCQ1gvSXF5U2ptOXdudCtpSVVoQTcwdFArU1dNSThBRTJaTVFSQWVlek4yN3FkTkg2NEhKOEJ5eEx6CmdPSmd3elFDZld2UDBObjArUmdObTgrUWdWNVAyQmlQUmJhcGpzQ25VbGNaMmpwSlFzZTZBZE9GTEpCd05QM2gKdWVtRFhFSmtxZGRud1YvMldkdFBSQjNRbGpEU09qbWVWVG4rK05rR2dtU08zQjlUQmZGVXozN0xzb0JWRmM3dwpRY3JXQzJIRGgrYVlEMEpNdFNmWkc5S3hCU29IZUowQ0RIVjM2RkFqekN6VEFtcEFTWlNaR0RXVEpzRUZwQldRCjllQ1Y1TTFzTW5SZy84WGc0SGRNd2NWMm1NdWxBUWV0a0pZR3VGNTlLVElhT29kZGpscmlJMmhwdDMyL3FCdDMKMW5UYUZ4QVhubjk4VFBzTE5YaEdkTk1vRU1qRUhnZUNiS2FZdUhteUxyc25XeVUrdnFTZm9nbmgxRU5wd2lSQgpVNGRYMUZrQVpobE9FaFIvWEMzYjgwUWs5VU9BUjh6VW41Ulo0YlZlZUl3eXBCbE9GRUJ6OE1OaVJHdGxFcGhqCkdHcmp6ejJMN2k2OE1RTCs0MDZNUDlRT1UzV241dFR5M2djTjdpWitWbkZ4N0NKWGpETU52M1lyVWVUS0FIUWQKWXE4V3lZTzJMK3BibHdGTzZyWlFXc1E2aXN1VExHSmV3WHNHUzQvZnB6QUFuNzc0Zmx2RU5vUDhZNmI1ZXdOQgo2U0FzY1VnakdhSXpjVkxBZ1hweHlVZkMxYmhWeTVGZi9jbVhRT1MrLzBZOHFwK2JKNkVlS29xc3AxS3VncVJ3ClpCQWlIYk1acTAwWmxaYW9KOFdwOEJQMzFmdUFuUVUydjNmWGhKaHFOa1AzRGZzZXNLcEdkQmNtMjU1aTBxVnoKeDdZYm41RDhaT0dmd3UycUdKYXFBay93RGVaR0lDQ0RaVDN2ZXdpMG9jakRobkIrekRxR0lrR216ZGs3VVMxUApXWE5neU9pTG1XS0l5cndCejVFWlV1VllmZm1Bb2taRTA3Tm9nUTdEcVowL3JDbG9WUTFnUzhKVXZmYzR4S2RjClFKdzN1UHNOWCtOckFVdGNqSC9vdTFJQ1ExVTBZZXBKVmprb3Bua2h0Z3BEQUJTUktRcTRNMTloKzBpanhjNXQKSDhORVkydVdOZGRRdEN4QmRYUnZaMlZ1WlhKaGRHVmtJRXRsZVNBOGRHVnpkR2R3WjNWelpYSkFiWGxrYjIxaAphVzR1WTI5dFBva0JUZ1FUQVFnQU9CWWhCREEzS2w4b0pON2NHaHVjTjhTb1FFaDU1OGlnQlFKZTYrOVBBaHN2CkJRc0pDQWNDQmhVSUNRb0xBZ1FXQWdNQkFoNEJBaGVBQUFvSkVNU29RRWg1NThpZ2pLa0gvMThaRXo3eDNRYkcKRE1KS0FJbTkvb1N6UVl5R0RFZTBYS0JMVjJsS2w1SmVzQUZFZEJ3TTFKdlFpUGljQjFzR3Z6WW9mSTNwTnBMZAp2L29jNkZjNW5BODZrTHhXUnR3YnYzSlk1ODFoVndtWnZGa1h1NlBjMjlYZ042VXZTekFqMVFwazRray91YTRzClFyZllOZ2dwL2NBdGViaFN0dmxrZXpnUU5kMFk1dDF2dWxvYVUrM21tNndzWG5MczVIeVFqNk1BbnBWREtsSEgKc1d3Q3FWRWZ5aEtlbXZSRUFDL3FBOWlrZjRrMkljQzB5TWlzODdUWGhwMnpYdE85YitPQlZIRDRoc1BFdVArdwpKWkYyOEhmaFdWcUZXaVNla1VyYUtFL2Z1ZWdoZnpwdlhQckdiNGV4TTFkMnNkOWVZT0xBN0cxZEdWQ3BvUmtXCmZtQ3R1bXc3cldRPQo9eFVOZwotLS0tLUVORCBQR1AgUFJJVkFURSBLRVkgQkxPQ0stLS0tLQo='
PUBLIC_KEY = 'LS0tLS1CRUdJTiBQR1AgUFVCTElDIEtFWSBCTE9DSy0tLS0tCm1RRU5CRjdyNzA4QkNBQzVuNkFkMUZ1bnU2Q2NtT1VyN2JCMjFhVjV3ZGEyNWFpS0o5SVo4Q0ZzSTl1ekdPWGUKeHZNT1UwYTVaU3paSHptam5rMWlpOUVhb0N5eEdlblNwbVF0UjBJQmxLOHFsQlF1MjVjR3M1WEQ1N0p1Nlo2VApWQm5tMUtQaFZtenZRNXl5NVM4SVlrS1gzSEx3TE1Fb3VPTnUxQ3p1cnVkNDJuVGlnck1YUDF6TGZNT2hCa2x3CjZWZ203OHBBd05sUVFsMTUyenZHOGxSaU5LcVNSam9qQytQVjdZMWhnQ25xdURNTlVnTWdtT3k2QUV3eVZxLzUKMGszQmtnR1RjUHhNTTFUWlpYeEJjc1dubmJ6VzluZkMrNXlYUnJSSnQza2RsRkZYV0tUVzZyL1U5VmRuWnRaUgpxS1dMMnhyT0I5Rkpjd09tMWJoSU1lRVZjN3pkOHJOMFV6bTVBQkVCQUFHMExFRjFkRzluWlc1bGNtRjBaV1FnClMyVjVJRHgwWlhOMFozQm5kWE5sY2tCdGVXUnZiV0ZwYmk1amIyMCtpUUZPQkJNQkNBQTRGaUVFTURjcVh5Z2sKM3R3YUc1dzN4S2hBU0hubnlLQUZBbDdyNzA4Q0d5OEZDd2tJQndJR0ZRZ0pDZ3NDQkJZQ0F3RUNIZ0VDRjRBQQpDZ2tReEtoQVNIbm55S0NNcVFmL1h4a1RQdkhkQnNZTXdrb0FpYjMraExOQmpJWU1SN1Jjb0V0WGFVcVhrbDZ3CkFVUjBIQXpVbTlDSStKd0hXd2EvTmloOGplazJrdDIvK2h6b1Z6bWNEenFRdkZaRzNCdS9jbGpueldGWENabTgKV1JlN285emIxZUEzcFM5TE1DUFZDbVRpU1QrNXJpeEN0OWcyQ0NuOXdDMTV1RksyK1dSN09CQTEzUmptM1crNgpXaHBUN2VhYnJDeGVjdXprZkpDUG93Q2VsVU1xVWNleGJBS3BVUi9LRXA2YTlFUUFMK29EMktSL2lUWWh3TFRJCnlLenp0TmVHbmJOZTA3MXY0NEZVY1BpR3c4UzQvN0Fsa1hid2QrRlpXb1ZhSko2UlN0b29UOSs1NkNGL09tOWMKK3Nadmg3RXpWM2F4MzE1ZzRzRHNiVjBaVUttaEdSWitZSzI2YkR1dFpBPT0KPTg1Y0oKLS0tLS1FTkQgUEdQIFBVQkxJQyBLRVkgQkxPQ0stLS0tLQo='
PASSPHRASE = 'my passphrase'
RECIPIENT = 'testgpguser@mydomain.com'
class MyGPG(GPGMixin):
GPG_PUBLIC_KEY = base64.b64decode(PUBLIC_KEY)
GPG_PRIVATE_KEY = base64.b64decode(PRIVATE_KEY)
    GPG_RECIPIENT = RECIPIENT
GPG_PASSPHRASE = PASSPHRASE
logger = logging.getLogger(__name__)
class TestGPGMixin:
def test_encrypt_and_decrypt_content(self):
content = b"What is going on?"
mygpg = MyGPG()
encrypted = mygpg.gpg_encrypt_content(content)
assert encrypted.startswith(b'-----BEGIN PGP MESSAGE-----')
decrypted = mygpg.gpg_decrypt_content(encrypted)
assert content == decrypted
| 40f473b46c5b10a83545dae2dcf99296956ef510 | 14,940 | py | Python | tests/em/static/test_DC_2D_analytic.py | prisae/simpeg | 5cdd1b496bddcf3d9acd714b901a57bad6fb1ef9 | ["MIT"] | stars: 3 (2021-08-04 to 2022-01-12) | issues: 2 via thast/simpeg@8021082b8b53f3c08fa87fc085547bdd56437c6b (2020-06-16 to 2020-07-10) | forks: 1 via thast/simpeg (2021-12-29) |
import numpy as np
import unittest
from discretize import TensorMesh
from SimPEG import utils, SolverLU
from SimPEG.electromagnetics import resistivity as dc
from SimPEG.electromagnetics import analytics
class DCProblemAnalyticTests_DPDP(unittest.TestCase):
def setUp(self):
npad = 10
cs = 12.5
hx = [(cs, npad, -1.4), (cs, 61), (cs, npad, 1.4)]
hy = [(cs, npad, -1.4), (cs, 20)]
mesh = TensorMesh([hx, hy], x0="CN")
sighalf = 1e-2
sigma = np.ones(mesh.nC) * sighalf
x = mesh.cell_centers_x[
np.logical_and(mesh.cell_centers_x > -150, mesh.cell_centers_x < 250)
]
M = utils.ndgrid(x, np.r_[0.0])
N = utils.ndgrid(x + 12.5 * 4, np.r_[0.0])
A0loc = np.r_[-200, 0.0]
A1loc = np.r_[-250, 0.0]
rxloc = [np.c_[M, np.zeros(x.size)], np.c_[N, np.zeros(x.size)]]
data_ana_A = analytics.DCAnalytic_Pole_Dipole(
np.r_[A0loc, 0.0], rxloc, sighalf, earth_type="halfspace"
)
data_ana_b = analytics.DCAnalytic_Pole_Dipole(
np.r_[A1loc, 0.0], rxloc, sighalf, earth_type="halfspace"
)
data_ana = data_ana_A - data_ana_b
rx = dc.receivers.Dipole(M, N)
src0 = dc.sources.Dipole([rx], A0loc, A1loc)
survey = dc.Survey([src0])
self.survey = survey
self.mesh = mesh
self.sigma = sigma
self.data_ana = data_ana
self.plotIt = False
try:
from pymatsolver import Pardiso
self.Solver = Pardiso
except ImportError:
self.Solver = SolverLU
def test_Simulation2DNodal(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DNodal(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
data = simulation.dpred()
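        # normalized RMS of the relative misfit between simulated and analytic data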
err = np.sqrt(
np.linalg.norm((data - self.data_ana) / self.data_ana) ** 2
/ self.data_ana.size
)
print(f"DPDP N err: {err}")
self.assertLess(err, tolerance)
def test_Simulation2DCellCentered(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DCellCentered(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
data = simulation.dpred()
err = np.sqrt(
np.linalg.norm((data - self.data_ana) / self.data_ana) ** 2
/ self.data_ana.size
)
print(f"DPDP N err: {err}")
self.assertLess(err, tolerance)
class DCProblemAnalyticTests_PDP(unittest.TestCase):
def setUp(self):
npad = 10
cs = 12.5
hx = [(cs, npad, -1.4), (cs, 61), (cs, npad, 1.4)]
hy = [(cs, npad, -1.4), (cs, 20)]
mesh = TensorMesh([hx, hy], x0="CN")
sighalf = 1e-2
sigma = np.ones(mesh.nC) * sighalf
x = mesh.cell_centers_x[
np.logical_and(mesh.cell_centers_x > -150, mesh.cell_centers_x < 250)
]
M = utils.ndgrid(x, np.r_[0.0])
N = utils.ndgrid(x + 12.5 * 4, np.r_[0.0])
A0loc = np.r_[-200, 0.0]
# A1loc = np.r_[-250, 0.0]
rxloc = [np.c_[M, np.zeros(x.size)], np.c_[N, np.zeros(x.size)]]
data_ana = analytics.DCAnalytic_Pole_Dipole(
np.r_[A0loc, 0.0], rxloc, sighalf, earth_type="halfspace"
)
rx = dc.receivers.Dipole(M, N)
src0 = dc.sources.Pole([rx], A0loc)
survey = dc.Survey([src0])
self.survey = survey
self.mesh = mesh
self.sigma = sigma
self.data_ana = data_ana
self.plotIt = False
try:
from pymatsolver import Pardiso
self.Solver = Pardiso
except ImportError:
self.Solver = SolverLU
def test_Simulation2DNodal(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DNodal(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
data = simulation.dpred()
err = np.sqrt(
np.linalg.norm((data - self.data_ana) / self.data_ana) ** 2
/ self.data_ana.size
)
print(f"PDP N err: {err}")
self.assertLess(err, tolerance)
def test_Simulation2DCellCentered(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DCellCentered(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
data = simulation.dpred()
err = np.sqrt(
np.linalg.norm((data - self.data_ana) / self.data_ana) ** 2
/ self.data_ana.size
)
print(f"PDP CC err: {err}")
self.assertLess(err, tolerance)
class DCProblemAnalyticTests_DPP(unittest.TestCase):
def setUp(self):
npad = 10
cs = 12.5
hx = [(cs, npad, -1.4), (cs, 61), (cs, npad, 1.4)]
hy = [(cs, npad, -1.4), (cs, 20)]
mesh = TensorMesh([hx, hy], x0="CN")
sighalf = 1e-2
sigma = np.ones(mesh.nC) * sighalf
x = mesh.cell_centers_x[
np.logical_and(mesh.cell_centers_x > -150, mesh.cell_centers_x < 250)
]
M = utils.ndgrid(x, np.r_[0.0])
N = utils.ndgrid(x + 12.5 * 4, np.r_[0.0])
A0loc = np.r_[-200, 0.0]
A1loc = np.r_[-250, 0.0]
rxloc = np.c_[M, np.zeros(x.size)]
data_ana = analytics.DCAnalytic_Dipole_Pole(
[np.r_[A0loc, 0.0], np.r_[A1loc, 0.0]],
rxloc,
sighalf,
earth_type="halfspace",
)
rx = dc.receivers.Pole(M)
src0 = dc.sources.Dipole([rx], A0loc, A1loc)
survey = dc.survey.Survey([src0])
self.survey = survey
self.mesh = mesh
self.sigma = sigma
self.data_ana = data_ana
self.plotIt = False
try:
from pymatsolver import PardisoSolver
self.Solver = PardisoSolver
except ImportError:
self.Solver = SolverLU
def test_Simulation2DNodal(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DNodal(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
data = simulation.dpred()
err = np.sqrt(
np.linalg.norm((data - self.data_ana) / self.data_ana) ** 2
/ self.data_ana.size
)
print(f"DPP N err: {err}")
self.assertLess(err, tolerance)
def test_Simulation2DCellCentered(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DCellCentered(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
data = simulation.dpred()
err = np.sqrt(
np.linalg.norm((data - self.data_ana) / self.data_ana) ** 2
/ self.data_ana.size
)
print(f"DPP CC err: {err}")
self.assertLess(err, tolerance)
class DCProblemAnalyticTests_PP(unittest.TestCase):
def setUp(self):
# Note: Pole-Pole requires bigger boundary to obtain good accuracy.
# One can use greater padding rate. Here 2 is used.
npad = 10
cs = 12.5
hx = [(cs, npad, -2), (cs, 61), (cs, npad, 2)]
hy = [(cs, npad, -2), (cs, 20)]
mesh = TensorMesh([hx, hy], x0="CN")
sighalf = 1e-2
sigma = np.ones(mesh.nC) * sighalf
x = mesh.cell_centers_x[
np.logical_and(mesh.cell_centers_x > -150, mesh.cell_centers_x < 250)
]
M = utils.ndgrid(x, np.r_[0.0])
# N = utils.ndgrid(x + 12.5*4, np.r_[0.0])
A0loc = np.r_[-200, 0.0]
# A1loc = np.r_[-250, 0.0]
rxloc = np.c_[M, np.zeros(x.size)]
data_ana = analytics.DCAnalytic_Pole_Pole(
np.r_[A0loc, 0.0], rxloc, sighalf, earth_type="halfspace"
)
rx = dc.receivers.Pole(M)
src0 = dc.sources.Pole([rx], A0loc)
survey = dc.survey.Survey([src0])
self.survey = survey
self.mesh = mesh
self.sigma = sigma
self.data_ana = data_ana
try:
from pymatsolver import PardisoSolver
self.Solver = PardisoSolver
except ImportError:
self.Solver = SolverLU
def test_Simulation2DCellCentered(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DCellCentered(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
data = simulation.dpred()
err = np.sqrt(
np.linalg.norm((data - self.data_ana) / self.data_ana) ** 2
/ self.data_ana.size
)
print(f"PP CC err: {err}")
self.assertLess(err, tolerance)
def test_Simulation2DNodal(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DNodal(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
data = simulation.dpred()
err = np.sqrt(
np.linalg.norm((data - self.data_ana) / self.data_ana) ** 2
/ self.data_ana.size
)
print(f"PP N err: {err}")
self.assertLess(err, tolerance)
class DCProblemAnalyticTests_DPField(unittest.TestCase):
def setUp(self):
cs = 12.5
hx = [(cs, 7, -1.3), (cs, 61), (cs, 7, 1.3)]
hy = [(cs, 7, -1.3), (cs, 20)]
mesh = TensorMesh([hx, hy], x0="CN")
sighalf = 1e-2
sigma = np.ones(mesh.nC) * sighalf
A0loc = np.r_[-31.25, 0.0]
A1loc = np.r_[31.25, 0.0]
rxloc = np.c_[mesh.gridN, np.zeros(mesh.nN)]
data_ana = analytics.DCAnalytic_Dipole_Pole(
[np.r_[A0loc, 0.0], np.r_[A1loc, 0.0]],
rxloc,
sighalf,
earth_type="halfspace",
)
src0 = dc.sources.Dipole([], A0loc, A1loc)
survey = dc.survey.Survey([src0])
# determine comparison locations
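        # the comparison region excludes a small block around the electrodes so the
        # near-source singularity does not dominate the error estimate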
ROI_large_BNW = np.array([-200, -100])
ROI_large_TSE = np.array([200, 0])
ROI_largeInds = utils.model_builder.getIndicesBlock(
ROI_large_BNW, ROI_large_TSE, mesh.gridN
)[0]
# print(ROI_largeInds.shape)
ROI_small_BNW = np.array([-50, -25])
ROI_small_TSE = np.array([50, 0])
ROI_smallInds = utils.model_builder.getIndicesBlock(
ROI_small_BNW, ROI_small_TSE, mesh.gridN
)[0]
# print(ROI_smallInds.shape)
ROI_inds = np.setdiff1d(ROI_largeInds, ROI_smallInds)
self.data_ana = data_ana
self.survey = survey
self.mesh = mesh
self.sigma = sigma
self.plotIt = False
self.ROI_inds = ROI_inds
try:
from pymatsolver import PardisoSolver
self.Solver = PardisoSolver
except ImportError:
self.Solver = SolverLU
def test_Simulation2DCellCentered(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DCellCentered(
self.mesh, survey=self.survey, sigma=self.sigma, solver=self.Solver,
)
field = simulation.fields(self.sigma)
# just test if we can get each property of the field
field[:, "phi"][:, 0]
field[:, "j"]
field[:, "e"]
field[:, "charge"]
field[:, "charge_density"]
print("got fields CC")
def test_Simulation2DNodal(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DNodal(
self.mesh, survey=self.survey, sigma=self.sigma, solver=self.Solver,
)
field = simulation.fields(self.sigma)
data = field[:, "phi"][:, 0]
# also test if we can get the other things charge and charge_density
field[:, "j"]
field[:, "e"]
field[:, "charge"]
field[:, "charge_density"]
print("got fields N")
ROI_inds = self.ROI_inds
diff_norm = np.linalg.norm((data[ROI_inds] - self.data_ana[ROI_inds]))
err = diff_norm / np.linalg.norm(self.data_ana[ROI_inds])
print(f"DP N Fields err: {err}")
self.assertLess(err, tolerance)
class DCSimulationAppResTests(unittest.TestCase):
def setUp(self):
npad = 10
cs = 12.5
hx = [(cs, npad, -1.4), (cs, 61), (cs, npad, 1.4)]
hy = [(cs, npad, -1.4), (cs, 20)]
mesh = TensorMesh([hx, hy], x0="CN")
sighalf = 1e-2
sigma = np.ones(mesh.nC) * sighalf
x = mesh.cell_centers_x[
np.logical_and(mesh.cell_centers_x > -150, mesh.cell_centers_x < 250)
]
M = utils.ndgrid(x, np.r_[0.0])
N = utils.ndgrid(x + 12.5 * 4, np.r_[0.0])
A0loc = np.r_[-200, 0.0]
A1loc = np.r_[-250, 0.0]
rx = dc.receivers.Dipole(M, N, data_type="apparent_resistivity")
src0 = dc.sources.Dipole([rx], A0loc, A1loc)
survey = dc.Survey([src0])
self.survey = survey
self.mesh = mesh
self.sigma = sigma
self.sigma_half = sighalf
self.plotIt = False
try:
from pymatsolver import Pardiso
self.Solver = Pardiso
except ImportError:
self.Solver = SolverLU
def test_Simulation2DNodal(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DNodal(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
with self.assertRaises(KeyError):
data = simulation.dpred()
self.survey.set_geometric_factor()
data = simulation.dpred()
rhohalf = 1.0 / self.sigma_half
err = np.sqrt(np.linalg.norm((data - rhohalf) / rhohalf) ** 2 / data.size)
print(f"DPDP N err: {err}")
self.assertLess(err, tolerance)
def test_Simulation2DCellCentered(self, tolerance=0.05):
simulation = dc.simulation_2d.Simulation2DCellCentered(
self.mesh,
survey=self.survey,
sigma=self.sigma,
solver=self.Solver,
bc_type="Robin",
)
with self.assertRaises(KeyError):
data = simulation.dpred()
self.survey.set_geometric_factor()
data = simulation.dpred()
rhohalf = 1.0 / self.sigma_half
err = np.sqrt(np.linalg.norm((data - rhohalf) / rhohalf) ** 2 / data.size)
print(f"DPDP N err: {err}")
self.assertLess(err, tolerance)
if __name__ == "__main__":
unittest.main()
| 40fd8a0f4d985bb66da09435354d6b933e50ec75 | 230 | py | Python | inference/torch_model/__init__.py | TAViT2022/TAViT | 6ea42150c57af0e0618675565440df85121cf50a | ["Apache-2.0"] | stars: null | issues: null | forks: null |
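# Re-export the 2D DCT helpers and the JPEG quantization matrices for quality
# factors 10/50/90 so they are importable directly from this package.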
from .jpegmodules.dct import dct_2d as dct_2d
from .jpegmodules.dct import idct_2d as idct_2d
from .jpegmodules.matrices import Q10 as Q10
from .jpegmodules.matrices import Q50 as Q50
from .jpegmodules.matrices import Q90 as Q90
| dc134db316c8869543f7d1a8857575a86edca651 | 12,896 | py | Python | Configuration/HLT/python/addOnTestsHLT.py | malbouis/cmssw | 16173a30d3f0c9ecc5419c474bb4d272c58b65c8 | ["Apache-2.0"] | stars: 852 (2015-01-11 to 2022-03-25) | issues: 30,371 (2015-01-02 to 2022-03-31) | forks: 3,240 (2015-01-02 to 2022-03-31) |
def addOnTestsHLT():
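    # Each entry maps an addOn test name to a three-step recipe:
    #   1. a cmsDriver.py command producing (or repacking) RAW input,
    #   2. the standalone online HLT configuration to exercise, and
    #   3. a cmsDriver.py command rerunning HLT plus RAW2DIGI,L1Reco,RECO on that input.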
addOnTestsHLT = {
'hlt_mc_Fake' : ['cmsDriver.py TTbar_8TeV_TuneCUETP8M1_cfi -s GEN,SIM,DIGI,L1,DIGI2RAW --mc --scenario=pp -n 10 --conditions auto:run1_mc_Fake --relval 9000,50 --datatier "GEN-SIM-RAW" --eventcontent RAWSIM --customise=HLTrigger/Configuration/CustomConfigs.L1T --fileout file:RelVal_Raw_Fake_MC.root',
'HLTrigger/Configuration/test/OnLine_HLT_Fake.py',
'cmsDriver.py RelVal -s HLT:Fake,RAW2DIGI,L1Reco,RECO --mc --scenario=pp -n 10 --conditions auto:run1_mc_Fake --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --processName=HLTRECO --filein file:RelVal_Raw_Fake_MC.root --fileout file:RelVal_Raw_Fake_MC_HLT_RECO.root'],
'hlt_mc_Fake1': ['cmsDriver.py TTbar_13TeV_TuneCUETP8M1_cfi -s GEN,SIM,DIGI,L1,DIGI2RAW --mc --scenario=pp -n 10 --conditions auto:run2_mc_Fake1 --relval 9000,50 --datatier "GEN-SIM-RAW" --eventcontent RAWSIM --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run2_25ns --fileout file:RelVal_Raw_Fake1_MC.root',
'HLTrigger/Configuration/test/OnLine_HLT_Fake1.py',
'cmsDriver.py RelVal -s HLT:Fake1,RAW2DIGI,L1Reco,RECO --mc --scenario=pp -n 10 --conditions auto:run2_mc_Fake1 --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --era Run2_25ns --processName=HLTRECO --filein file:RelVal_Raw_Fake1_MC.root --fileout file:RelVal_Raw_Fake1_MC_HLT_RECO.root'],
'hlt_mc_Fake2': ['cmsDriver.py TTbar_13TeV_TuneCUETP8M1_cfi -s GEN,SIM,DIGI,L1,DIGI2RAW --mc --scenario=pp -n 10 --conditions auto:run2_mc_Fake2 --relval 9000,50 --datatier "GEN-SIM-RAW" --eventcontent RAWSIM --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run2_2016 --fileout file:RelVal_Raw_Fake2_MC.root',
'HLTrigger/Configuration/test/OnLine_HLT_Fake2.py',
'cmsDriver.py RelVal -s HLT:Fake2,RAW2DIGI,L1Reco,RECO --mc --scenario=pp -n 10 --conditions auto:run2_mc_Fake2 --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --era Run2_2016 --processName=HLTRECO --filein file:RelVal_Raw_Fake2_MC.root --fileout file:RelVal_Raw_Fake2_MC_HLT_RECO.root'],
'hlt_mc_GRun' : ['cmsDriver.py TTbar_13TeV_TuneCUETP8M1_cfi -s GEN,SIM,DIGI,L1,DIGI2RAW --mc --scenario=pp -n 10 --conditions auto:run3_mc_GRun --relval 9000,50 --datatier "GEN-SIM-RAW" --eventcontent RAWSIM --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run3 --fileout file:RelVal_Raw_GRun_MC.root',
'HLTrigger/Configuration/test/OnLine_HLT_GRun.py',
'cmsDriver.py RelVal -s HLT:GRun,RAW2DIGI,L1Reco,RECO --mc --scenario=pp -n 10 --conditions auto:run3_mc_GRun --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --era Run3 --processName=HLTRECO --filein file:RelVal_Raw_GRun_MC.root --fileout file:RelVal_Raw_GRun_MC_HLT_RECO.root'],
'hlt_mc_HIon' : ['cmsDriver.py TTbar_13TeV_TuneCUETP8M1_cfi -s GEN,SIM,DIGI,L1,DIGI2RAW --mc --scenario=pp -n 10 --conditions auto:run3_mc_HIon --relval 9000,50 --datatier "GEN-SIM-RAW" --eventcontent RAWSIM --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run3_pp_on_PbPb --fileout file:RelVal_Raw_HIon_MC.root',
'HLTrigger/Configuration/test/OnLine_HLT_HIon.py',
'cmsDriver.py RelVal -s HLT:HIon,RAW2DIGI,L1Reco,RECO --mc --scenario=pp -n 10 --conditions auto:run3_mc_HIon --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --era Run3_pp_on_PbPb --processName=HLTRECO --filein file:RelVal_Raw_HIon_MC.root --fileout file:RelVal_Raw_HIon_MC_HLT_RECO.root'],
'hlt_mc_PIon' : ['cmsDriver.py TTbar_13TeV_TuneCUETP8M1_cfi -s GEN,SIM,DIGI,L1,DIGI2RAW --mc --scenario=pp -n 10 --conditions auto:run3_mc_PIon --relval 9000,50 --datatier "GEN-SIM-RAW" --eventcontent RAWSIM --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run3 --fileout file:RelVal_Raw_PIon_MC.root',
'HLTrigger/Configuration/test/OnLine_HLT_PIon.py',
'cmsDriver.py RelVal -s HLT:PIon,RAW2DIGI,L1Reco,RECO --mc --scenario=pp -n 10 --conditions auto:run3_mc_PIon --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --era Run3 --processName=HLTRECO --filein file:RelVal_Raw_PIon_MC.root --fileout file:RelVal_Raw_PIon_MC_HLT_RECO.root'],
'hlt_mc_PRef' : ['cmsDriver.py TTbar_13TeV_TuneCUETP8M1_cfi -s GEN,SIM,DIGI,L1,DIGI2RAW --mc --scenario=pp -n 10 --conditions auto:run3_mc_PRef --relval 9000,50 --datatier "GEN-SIM-RAW" --eventcontent RAWSIM --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run3 --fileout file:RelVal_Raw_PRef_MC.root',
'HLTrigger/Configuration/test/OnLine_HLT_PRef.py',
'cmsDriver.py RelVal -s HLT:PRef,RAW2DIGI,L1Reco,RECO --mc --scenario=pp -n 10 --conditions auto:run3_mc_PRef --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --era Run3 --processName=HLTRECO --filein file:RelVal_Raw_PRef_MC.root --fileout file:RelVal_Raw_PRef_MC_HLT_RECO.root'],
'hlt_data_Fake' : ['cmsDriver.py RelVal -s L1REPACK:GT1 --data --scenario=pp -n 10 --conditions auto:run1_hlt_Fake --relval 9000,50 --datatier "RAW" --eventcontent RAW --customise=HLTrigger/Configuration/CustomConfigs.L1T --fileout file:RelVal_Raw_Fake_DATA.root --filein /store/data/Run2012A/MuEG/RAW/v1/000/191/718/14932935-E289-E111-830C-5404A6388697.root',
'HLTrigger/Configuration/test/OnLine_HLT_Fake.py',
'cmsDriver.py RelVal -s HLT:Fake,RAW2DIGI,L1Reco,RECO --data --scenario=pp -n 10 --conditions auto:run1_data_Fake --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --processName=HLTRECO --filein file:RelVal_Raw_Fake_DATA.root --fileout file:RelVal_Raw_Fake_DATA_HLT_RECO.root'],
'hlt_data_Fake1': ['cmsDriver.py RelVal -s L1REPACK:GCTGT --data --scenario=pp -n 10 --conditions auto:run2_hlt_Fake1 --relval 9000,50 --datatier "RAW" --eventcontent RAW --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run2_25ns --fileout file:RelVal_Raw_Fake1_DATA.root --filein /store/data/Run2015D/MuonEG/RAW/v1/000/256/677/00000/80950A90-745D-E511-92FD-02163E011C5D.root',
'HLTrigger/Configuration/test/OnLine_HLT_Fake1.py',
'cmsDriver.py RelVal -s HLT:Fake1,RAW2DIGI,L1Reco,RECO --data --scenario=pp -n 10 --conditions auto:run2_data_Fake1 --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --era Run2_25ns --processName=HLTRECO --filein file:RelVal_Raw_Fake1_DATA.root --fileout file:RelVal_Raw_Fake1_DATA_HLT_RECO.root'],
'hlt_data_Fake2': ['cmsDriver.py RelVal -s L1REPACK:Full --data --scenario=pp -n 10 --conditions auto:run2_hlt_Fake2 --relval 9000,50 --datatier "RAW" --eventcontent RAW --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run2_2016 --fileout file:RelVal_Raw_Fake2_DATA.root --filein /store/data/Run2016B/JetHT/RAW/v1/000/272/762/00000/C666CDE2-E013-E611-B15A-02163E011DBE.root',
'HLTrigger/Configuration/test/OnLine_HLT_Fake2.py',
'cmsDriver.py RelVal -s HLT:Fake2,RAW2DIGI,L1Reco,RECO --data --scenario=pp -n 10 --conditions auto:run2_data_Fake2 --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT --era Run2_2016 --processName=HLTRECO --filein file:RelVal_Raw_Fake2_DATA.root --fileout file:RelVal_Raw_Fake2_DATA_HLT_RECO.root'],
'hlt_data_GRun' : ['cmsDriver.py RelVal -s L1REPACK:Full --data --scenario=pp -n 10 --conditions auto:run3_hlt_GRun --relval 9000,50 --datatier "RAW" --eventcontent RAW --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run3 --fileout file:RelVal_Raw_GRun_DATA.root --filein /store/data/Run2018D/EphemeralHLTPhysics1/RAW/v1/000/323/775/00000/2E066536-5CF2-B340-A73B-209640F29FF6.root --customise_commands=\'if hasattr(process,"simMuonGEMPadTask"): setattr(process,"simMuonGEMPadTask",cms.Task())\'',
'HLTrigger/Configuration/test/OnLine_HLT_GRun.py',
'cmsDriver.py RelVal -s HLT:GRun,RAW2DIGI,L1Reco,RECO --data --scenario=pp -n 10 --conditions auto:run3_data_GRun --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT,HLTrigger/Configuration/CustomConfigs.CTPPSRun2Geometry --era Run3 --processName=HLTRECO --filein file:RelVal_Raw_GRun_DATA.root --fileout file:RelVal_Raw_GRun_DATA_HLT_RECO.root'],
'hlt_data_HIon' : ['cmsDriver.py RelVal -s L1REPACK:Full --data --scenario=pp -n 10 --conditions auto:run3_hlt_HIon --relval 9000,50 --datatier "RAW" --eventcontent RAW --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run3_pp_on_PbPb --fileout file:RelVal_Raw_HIon_DATA.root --filein /store/data/Run2018D/HIMinimumBias0/RAW/v1/000/325/112/00000/660F62BB-9932-D645-A4A4-0BBBDA3963E8.root --customise_commands=\'if hasattr(process,"simMuonGEMPadTask"): setattr(process,"simMuonGEMPadTask",cms.Task()); from FWCore.ParameterSet.MassReplace import massSearchReplaceAnyInputTag; massSearchReplaceAnyInputTag(process.SimL1Emulator,"rawDataCollector","rawDataRepacker",False,True)\'',
'HLTrigger/Configuration/test/OnLine_HLT_HIon.py',
'cmsDriver.py RelVal -s HLT:HIon,RAW2DIGI,L1Reco,RECO --data --scenario=pp -n 10 --conditions auto:run3_data_HIon --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT,HLTrigger/Configuration/CustomConfigs.CTPPSRun2Geometry --era Run3_pp_on_PbPb --processName=HLTRECO --filein file:RelVal_Raw_HIon_DATA.root --fileout file:RelVal_Raw_HIon_DATA_HLT_RECO.root'],
'hlt_data_PIon' : ['cmsDriver.py RelVal -s L1REPACK:Full --data --scenario=pp -n 10 --conditions auto:run3_hlt_PIon --relval 9000,50 --datatier "RAW" --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run3 --eventcontent RAW --fileout file:RelVal_Raw_PIon_DATA.root --filein /store/data/Run2018D/EphemeralHLTPhysics1/RAW/v1/000/323/775/00000/2E066536-5CF2-B340-A73B-209640F29FF6.root --customise_commands=\'if hasattr(process,"simMuonGEMPadTask"): setattr(process,"simMuonGEMPadTask",cms.Task())\'',
'HLTrigger/Configuration/test/OnLine_HLT_PIon.py',
'cmsDriver.py RelVal -s HLT:PIon,RAW2DIGI,L1Reco,RECO --data --scenario=pp -n 10 --conditions auto:run3_data_PIon --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT,HLTrigger/Configuration/CustomConfigs.CTPPSRun2Geometry --era Run3 --processName=HLTRECO --filein file:RelVal_Raw_PIon_DATA.root --fileout file:RelVal_Raw_PIon_DATA_HLT_RECO.root'],
'hlt_data_PRef' : ['cmsDriver.py RelVal -s L1REPACK:Full --data --scenario=pp -n 10 --conditions auto:run3_hlt_PRef --relval 9000,50 --datatier "RAW" --customise=HLTrigger/Configuration/CustomConfigs.L1T --era Run3 --eventcontent RAW --fileout file:RelVal_Raw_PRef_DATA.root --filein /store/data/Run2018D/EphemeralHLTPhysics1/RAW/v1/000/323/775/00000/2E066536-5CF2-B340-A73B-209640F29FF6.root --customise_commands=\'if hasattr(process,"simMuonGEMPadTask"): setattr(process,"simMuonGEMPadTask",cms.Task())\'',
'HLTrigger/Configuration/test/OnLine_HLT_PRef.py',
'cmsDriver.py RelVal -s HLT:PRef,RAW2DIGI,L1Reco,RECO --data --scenario=pp -n 10 --conditions auto:run3_data_PRef --relval 9000,50 --datatier "RAW-HLT-RECO" --eventcontent FEVTDEBUGHLT --customise=HLTrigger/Configuration/CustomConfigs.L1THLT,HLTrigger/Configuration/CustomConfigs.CTPPSRun2Geometry --era Run3 --processName=HLTRECO --filein file:RelVal_Raw_PRef_DATA.root --fileout file:RelVal_Raw_PRef_DATA_HLT_RECO.root'],
}
return addOnTestsHLT
| dc14f41353c89e488b0dfec13d105608c7519aa5 | 6,184 | py | Python | tfkbnufft/tests/nufft/fft_functions_test.py | chaithyagr/tfkbnufft | da8de17bc5cb738d11150662d0876bec9efb54d8 | ["MIT"] | stars: 23 (2020-03-03 to 2022-02-01) | issues: 29 (2020-05-11 to 2021-07-21) | forks: 6 (2020-06-18 to 2021-07-02) |
import numpy as np
import pytest
from skimage.data import shepp_logan_phantom
import tensorflow as tf
import torch
from tfkbnufft.nufft import fft_functions as tf_fft_functions
from torchkbnufft.nufft import fft_functions as torch_fft_functions
def _crop_center(img, cropx, cropy):
y, x = img.shape
startx = x//2-(cropx//2)
starty = y//2-(cropy//2)
return img[starty:starty+cropy, startx:startx+cropx]
@pytest.mark.parametrize('norm', ['ortho', None])
@pytest.mark.parametrize('multiprocessing', [True, False])
def test_scale_and_fft_on_image_volume(norm, multiprocessing):
# problem definition
x = shepp_logan_phantom().astype(np.complex64)
im_size = x.shape
scaling_coeffs = np.random.randn(*im_size) + 1j * np.random.randn(*im_size)
scaling_coeffs = scaling_coeffs.astype(np.complex64)
grid_size = [2*im_dim for im_dim in im_size]
# torch computations
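    # torchkbnufft represents complex data as stacked (real, imag) channels, hence
    # the stack/unsqueeze here and the recombination into a complex array afterwards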
torch_x = np.stack((np.real(x), np.imag(x)))
torch_x = torch.tensor(torch_x).unsqueeze(0).unsqueeze(0)
torch_scaling_coeffs = torch.tensor(
np.stack((np.real(scaling_coeffs), np.imag(scaling_coeffs)))
)
res_torch = torch_fft_functions.scale_and_fft_on_image_volume(
torch_x,
torch_scaling_coeffs,
torch.tensor(grid_size).float(),
torch.tensor(im_size),
norm,
).numpy()
    res_torch = res_torch[:, :, 0] + 1j * res_torch[:, :, 1]
# tf computations
res_tf = tf_fft_functions.scale_and_fft_on_image_volume(
tf.convert_to_tensor(x)[None, None, ...],
tf.convert_to_tensor(scaling_coeffs),
tf.convert_to_tensor(grid_size),
tf.convert_to_tensor(im_size),
norm,
multiprocessing=multiprocessing,
).numpy()
np.testing.assert_allclose(res_torch, res_tf, rtol=1e-4, atol=2*1e-2)
@pytest.mark.parametrize('norm', ['ortho', None])
@pytest.mark.parametrize('multiprocessing', [True, False])
def test_ifft_and_scale_on_gridded_data(norm, multiprocessing):
# problem definition
x = shepp_logan_phantom().astype(np.complex64)
grid_size = x.shape
im_size = [im_dim//2 for im_dim in grid_size]
scaling_coeffs = np.random.randn(*im_size) + 1j * np.random.randn(*im_size)
scaling_coeffs = scaling_coeffs.astype(np.complex64)
# torch computations
torch_x = np.stack((np.real(x), np.imag(x)))
torch_x = torch.tensor(torch_x).unsqueeze(0).unsqueeze(0)
torch_scaling_coeffs = torch.tensor(
np.stack((np.real(scaling_coeffs), np.imag(scaling_coeffs)))
)
res_torch = torch_fft_functions.ifft_and_scale_on_gridded_data(
torch_x,
torch_scaling_coeffs,
torch.tensor(grid_size).float(),
torch.tensor(im_size),
norm,
).numpy()
    res_torch = res_torch[:, :, 0] + 1j * res_torch[:, :, 1]
# tf computations
res_tf = tf_fft_functions.ifft_and_scale_on_gridded_data(
tf.convert_to_tensor(x)[None, None, ...],
tf.convert_to_tensor(scaling_coeffs),
tf.convert_to_tensor(grid_size),
tf.convert_to_tensor(im_size),
norm,
multiprocessing=multiprocessing,
).numpy()
np.testing.assert_allclose(res_torch, res_tf, rtol=1e-4, atol=2)
@pytest.mark.parametrize('norm', ['ortho', None])
@pytest.mark.parametrize('multiprocessing', [True, False])
def test_scale_and_fft_on_image_volume_3d(norm, multiprocessing):
# problem definition
x = shepp_logan_phantom().astype(np.complex64)
x = _crop_center(x, 128, 128)
x = x[None, ...]
x = np.tile(x, [128, 1, 1])
im_size = x.shape
scaling_coeffs = np.random.randn(*im_size) + 1j * np.random.randn(*im_size)
scaling_coeffs = scaling_coeffs.astype(np.complex64)
grid_size = [2*im_dim for im_dim in im_size]
# torch computations
torch_x = np.stack((np.real(x), np.imag(x)))
torch_x = torch.tensor(torch_x).unsqueeze(0).unsqueeze(0)
torch_scaling_coeffs = torch.tensor(
np.stack((np.real(scaling_coeffs), np.imag(scaling_coeffs)))
)
res_torch = torch_fft_functions.scale_and_fft_on_image_volume(
torch_x,
torch_scaling_coeffs,
torch.tensor(grid_size).float(),
torch.tensor(im_size),
norm,
).numpy()
    res_torch = res_torch[:, :, 0] + 1j * res_torch[:, :, 1]
# tf computations
res_tf = tf_fft_functions.scale_and_fft_on_image_volume(
tf.convert_to_tensor(x)[None, None, ...],
tf.convert_to_tensor(scaling_coeffs),
tf.convert_to_tensor(grid_size),
tf.convert_to_tensor(im_size),
norm,
im_rank=3,
multiprocessing=multiprocessing,
).numpy()
np.testing.assert_allclose(res_torch, res_tf, rtol=1e-4, atol=2*1e-2)
@pytest.mark.parametrize('norm', ['ortho', None])
@pytest.mark.parametrize('multiprocessing', [True, False])
def test_ifft_and_scale_on_gridded_data_3d(norm, multiprocessing):
# problem definition
x = shepp_logan_phantom().astype(np.complex64)
x = _crop_center(x, 128, 128)
x = x[None, ...]
x = np.tile(x, [128, 1, 1])
grid_size = x.shape
im_size = [im_dim//2 for im_dim in grid_size]
scaling_coeffs = np.random.randn(*im_size) + 1j * np.random.randn(*im_size)
scaling_coeffs = scaling_coeffs.astype(np.complex64)
# torch computations
torch_x = np.stack((np.real(x), np.imag(x)))
torch_x = torch.tensor(torch_x).unsqueeze(0).unsqueeze(0)
torch_scaling_coeffs = torch.tensor(
np.stack((np.real(scaling_coeffs), np.imag(scaling_coeffs)))
)
res_torch = torch_fft_functions.ifft_and_scale_on_gridded_data(
torch_x,
torch_scaling_coeffs,
torch.tensor(grid_size).float(),
torch.tensor(im_size),
norm,
).numpy()
    res_torch = res_torch[:, :, 0] + 1j * res_torch[:, :, 1]
# tf computations
res_tf = tf_fft_functions.ifft_and_scale_on_gridded_data(
tf.convert_to_tensor(x)[None, None, ...],
tf.convert_to_tensor(scaling_coeffs),
tf.convert_to_tensor(grid_size),
tf.convert_to_tensor(im_size),
norm,
im_rank=3,
multiprocessing=multiprocessing,
).numpy()
np.testing.assert_allclose(res_torch, res_tf, rtol=1e-4, atol=2)
| 905a09b38eb6d6d645fbe8afbc1e205a9a822406 | 2,647 | py | Python | tests/unittests/test_client.py | shreyb/gracc-request | 51cacafad961351fc21a5fcdf494f26108ba5c10 | ["Apache-2.0"] | stars: null | issues: null | forks: null |
from datetime import datetime, timedelta
import unittest
from graccreq import Client
class TestClient(unittest.TestCase):
def setUp(self):
#self.client = Client.Client("gracc.osg.requests")
pass
def test_summary(self):
# Set the timerange
start_time = datetime(2016, 6, 1)
end_time = start_time + timedelta(days=32)
status = {'num_messages': 0}
def getMessage(msg):
status['num_messages'] += 1
client = Client("gracc.osg.requests", "gracc.osg.requests")
client.query(start_time, end_time, 'summary', getMessage)
self.assertGreater(status['num_messages'], 1)
def test_client_range(self):
# Set the timerange
start_time = datetime(2016, 6, 3)
end_time = start_time + timedelta(days=31)
print(start_time)
print(end_time)
status = {'num_messages': 0}
def getMessage(msg):
status['num_messages'] += 1
client = Client("gracc.osg.requests", "gracc.osg.requests")
client.query(start_time, end_time, 'summary', getMessage)
self.assertGreater(status['num_messages'], 1)
def test_client_long_range(self):
# Set the timerange
start_time = datetime(2016, 6, 1)
end_time = start_time + timedelta(days=31)
print(start_time)
print(end_time)
status = {'num_messages': 0}
def getMessage(msg):
status['num_messages'] += 1
client = Client("gracc.osg.requests", "gracc.osg.requests")
client.query(start_time, end_time, 'summary', getMessage)
self.assertGreater(status['num_messages'], 1)
def test_raw(self):
# Set the timerange
start_time = datetime(2016, 6, 1)
end_time = start_time + timedelta(days=31)
status = {'num_messages': 0}
def getMessage(msg):
status['num_messages'] += 1
client = Client("gracc.osg.requests", "gracc.osg.requests")
client.query(start_time, end_time, 'raw', getMessage)
self.assertGreater(status['num_messages'], 1)
def test_transfer_summary(self):
start_time = datetime(2016, 6, 1)
end_time = start_time + timedelta(days=31)
status = {'num_messages': 0}
def getMessage(msg):
status['num_messages'] += 1
client = Client("gracc.osg.requests", "gracc.osg.requests")
client.query(start_time, end_time, 'transfer_summary', getMessage)
self.assertGreater(status['num_messages'], 1)
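# Sketch (not part of the original suite): every test above repeats the same
# query-and-count pattern, so a module-level helper like this hypothetical
# count_messages would remove the duplication; Client is the import at the top
# of the file.
def count_messages(query_type, start_time, end_time):
    status = {'num_messages': 0}
    def getMessage(msg):
        status['num_messages'] += 1
    client = Client("gracc.osg.requests", "gracc.osg.requests")
    client.query(start_time, end_time, query_type, getMessage)
    return status['num_messages']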
| 31.141176
| 74
| 0.596524
| 305
| 2,647
| 5
| 0.147541
| 0.100328
| 0.167213
| 0.118033
| 0.879344
| 0.860984
| 0.860984
| 0.860984
| 0.826885
| 0.792787
| 0
| 0.02924
| 0.289384
| 2,647
| 84
| 75
| 31.511905
| 0.781499
| 0.045712
| 0
| 0.727273
| 0
| 0
| 0.158667
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| null | null | 0.018182
| 0.054545
| null | null | 0.072727
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 9068d1780318613a7ada061f6be0b6f6ab365e69
| 12,526
| py
| Python
| client/swagger_client/api/notification_api.py
| kakwa/certascale
| 0df8da0f518506500117152fd0e28ee3286949af
| [ "MIT" ] | null | null | null
| client/swagger_client/api/notification_api.py
| kakwa/certascale
| 0df8da0f518506500117152fd0e28ee3286949af
| [ "MIT" ] | null | null | null
| client/swagger_client/api/notification_api.py
| kakwa/certascale
| 0df8da0f518506500117152fd0e28ee3286949af
| [ "MIT" ] | 2
| 2020-11-04T03:07:00.000Z
| 2020-11-05T08:14:33.000Z
|
# coding: utf-8
"""
certascale API
Certascale API documentation # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class NotificationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def notification_acknowledge(self, notification_id, body, **kwargs): # noqa: E501
"""notification_acknowledge # noqa: E501
Acknowledge the notification # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.notification_acknowledge(notification_id, body, async=True)
>>> result = thread.get()
:param async bool
:param str notification_id: (required)
:param NotificationUpdate body: (required)
:return: DefaultMessage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.notification_acknowledge_with_http_info(notification_id, body, **kwargs) # noqa: E501
else:
(data) = self.notification_acknowledge_with_http_info(notification_id, body, **kwargs) # noqa: E501
return data
def notification_acknowledge_with_http_info(self, notification_id, body, **kwargs): # noqa: E501
"""notification_acknowledge # noqa: E501
Acknowledge the notification # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.notification_acknowledge_with_http_info(notification_id, body, async=True)
>>> result = thread.get()
:param async bool
:param str notification_id: (required)
:param NotificationUpdate body: (required)
:return: DefaultMessage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['notification_id', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method notification_acknowledge" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'notification_id' is set
if ('notification_id' not in params or
params['notification_id'] is None):
raise ValueError("Missing the required parameter `notification_id` when calling `notification_acknowledge`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `notification_acknowledge`") # noqa: E501
collection_formats = {}
path_params = {}
if 'notification_id' in params:
path_params['notificationId'] = params['notification_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearerAuth'] # noqa: E501
return self.api_client.call_api(
'/notification/{notificationId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DefaultMessage', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def notification_get(self, notification_id, **kwargs): # noqa: E501
"""notification_get # noqa: E501
Get specific content for a notification # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.notification_get(notification_id, async=True)
>>> result = thread.get()
:param async bool
:param str notification_id: (required)
:return: Notification
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.notification_get_with_http_info(notification_id, **kwargs) # noqa: E501
else:
(data) = self.notification_get_with_http_info(notification_id, **kwargs) # noqa: E501
return data
def notification_get_with_http_info(self, notification_id, **kwargs): # noqa: E501
"""notification_get # noqa: E501
Get specific content for a notification # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.notification_get_with_http_info(notification_id, async=True)
>>> result = thread.get()
:param async bool
:param str notification_id: (required)
:return: Notification
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['notification_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method notification_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'notification_id' is set
if ('notification_id' not in params or
params['notification_id'] is None):
raise ValueError("Missing the required parameter `notification_id` when calling `notification_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'notification_id' in params:
path_params['notificationId'] = params['notification_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearerAuth'] # noqa: E501
return self.api_client.call_api(
'/notification/{notificationId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Notification', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def notification_list(self, **kwargs): # noqa: E501
"""notification_list # noqa: E501
Get all your certificate update notifications # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.notification_list(async=True)
>>> result = thread.get()
:param async bool
:param int next_id:
:return: NotificationList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.notification_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.notification_list_with_http_info(**kwargs) # noqa: E501
return data
def notification_list_with_http_info(self, **kwargs): # noqa: E501
"""notification_list # noqa: E501
Get all your certificate update notifications # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.notification_list_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param int next_id:
:return: NotificationList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['next_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method notification_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'next_id' in params:
query_params.append(('next_id', params['next_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearerAuth'] # noqa: E501
return self.api_client.call_api(
'/notification', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NotificationList', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
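# Usage sketch (illustrative; not part of the generated module). ApiClient() falls
# back to the default configuration, and bearer-token setup is omitted here. The
# async keyword argument follows the module's own docstrings; this codegen output
# predates async becoming a reserved word in Python 3.7.
if __name__ == '__main__':
    api = NotificationApi(ApiClient())
    notifications = api.notification_list()        # synchronous call
    thread = api.notification_list(async=True)     # asynchronous variant, as in the docstrings above
    notifications = thread.get()                   # block until the request thread finishes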
| 37.279762
| 134
| 0.614083
| 1,359
| 12,526
| 5.422369
| 0.11479
| 0.053196
| 0.022798
| 0.029312
| 0.907314
| 0.89578
| 0.88533
| 0.855883
| 0.847741
| 0.837427
| 0
| 0.017705
| 0.296583
| 12,526
| 335
| 135
| 37.391045
| 0.818636
| 0.066342
| 0
| 0.739884
| 1
| 0
| 0.185284
| 0.042591
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.023121
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 90c1dee111c2cad0337b44a3c3126786f982d106
| 27,280
| py
| Python
| jaseci_core/jaseci/jac/jac_parse/jacListener.py
| ChrisIsKing/jaseci
| 0495cb5aed77d563213943a2090de4255241b78c
| [ "MIT" ] | null | null | null
| jaseci_core/jaseci/jac/jac_parse/jacListener.py
| ChrisIsKing/jaseci
| 0495cb5aed77d563213943a2090de4255241b78c
| [ "MIT" ] | null | null | null
| jaseci_core/jaseci/jac/jac_parse/jacListener.py
| ChrisIsKing/jaseci
| 0495cb5aed77d563213943a2090de4255241b78c
| [ "MIT" ] | null | null | null |
# Generated from jac.g4 by ANTLR 4.9.2
from antlr4 import *
if __name__ is not None and "." in __name__:
from .jacParser import jacParser
else:
from jacParser import jacParser
# This class defines a complete listener for a parse tree produced by jacParser.
class jacListener(ParseTreeListener):
# Enter a parse tree produced by jacParser#start.
def enterStart(self, ctx:jacParser.StartContext):
pass
# Exit a parse tree produced by jacParser#start.
def exitStart(self, ctx:jacParser.StartContext):
pass
# Enter a parse tree produced by jacParser#import_module.
def enterImport_module(self, ctx:jacParser.Import_moduleContext):
pass
# Exit a parse tree produced by jacParser#import_module.
def exitImport_module(self, ctx:jacParser.Import_moduleContext):
pass
# Enter a parse tree produced by jacParser#element.
def enterElement(self, ctx:jacParser.ElementContext):
pass
# Exit a parse tree produced by jacParser#element.
def exitElement(self, ctx:jacParser.ElementContext):
pass
# Enter a parse tree produced by jacParser#architype.
def enterArchitype(self, ctx:jacParser.ArchitypeContext):
pass
# Exit a parse tree produced by jacParser#architype.
def exitArchitype(self, ctx:jacParser.ArchitypeContext):
pass
# Enter a parse tree produced by jacParser#walker.
def enterWalker(self, ctx:jacParser.WalkerContext):
pass
# Exit a parse tree produced by jacParser#walker.
def exitWalker(self, ctx:jacParser.WalkerContext):
pass
# Enter a parse tree produced by jacParser#ver_label.
def enterVer_label(self, ctx:jacParser.Ver_labelContext):
pass
# Exit a parse tree produced by jacParser#ver_label.
def exitVer_label(self, ctx:jacParser.Ver_labelContext):
pass
# Enter a parse tree produced by jacParser#namespaces.
def enterNamespaces(self, ctx:jacParser.NamespacesContext):
pass
# Exit a parse tree produced by jacParser#namespaces.
def exitNamespaces(self, ctx:jacParser.NamespacesContext):
pass
# Enter a parse tree produced by jacParser#walk_entry_block.
def enterWalk_entry_block(self, ctx:jacParser.Walk_entry_blockContext):
pass
# Exit a parse tree produced by jacParser#walk_entry_block.
def exitWalk_entry_block(self, ctx:jacParser.Walk_entry_blockContext):
pass
# Enter a parse tree produced by jacParser#walk_exit_block.
def enterWalk_exit_block(self, ctx:jacParser.Walk_exit_blockContext):
pass
# Exit a parse tree produced by jacParser#walk_exit_block.
def exitWalk_exit_block(self, ctx:jacParser.Walk_exit_blockContext):
pass
# Enter a parse tree produced by jacParser#walk_activity_block.
def enterWalk_activity_block(self, ctx:jacParser.Walk_activity_blockContext):
pass
# Exit a parse tree produced by jacParser#walk_activity_block.
def exitWalk_activity_block(self, ctx:jacParser.Walk_activity_blockContext):
pass
# Enter a parse tree produced by jacParser#attr_block.
def enterAttr_block(self, ctx:jacParser.Attr_blockContext):
pass
# Exit a parse tree produced by jacParser#attr_block.
def exitAttr_block(self, ctx:jacParser.Attr_blockContext):
pass
# Enter a parse tree produced by jacParser#attr_stmt.
def enterAttr_stmt(self, ctx:jacParser.Attr_stmtContext):
pass
# Exit a parse tree produced by jacParser#attr_stmt.
def exitAttr_stmt(self, ctx:jacParser.Attr_stmtContext):
pass
# Enter a parse tree produced by jacParser#graph_block.
def enterGraph_block(self, ctx:jacParser.Graph_blockContext):
pass
# Exit a parse tree produced by jacParser#graph_block.
def exitGraph_block(self, ctx:jacParser.Graph_blockContext):
pass
# Enter a parse tree produced by jacParser#graph_block_spawn.
def enterGraph_block_spawn(self, ctx:jacParser.Graph_block_spawnContext):
pass
# Exit a parse tree produced by jacParser#graph_block_spawn.
def exitGraph_block_spawn(self, ctx:jacParser.Graph_block_spawnContext):
pass
# Enter a parse tree produced by jacParser#graph_block_dot.
def enterGraph_block_dot(self, ctx:jacParser.Graph_block_dotContext):
pass
# Exit a parse tree produced by jacParser#graph_block_dot.
def exitGraph_block_dot(self, ctx:jacParser.Graph_block_dotContext):
pass
# Enter a parse tree produced by jacParser#has_root.
def enterHas_root(self, ctx:jacParser.Has_rootContext):
pass
# Exit a parse tree produced by jacParser#has_root.
def exitHas_root(self, ctx:jacParser.Has_rootContext):
pass
# Enter a parse tree produced by jacParser#has_stmt.
def enterHas_stmt(self, ctx:jacParser.Has_stmtContext):
pass
# Exit a parse tree produced by jacParser#has_stmt.
def exitHas_stmt(self, ctx:jacParser.Has_stmtContext):
pass
# Enter a parse tree produced by jacParser#has_assign.
def enterHas_assign(self, ctx:jacParser.Has_assignContext):
pass
# Exit a parse tree produced by jacParser#has_assign.
def exitHas_assign(self, ctx:jacParser.Has_assignContext):
pass
# Enter a parse tree produced by jacParser#can_stmt.
def enterCan_stmt(self, ctx:jacParser.Can_stmtContext):
pass
# Exit a parse tree produced by jacParser#can_stmt.
def exitCan_stmt(self, ctx:jacParser.Can_stmtContext):
pass
# Enter a parse tree produced by jacParser#event_clause.
def enterEvent_clause(self, ctx:jacParser.Event_clauseContext):
pass
# Exit a parse tree produced by jacParser#event_clause.
def exitEvent_clause(self, ctx:jacParser.Event_clauseContext):
pass
# Enter a parse tree produced by jacParser#preset_in_out.
def enterPreset_in_out(self, ctx:jacParser.Preset_in_outContext):
pass
# Exit a parse tree produced by jacParser#preset_in_out.
def exitPreset_in_out(self, ctx:jacParser.Preset_in_outContext):
pass
# Enter a parse tree produced by jacParser#dotted_name.
def enterDotted_name(self, ctx:jacParser.Dotted_nameContext):
pass
# Exit a parse tree produced by jacParser#dotted_name.
def exitDotted_name(self, ctx:jacParser.Dotted_nameContext):
pass
# Enter a parse tree produced by jacParser#name_list.
def enterName_list(self, ctx:jacParser.Name_listContext):
pass
# Exit a parse tree produced by jacParser#name_list.
def exitName_list(self, ctx:jacParser.Name_listContext):
pass
# Enter a parse tree produced by jacParser#expr_list.
def enterExpr_list(self, ctx:jacParser.Expr_listContext):
pass
# Exit a parse tree produced by jacParser#expr_list.
def exitExpr_list(self, ctx:jacParser.Expr_listContext):
pass
# Enter a parse tree produced by jacParser#code_block.
def enterCode_block(self, ctx:jacParser.Code_blockContext):
pass
# Exit a parse tree produced by jacParser#code_block.
def exitCode_block(self, ctx:jacParser.Code_blockContext):
pass
# Enter a parse tree produced by jacParser#node_ctx_block.
def enterNode_ctx_block(self, ctx:jacParser.Node_ctx_blockContext):
pass
# Exit a parse tree produced by jacParser#node_ctx_block.
def exitNode_ctx_block(self, ctx:jacParser.Node_ctx_blockContext):
pass
# Enter a parse tree produced by jacParser#statement.
def enterStatement(self, ctx:jacParser.StatementContext):
pass
# Exit a parse tree produced by jacParser#statement.
def exitStatement(self, ctx:jacParser.StatementContext):
pass
# Enter a parse tree produced by jacParser#if_stmt.
def enterIf_stmt(self, ctx:jacParser.If_stmtContext):
pass
# Exit a parse tree produced by jacParser#if_stmt.
def exitIf_stmt(self, ctx:jacParser.If_stmtContext):
pass
# Enter a parse tree produced by jacParser#try_stmt.
def enterTry_stmt(self, ctx:jacParser.Try_stmtContext):
pass
# Exit a parse tree produced by jacParser#try_stmt.
def exitTry_stmt(self, ctx:jacParser.Try_stmtContext):
pass
# Enter a parse tree produced by jacParser#else_from_try.
def enterElse_from_try(self, ctx:jacParser.Else_from_tryContext):
pass
# Exit a parse tree produced by jacParser#else_from_try.
def exitElse_from_try(self, ctx:jacParser.Else_from_tryContext):
pass
# Enter a parse tree produced by jacParser#elif_stmt.
def enterElif_stmt(self, ctx:jacParser.Elif_stmtContext):
pass
# Exit a parse tree produced by jacParser#elif_stmt.
def exitElif_stmt(self, ctx:jacParser.Elif_stmtContext):
pass
# Enter a parse tree produced by jacParser#else_stmt.
def enterElse_stmt(self, ctx:jacParser.Else_stmtContext):
pass
# Exit a parse tree produced by jacParser#else_stmt.
def exitElse_stmt(self, ctx:jacParser.Else_stmtContext):
pass
# Enter a parse tree produced by jacParser#for_stmt.
def enterFor_stmt(self, ctx:jacParser.For_stmtContext):
pass
# Exit a parse tree produced by jacParser#for_stmt.
def exitFor_stmt(self, ctx:jacParser.For_stmtContext):
pass
# Enter a parse tree produced by jacParser#while_stmt.
def enterWhile_stmt(self, ctx:jacParser.While_stmtContext):
pass
# Exit a parse tree produced by jacParser#while_stmt.
def exitWhile_stmt(self, ctx:jacParser.While_stmtContext):
pass
# Enter a parse tree produced by jacParser#ctrl_stmt.
def enterCtrl_stmt(self, ctx:jacParser.Ctrl_stmtContext):
pass
# Exit a parse tree produced by jacParser#ctrl_stmt.
def exitCtrl_stmt(self, ctx:jacParser.Ctrl_stmtContext):
pass
# Enter a parse tree produced by jacParser#destroy_action.
def enterDestroy_action(self, ctx:jacParser.Destroy_actionContext):
pass
# Exit a parse tree produced by jacParser#destroy_action.
def exitDestroy_action(self, ctx:jacParser.Destroy_actionContext):
pass
# Enter a parse tree produced by jacParser#report_action.
def enterReport_action(self, ctx:jacParser.Report_actionContext):
pass
# Exit a parse tree produced by jacParser#report_action.
def exitReport_action(self, ctx:jacParser.Report_actionContext):
pass
# Enter a parse tree produced by jacParser#walker_action.
def enterWalker_action(self, ctx:jacParser.Walker_actionContext):
pass
# Exit a parse tree produced by jacParser#walker_action.
def exitWalker_action(self, ctx:jacParser.Walker_actionContext):
pass
# Enter a parse tree produced by jacParser#ignore_action.
def enterIgnore_action(self, ctx:jacParser.Ignore_actionContext):
pass
# Exit a parse tree produced by jacParser#ignore_action.
def exitIgnore_action(self, ctx:jacParser.Ignore_actionContext):
pass
# Enter a parse tree produced by jacParser#take_action.
def enterTake_action(self, ctx:jacParser.Take_actionContext):
pass
# Exit a parse tree produced by jacParser#take_action.
def exitTake_action(self, ctx:jacParser.Take_actionContext):
pass
# Enter a parse tree produced by jacParser#expression.
def enterExpression(self, ctx:jacParser.ExpressionContext):
pass
# Exit a parse tree produced by jacParser#expression.
def exitExpression(self, ctx:jacParser.ExpressionContext):
pass
# Enter a parse tree produced by jacParser#assignment.
def enterAssignment(self, ctx:jacParser.AssignmentContext):
pass
# Exit a parse tree produced by jacParser#assignment.
def exitAssignment(self, ctx:jacParser.AssignmentContext):
pass
# Enter a parse tree produced by jacParser#copy_assign.
def enterCopy_assign(self, ctx:jacParser.Copy_assignContext):
pass
# Exit a parse tree produced by jacParser#copy_assign.
def exitCopy_assign(self, ctx:jacParser.Copy_assignContext):
pass
# Enter a parse tree produced by jacParser#inc_assign.
def enterInc_assign(self, ctx:jacParser.Inc_assignContext):
pass
# Exit a parse tree produced by jacParser#inc_assign.
def exitInc_assign(self, ctx:jacParser.Inc_assignContext):
pass
# Enter a parse tree produced by jacParser#connect.
def enterConnect(self, ctx:jacParser.ConnectContext):
pass
# Exit a parse tree produced by jacParser#connect.
def exitConnect(self, ctx:jacParser.ConnectContext):
pass
# Enter a parse tree produced by jacParser#logical.
def enterLogical(self, ctx:jacParser.LogicalContext):
pass
# Exit a parse tree produced by jacParser#logical.
def exitLogical(self, ctx:jacParser.LogicalContext):
pass
# Enter a parse tree produced by jacParser#compare.
def enterCompare(self, ctx:jacParser.CompareContext):
pass
# Exit a parse tree produced by jacParser#compare.
def exitCompare(self, ctx:jacParser.CompareContext):
pass
# Enter a parse tree produced by jacParser#cmp_op.
def enterCmp_op(self, ctx:jacParser.Cmp_opContext):
pass
# Exit a parse tree produced by jacParser#cmp_op.
def exitCmp_op(self, ctx:jacParser.Cmp_opContext):
pass
# Enter a parse tree produced by jacParser#nin.
def enterNin(self, ctx:jacParser.NinContext):
pass
# Exit a parse tree produced by jacParser#nin.
def exitNin(self, ctx:jacParser.NinContext):
pass
# Enter a parse tree produced by jacParser#arithmetic.
def enterArithmetic(self, ctx:jacParser.ArithmeticContext):
pass
# Exit a parse tree produced by jacParser#arithmetic.
def exitArithmetic(self, ctx:jacParser.ArithmeticContext):
pass
# Enter a parse tree produced by jacParser#term.
def enterTerm(self, ctx:jacParser.TermContext):
pass
# Exit a parse tree produced by jacParser#term.
def exitTerm(self, ctx:jacParser.TermContext):
pass
# Enter a parse tree produced by jacParser#factor.
def enterFactor(self, ctx:jacParser.FactorContext):
pass
# Exit a parse tree produced by jacParser#factor.
def exitFactor(self, ctx:jacParser.FactorContext):
pass
# Enter a parse tree produced by jacParser#power.
def enterPower(self, ctx:jacParser.PowerContext):
pass
# Exit a parse tree produced by jacParser#power.
def exitPower(self, ctx:jacParser.PowerContext):
pass
# Enter a parse tree produced by jacParser#func_call.
def enterFunc_call(self, ctx:jacParser.Func_callContext):
pass
# Exit a parse tree produced by jacParser#func_call.
def exitFunc_call(self, ctx:jacParser.Func_callContext):
pass
# Enter a parse tree produced by jacParser#atom.
def enterAtom(self, ctx:jacParser.AtomContext):
pass
# Exit a parse tree produced by jacParser#atom.
def exitAtom(self, ctx:jacParser.AtomContext):
pass
# Enter a parse tree produced by jacParser#ref.
def enterRef(self, ctx:jacParser.RefContext):
pass
# Exit a parse tree produced by jacParser#ref.
def exitRef(self, ctx:jacParser.RefContext):
pass
# Enter a parse tree produced by jacParser#deref.
def enterDeref(self, ctx:jacParser.DerefContext):
pass
# Exit a parse tree produced by jacParser#deref.
def exitDeref(self, ctx:jacParser.DerefContext):
pass
# Enter a parse tree produced by jacParser#built_in.
def enterBuilt_in(self, ctx:jacParser.Built_inContext):
pass
# Exit a parse tree produced by jacParser#built_in.
def exitBuilt_in(self, ctx:jacParser.Built_inContext):
pass
# Enter a parse tree produced by jacParser#cast_built_in.
def enterCast_built_in(self, ctx:jacParser.Cast_built_inContext):
pass
# Exit a parse tree produced by jacParser#cast_built_in.
def exitCast_built_in(self, ctx:jacParser.Cast_built_inContext):
pass
# Enter a parse tree produced by jacParser#obj_built_in.
def enterObj_built_in(self, ctx:jacParser.Obj_built_inContext):
pass
# Exit a parse tree produced by jacParser#obj_built_in.
def exitObj_built_in(self, ctx:jacParser.Obj_built_inContext):
pass
# Enter a parse tree produced by jacParser#dict_built_in.
def enterDict_built_in(self, ctx:jacParser.Dict_built_inContext):
pass
# Exit a parse tree produced by jacParser#dict_built_in.
def exitDict_built_in(self, ctx:jacParser.Dict_built_inContext):
pass
# Enter a parse tree produced by jacParser#list_built_in.
def enterList_built_in(self, ctx:jacParser.List_built_inContext):
pass
# Exit a parse tree produced by jacParser#list_built_in.
def exitList_built_in(self, ctx:jacParser.List_built_inContext):
pass
# Enter a parse tree produced by jacParser#string_built_in.
def enterString_built_in(self, ctx:jacParser.String_built_inContext):
pass
# Exit a parse tree produced by jacParser#string_built_in.
def exitString_built_in(self, ctx:jacParser.String_built_inContext):
pass
# Enter a parse tree produced by jacParser#node_edge_ref.
def enterNode_edge_ref(self, ctx:jacParser.Node_edge_refContext):
pass
# Exit a parse tree produced by jacParser#node_edge_ref.
def exitNode_edge_ref(self, ctx:jacParser.Node_edge_refContext):
pass
# Enter a parse tree produced by jacParser#node_ref.
def enterNode_ref(self, ctx:jacParser.Node_refContext):
pass
# Exit a parse tree produced by jacParser#node_ref.
def exitNode_ref(self, ctx:jacParser.Node_refContext):
pass
# Enter a parse tree produced by jacParser#walker_ref.
def enterWalker_ref(self, ctx:jacParser.Walker_refContext):
pass
# Exit a parse tree produced by jacParser#walker_ref.
def exitWalker_ref(self, ctx:jacParser.Walker_refContext):
pass
# Enter a parse tree produced by jacParser#graph_ref.
def enterGraph_ref(self, ctx:jacParser.Graph_refContext):
pass
# Exit a parse tree produced by jacParser#graph_ref.
def exitGraph_ref(self, ctx:jacParser.Graph_refContext):
pass
# Enter a parse tree produced by jacParser#edge_ref.
def enterEdge_ref(self, ctx:jacParser.Edge_refContext):
pass
# Exit a parse tree produced by jacParser#edge_ref.
def exitEdge_ref(self, ctx:jacParser.Edge_refContext):
pass
# Enter a parse tree produced by jacParser#edge_to.
def enterEdge_to(self, ctx:jacParser.Edge_toContext):
pass
# Exit a parse tree produced by jacParser#edge_to.
def exitEdge_to(self, ctx:jacParser.Edge_toContext):
pass
# Enter a parse tree produced by jacParser#edge_from.
def enterEdge_from(self, ctx:jacParser.Edge_fromContext):
pass
# Exit a parse tree produced by jacParser#edge_from.
def exitEdge_from(self, ctx:jacParser.Edge_fromContext):
pass
# Enter a parse tree produced by jacParser#edge_any.
def enterEdge_any(self, ctx:jacParser.Edge_anyContext):
pass
# Exit a parse tree produced by jacParser#edge_any.
def exitEdge_any(self, ctx:jacParser.Edge_anyContext):
pass
# Enter a parse tree produced by jacParser#list_val.
def enterList_val(self, ctx:jacParser.List_valContext):
pass
# Exit a parse tree produced by jacParser#list_val.
def exitList_val(self, ctx:jacParser.List_valContext):
pass
# Enter a parse tree produced by jacParser#index_slice.
def enterIndex_slice(self, ctx:jacParser.Index_sliceContext):
pass
# Exit a parse tree produced by jacParser#index_slice.
def exitIndex_slice(self, ctx:jacParser.Index_sliceContext):
pass
# Enter a parse tree produced by jacParser#dict_val.
def enterDict_val(self, ctx:jacParser.Dict_valContext):
pass
# Exit a parse tree produced by jacParser#dict_val.
def exitDict_val(self, ctx:jacParser.Dict_valContext):
pass
# Enter a parse tree produced by jacParser#kv_pair.
def enterKv_pair(self, ctx:jacParser.Kv_pairContext):
pass
# Exit a parse tree produced by jacParser#kv_pair.
def exitKv_pair(self, ctx:jacParser.Kv_pairContext):
pass
# Enter a parse tree produced by jacParser#spawn.
def enterSpawn(self, ctx:jacParser.SpawnContext):
pass
# Exit a parse tree produced by jacParser#spawn.
def exitSpawn(self, ctx:jacParser.SpawnContext):
pass
# Enter a parse tree produced by jacParser#spawn_object.
def enterSpawn_object(self, ctx:jacParser.Spawn_objectContext):
pass
# Exit a parse tree produced by jacParser#spawn_object.
def exitSpawn_object(self, ctx:jacParser.Spawn_objectContext):
pass
# Enter a parse tree produced by jacParser#node_spawn.
def enterNode_spawn(self, ctx:jacParser.Node_spawnContext):
pass
# Exit a parse tree produced by jacParser#node_spawn.
def exitNode_spawn(self, ctx:jacParser.Node_spawnContext):
pass
# Enter a parse tree produced by jacParser#graph_spawn.
def enterGraph_spawn(self, ctx:jacParser.Graph_spawnContext):
pass
# Exit a parse tree produced by jacParser#graph_spawn.
def exitGraph_spawn(self, ctx:jacParser.Graph_spawnContext):
pass
# Enter a parse tree produced by jacParser#walker_spawn.
def enterWalker_spawn(self, ctx:jacParser.Walker_spawnContext):
pass
# Exit a parse tree produced by jacParser#walker_spawn.
def exitWalker_spawn(self, ctx:jacParser.Walker_spawnContext):
pass
# Enter a parse tree produced by jacParser#spawn_ctx.
def enterSpawn_ctx(self, ctx:jacParser.Spawn_ctxContext):
pass
# Exit a parse tree produced by jacParser#spawn_ctx.
def exitSpawn_ctx(self, ctx:jacParser.Spawn_ctxContext):
pass
# Enter a parse tree produced by jacParser#filter_ctx.
def enterFilter_ctx(self, ctx:jacParser.Filter_ctxContext):
pass
# Exit a parse tree produced by jacParser#filter_ctx.
def exitFilter_ctx(self, ctx:jacParser.Filter_ctxContext):
pass
# Enter a parse tree produced by jacParser#spawn_assign.
def enterSpawn_assign(self, ctx:jacParser.Spawn_assignContext):
pass
# Exit a parse tree produced by jacParser#spawn_assign.
def exitSpawn_assign(self, ctx:jacParser.Spawn_assignContext):
pass
# Enter a parse tree produced by jacParser#filter_compare.
def enterFilter_compare(self, ctx:jacParser.Filter_compareContext):
pass
# Exit a parse tree produced by jacParser#filter_compare.
def exitFilter_compare(self, ctx:jacParser.Filter_compareContext):
pass
# Enter a parse tree produced by jacParser#any_type.
def enterAny_type(self, ctx:jacParser.Any_typeContext):
pass
# Exit a parse tree produced by jacParser#any_type.
def exitAny_type(self, ctx:jacParser.Any_typeContext):
pass
# Enter a parse tree produced by jacParser#dot_graph.
def enterDot_graph(self, ctx:jacParser.Dot_graphContext):
pass
# Exit a parse tree produced by jacParser#dot_graph.
def exitDot_graph(self, ctx:jacParser.Dot_graphContext):
pass
# Enter a parse tree produced by jacParser#dot_stmt_list.
def enterDot_stmt_list(self, ctx:jacParser.Dot_stmt_listContext):
pass
# Exit a parse tree produced by jacParser#dot_stmt_list.
def exitDot_stmt_list(self, ctx:jacParser.Dot_stmt_listContext):
pass
# Enter a parse tree produced by jacParser#dot_stmt.
def enterDot_stmt(self, ctx:jacParser.Dot_stmtContext):
pass
# Exit a parse tree produced by jacParser#dot_stmt.
def exitDot_stmt(self, ctx:jacParser.Dot_stmtContext):
pass
# Enter a parse tree produced by jacParser#dot_attr_stmt.
def enterDot_attr_stmt(self, ctx:jacParser.Dot_attr_stmtContext):
pass
# Exit a parse tree produced by jacParser#dot_attr_stmt.
def exitDot_attr_stmt(self, ctx:jacParser.Dot_attr_stmtContext):
pass
# Enter a parse tree produced by jacParser#dot_attr_list.
def enterDot_attr_list(self, ctx:jacParser.Dot_attr_listContext):
pass
# Exit a parse tree produced by jacParser#dot_attr_list.
def exitDot_attr_list(self, ctx:jacParser.Dot_attr_listContext):
pass
# Enter a parse tree produced by jacParser#dot_a_list.
def enterDot_a_list(self, ctx:jacParser.Dot_a_listContext):
pass
# Exit a parse tree produced by jacParser#dot_a_list.
def exitDot_a_list(self, ctx:jacParser.Dot_a_listContext):
pass
# Enter a parse tree produced by jacParser#dot_edge_stmt.
def enterDot_edge_stmt(self, ctx:jacParser.Dot_edge_stmtContext):
pass
# Exit a parse tree produced by jacParser#dot_edge_stmt.
def exitDot_edge_stmt(self, ctx:jacParser.Dot_edge_stmtContext):
pass
# Enter a parse tree produced by jacParser#dot_edgeRHS.
def enterDot_edgeRHS(self, ctx:jacParser.Dot_edgeRHSContext):
pass
# Exit a parse tree produced by jacParser#dot_edgeRHS.
def exitDot_edgeRHS(self, ctx:jacParser.Dot_edgeRHSContext):
pass
# Enter a parse tree produced by jacParser#dot_edgeop.
def enterDot_edgeop(self, ctx:jacParser.Dot_edgeopContext):
pass
# Exit a parse tree produced by jacParser#dot_edgeop.
def exitDot_edgeop(self, ctx:jacParser.Dot_edgeopContext):
pass
# Enter a parse tree produced by jacParser#dot_node_stmt.
def enterDot_node_stmt(self, ctx:jacParser.Dot_node_stmtContext):
pass
# Exit a parse tree produced by jacParser#dot_node_stmt.
def exitDot_node_stmt(self, ctx:jacParser.Dot_node_stmtContext):
pass
# Enter a parse tree produced by jacParser#dot_node_id.
def enterDot_node_id(self, ctx:jacParser.Dot_node_idContext):
pass
# Exit a parse tree produced by jacParser#dot_node_id.
def exitDot_node_id(self, ctx:jacParser.Dot_node_idContext):
pass
# Enter a parse tree produced by jacParser#dot_port.
def enterDot_port(self, ctx:jacParser.Dot_portContext):
pass
# Exit a parse tree produced by jacParser#dot_port.
def exitDot_port(self, ctx:jacParser.Dot_portContext):
pass
# Enter a parse tree produced by jacParser#dot_subgraph.
def enterDot_subgraph(self, ctx:jacParser.Dot_subgraphContext):
pass
# Exit a parse tree produced by jacParser#dot_subgraph.
def exitDot_subgraph(self, ctx:jacParser.Dot_subgraphContext):
pass
# Enter a parse tree produced by jacParser#dot_id.
def enterDot_id(self, ctx:jacParser.Dot_idContext):
pass
# Exit a parse tree produced by jacParser#dot_id.
def exitDot_id(self, ctx:jacParser.Dot_idContext):
pass
del jacParser
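# Usage sketch (not ANTLR-generated): listener subclasses are driven by a
# ParseTreeWalker. jacLexer is assumed to be the lexer generated from the same
# jac.g4 grammar, and jacParser is re-imported because the name was deleted above;
# FileStream, CommonTokenStream and ParseTreeWalker come from the antlr4 import.
if __name__ == '__main__':
    from jacLexer import jacLexer
    from jacParser import jacParser
    class WalkerCounter(jacListener):
        # minimal subclass: count walker declarations while walking the tree
        def __init__(self):
            self.walkers = 0
        def enterWalker(self, ctx):
            self.walkers += 1
    stream = FileStream('example.jac')                       # placeholder input path
    parser = jacParser(CommonTokenStream(jacLexer(stream)))
    counter = WalkerCounter()
    ParseTreeWalker().walk(counter, parser.start())          # 'start' is the grammar's entry rule
    print(counter.walkers)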
| 30.21041
| 81
| 0.717889
| 3,617
| 27,280
| 5.246337
| 0.08073
| 0.062922
| 0.104869
| 0.188765
| 0.893971
| 0.835371
| 0.833843
| 0.66015
| 0.573988
| 0.190135
| 0
| 0.000234
| 0.216202
| 27,280
| 903
| 82
| 30.21041
| 0.887242
| 0.377199
| 0
| 0.491315
| 1
| 0
| 0.00006
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.491315
| false
| 0.491315
| 0.012407
| 0
| 0.506203
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 8
| 90c45f5591341c39e2d29df4038d4e4c19d826c5
| 16,641
| py
| Python
| spekev2_verification_testsuite/test_dash_preset_video_1_preset_audio_1.py
| crvinay/speke-reference-server
| cbd111704639e2c0e6ba506081e71ac61424881d
| [ "Apache-2.0" ] | 88
| 2018-05-22T17:26:10.000Z
| 2022-03-02T12:33:26.000Z
| spekev2_verification_testsuite/test_dash_preset_video_1_preset_audio_1.py
| crvinay/speke-reference-server
| cbd111704639e2c0e6ba506081e71ac61424881d
| [ "Apache-2.0" ] | 42
| 2018-06-11T19:19:23.000Z
| 2022-03-10T12:34:33.000Z
| spekev2_verification_testsuite/test_dash_preset_video_1_preset_audio_1.py
| crvinay/speke-reference-server
| cbd111704639e2c0e6ba506081e71ac61424881d
| [ "Apache-2.0" ] | 39
| 2018-07-10T23:15:05.000Z
| 2022-03-04T23:22:52.000Z
|
import xml.etree.ElementTree as ET
import pytest
import requests
from . import utils
@pytest.fixture(scope="session")
def widevine_response(spekev2_url):
test_request_data = utils.read_xml_file_contents(utils.GENERIC_WIDEVINE_TEST_FILE)
response = utils.speke_v2_request(spekev2_url, test_request_data)
return response.text
@pytest.fixture(scope="session")
def widevine_pssh_cpd_response(spekev2_url):
test_request_data = utils.read_xml_file_contents(utils.WIDEVINE_PSSH_CPD_TEST_FILE)
response = utils.speke_v2_request(spekev2_url, test_request_data)
return response.text
@pytest.fixture(scope="session")
def playready_response(spekev2_url):
test_request_data = utils.read_xml_file_contents(utils.GENERIC_PLAYREADY_TEST_FILE)
response = utils.speke_v2_request(spekev2_url, test_request_data)
return response.text
@pytest.fixture(scope="session")
def playready_pssh_cpd_response(spekev2_url):
test_request_data = utils.read_xml_file_contents(utils.PLAYREADY_PSSH_CPD_TEST_FILE)
response = utils.speke_v2_request(spekev2_url, test_request_data)
return response.text
@pytest.fixture(scope="session")
def widevine_playready_response(spekev2_url):
test_request_data = utils.read_xml_file_contents(utils.WIDEVINE_PLAYREADY_TEST_FILE)
response = utils.speke_v2_request(spekev2_url, test_request_data)
return response.text
def test_dash_widevine_no_rotation(widevine_response):
root_cpix = ET.fromstring(widevine_response)
assert all(attribute in root_cpix.attrib for attribute in utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['CPIX']), \
f"All mandatory attributes: {utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['CPIX']} for CPIX element are expected in the response"
assert root_cpix.get('version') == '2.3', \
"Attribute: version value for CPIX element is expected to be 2.3"
content_key_list_element = root_cpix.find('./{urn:dashif:org:cpix}ContentKeyList')
content_key_elements = content_key_list_element.findall('{urn:dashif:org:cpix}ContentKey')
assert len(content_key_elements) == 2, \
"2 ContentKey elements are expected under ContentKeyList"
assert content_key_elements[0].get('kid') != content_key_elements[1].get('kid'), \
"kid attribute values for the different ContentKey elements under ContentKeyList are expected to be different"
for content_key_element in content_key_elements:
assert content_key_element.find('./{urn:dashif:org:cpix}Data/{urn:ietf:params:xml:ns:keyprov:pskc}Secret/{urn:ietf:params:xml:ns:keyprov:pskc}PlainValue').text, \
"PlainValue child element under Secret is expected to contain data for this request"
assert content_key_element.get('commonEncryptionScheme') == 'cenc', \
"commonEncryptionScheme attribute for ContentKey is expected to contain the value cenc"
drm_system_list_element = root_cpix.find('./{urn:dashif:org:cpix}DRMSystemList')
drm_system_elements = drm_system_list_element.findall('./{urn:dashif:org:cpix}DRMSystem')
assert len(drm_system_elements) == 2, \
"Two DRMSystem elements are expected, one for Video and other for Audio in this response"
assert drm_system_elements[0].get('kid') != drm_system_elements[1].get('kid')
for drm_system_element in drm_system_elements:
assert all(attribute in drm_system_element.attrib for attribute in utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['DRMSystem']), \
f"All mandatory attributes: {utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['DRMSystem']} for DRMSystem element are expected in the response"
assert drm_system_element.get('systemId') == utils.WIDEVINE_SYSTEM_ID, \
"SystemID for Widevine is expected in the response and must remain unchanged and must remain unchanged from the request"
pssh_data = drm_system_element.findall('./{urn:dashif:org:cpix}PSSH')
assert len(pssh_data) == 1, \
"Exactly 1 PSSH element is expected"
assert pssh_data[0].text, \
"PSSH element is expected to contain data"
smooth_streaming_protection_header_data_element = drm_system_element.findall('./{urn:dashif:org:cpix}SmoothStreamingProtectionHeaderData')
assert not smooth_streaming_protection_header_data_element, \
"SmoothStreamingProtectionHeaderData is not expected in this response"
content_key_usage_rule_list_element = root_cpix.find('./{urn:dashif:org:cpix}ContentKeyUsageRuleList')
content_key_usage_rule_elements = content_key_usage_rule_list_element.findall('./{urn:dashif:org:cpix}ContentKeyUsageRule')
assert len(content_key_usage_rule_elements) == 2, \
"Exactly 2 ContentKeyUsageRule elements are expected under ContentKeyUsageRuleList in this response"
assert content_key_usage_rule_elements[0].get('kid') != content_key_usage_rule_elements[1].get('kid'), \
"kid attribute values for the different ContentKeyUsageRule are expected to be different"
assert content_key_usage_rule_elements[0].get('intendedTrackType') != content_key_usage_rule_elements[1].get('intendedTrackType'), \
"intendedTrackType attribute values for the different ContentKeyUsageRule are expected to be different"
for content_key_usage_rule_element in content_key_usage_rule_elements:
assert all(attribute in content_key_usage_rule_element.attrib for attribute in utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['ContentKeyUsageRule']), \
f"All mandatory attributes: {utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['ContentKeyUsageRule']} are expected for ContentKeyUsageRule element"
assert content_key_usage_rule_element.get('intendedTrackType') in utils.SPEKE_V2_SUPPORTED_INTENDED_TRACK_TYPES, \
"intendedTrackType is a mandatory element for ContentKeyUsageRule"
def test_dash_playready_no_rotation(playready_response):
root_cpix = ET.fromstring(playready_response)
assert all(attribute in root_cpix.attrib for attribute in utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['CPIX']), \
f"All mandatory attributes: {utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['CPIX']} for CPIX element are expected in the response"
assert root_cpix.get('version') == '2.3', \
"Attribute: version value for CPIX element is expected to be 2.3"
content_key_list_element = root_cpix.find('./{urn:dashif:org:cpix}ContentKeyList')
content_key_elements = content_key_list_element.findall('{urn:dashif:org:cpix}ContentKey')
assert len(content_key_elements) == 2, \
"2 ContentKey elements are expected under ContentKeyList"
assert content_key_elements[0].get('kid') != content_key_elements[1].get('kid'), \
"kid attribute values for the different ContentKey elements under ContentKeyList are expected to be different"
for content_key_element in content_key_elements:
assert content_key_element.find('./{urn:dashif:org:cpix}Data/{urn:ietf:params:xml:ns:keyprov:pskc}Secret/{urn:ietf:params:xml:ns:keyprov:pskc}PlainValue').text, \
"PlainValue child element under Secret is expected to contain data for this request"
assert content_key_element.get('commonEncryptionScheme') == 'cenc', \
"commonEncryptionScheme attribute for ContentKey is expected to contain the value cenc"
drm_system_list_element = root_cpix.find('./{urn:dashif:org:cpix}DRMSystemList')
drm_system_elements = drm_system_list_element.findall('./{urn:dashif:org:cpix}DRMSystem')
assert len(drm_system_elements) == 2, \
"Two DRMSystem elements are expected, one for Video and other for Audio in this response"
assert drm_system_elements[0].get('kid') != drm_system_elements[1].get('kid'), \
"kid attribute values for the different DRMSystem elements are expected to be different"
for drm_system_element in drm_system_elements:
assert all(attribute in drm_system_element.attrib for attribute in utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['DRMSystem']), \
f"All mandatory attributes: {utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['DRMSystem']} for DRMSystem element are expected in the response"
assert drm_system_element.get('systemId') == utils.PLAYREADY_SYSTEM_ID, \
"SystemID for PlayReady is expected in the response and must remain unchanged from the request"
pssh_data = drm_system_element.findall('./{urn:dashif:org:cpix}PSSH')
assert len(pssh_data) == 1, \
"Exactly 1 PSSH element is expected in the response"
assert pssh_data[0].text, \
"PSSH element is expected to contain data"
content_key_usage_rule_list_element = root_cpix.find('./{urn:dashif:org:cpix}ContentKeyUsageRuleList')
content_key_usage_rule_elements = content_key_usage_rule_list_element.findall('./{urn:dashif:org:cpix}ContentKeyUsageRule')
assert len(content_key_usage_rule_elements) == 2, \
"Exactly 2 ContentKeyUsageRule elements are expected under ContentKeyUsageRuleList in this response"
assert content_key_usage_rule_elements[0].get('kid') != content_key_usage_rule_elements[1].get('kid'), \
"kid attribute values for the different ContentKeyUsageRule are expected to be different"
assert content_key_usage_rule_elements[0].get('intendedTrackType') != content_key_usage_rule_elements[1].get('intendedTrackType'), \
"intendedTrackType attribute values for the different ContentKeyUsageRule are expected to be different"
for content_key_usage_rule_element in content_key_usage_rule_elements:
assert all(attribute in content_key_usage_rule_element.attrib for attribute in utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['ContentKeyUsageRule']), \
f"All mandatory attributes: {utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['ContentKeyUsageRule']} are expected for ContentKeyUsageRule element"
assert content_key_usage_rule_element.get('intendedTrackType') in utils.SPEKE_V2_SUPPORTED_INTENDED_TRACK_TYPES, \
"intendedTrackType is a mandatory element for ContentKeyUsageRule"
def test_dash_widevine_playready_no_rotation(widevine_playready_response):
root_cpix = ET.fromstring(widevine_playready_response)
assert all(attribute in root_cpix.attrib for attribute in utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['CPIX']), \
f"All mandatory attributes: {utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['CPIX']} for CPIX element are expected in the response"
assert root_cpix.get('version') == '2.3', \
"Attribute: version value for CPIX element is expected to be 2.3"
content_key_list_element = root_cpix.find('./{urn:dashif:org:cpix}ContentKeyList')
content_key_elements = content_key_list_element.findall('{urn:dashif:org:cpix}ContentKey')
assert len(content_key_elements) == 2, \
"2 ContentKey elements are expected under ContentKeyList"
assert content_key_elements[0].get('kid') != content_key_elements[1].get('kid'), \
"kid attribute values for the different ContentKey elements under ContentKeyList are expected to be different"
for content_key_element in content_key_elements:
assert content_key_element.find('./{urn:dashif:org:cpix}Data/{urn:ietf:params:xml:ns:keyprov:pskc}Secret/{urn:ietf:params:xml:ns:keyprov:pskc}PlainValue').text, \
"PlainValue child element under Secret is expected to contain data for this request"
assert content_key_element.get('commonEncryptionScheme') == 'cenc', \
"commonEncryptionScheme attribute for ContentKey is expected to contain the value cenc"
drm_system_list_element = root_cpix.find('./{urn:dashif:org:cpix}DRMSystemList')
drm_system_elements = drm_system_list_element.findall('./{urn:dashif:org:cpix}DRMSystem')
assert len(drm_system_elements) == 4, \
"4 DRMSystem elements are expected, 2 for Widewine and 2 for PlayReady and 1 each for Video and Audio"
find_string_for_playready = "./{urn:dashif:org:cpix}DRMSystem[@systemId='" + utils.PLAYREADY_SYSTEM_ID + "']"
drm_system_elements_for_playready = drm_system_list_element.findall(find_string_for_playready)
assert len(drm_system_elements_for_playready) == 2, "Two DRMSystem elements for Playready are expected"
assert drm_system_elements_for_playready[0].get('kid') != drm_system_elements_for_playready[1].get('kid'), \
"kid attribute values for the 2 DRM elements for Playready are expected to be different"
find_string_for_widevine = "./{urn:dashif:org:cpix}DRMSystem[@systemId='" + utils.WIDEVINE_SYSTEM_ID + "']"
drm_system_elements_for_widevine = drm_system_list_element.findall(find_string_for_widevine)
assert len(drm_system_elements_for_widevine) == 2, "Two DRMSystem elements for Widevine are expected"
assert drm_system_elements_for_widevine[0].get('kid') != drm_system_elements_for_widevine[1].get('kid'), \
"kid attribute values for the 2 DRM elements for Widevine are expected to be different"
for drm_system_element in drm_system_elements:
assert all(attribute in drm_system_element.attrib for attribute in utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['DRMSystem']), \
f"All mandatory attributes: {utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['DRMSystem']} for DRMSystem element are expected in the response"
assert drm_system_element.get('systemId') in [utils.WIDEVINE_SYSTEM_ID, utils.PLAYREADY_SYSTEM_ID], \
"systemId value is expected to be either Widevine or Playready"
content_key_usage_rule_list_element = root_cpix.find('./{urn:dashif:org:cpix}ContentKeyUsageRuleList')
content_key_usage_rule_elements = content_key_usage_rule_list_element.findall('./{urn:dashif:org:cpix}ContentKeyUsageRule')
assert len(content_key_usage_rule_elements) == 2, \
"Exactly 2 ContentKeyUsageRule elements are expected under ContentKeyUsageRuleList in this response"
assert content_key_usage_rule_elements[0].get('kid') != content_key_usage_rule_elements[1].get('kid'), \
"kid attribute values for the different ContentKeyUsageRule elements are expected to be different"
assert content_key_usage_rule_elements[0].get('intendedTrackType') != content_key_usage_rule_elements[1].get('intendedTrackType')
for content_key_usage_rule_element in content_key_usage_rule_elements:
assert all(attribute in content_key_usage_rule_element.attrib for attribute in utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['ContentKeyUsageRule']), \
f"All mandatory attributes: {utils.SPEKE_V2_GENERIC_RESPONSE_ATTRIBS_DICT['ContentKeyUsageRule']} are expected for ContentKeyUsageRule element"
assert content_key_usage_rule_element.get('intendedTrackType') in utils.SPEKE_V2_SUPPORTED_INTENDED_TRACK_TYPES, \
"intendedTrackType is a mandatory element for ContentKeyUsageRule"
def test_dash_widevine_pssh_cpd_no_rotation(widevine_pssh_cpd_response):
root_cpix = ET.fromstring(widevine_pssh_cpd_response)
drm_system_list_element = root_cpix.find('./{urn:dashif:org:cpix}DRMSystemList')
drm_system_elements = drm_system_list_element.findall('./{urn:dashif:org:cpix}DRMSystem')
for drm_system_element in drm_system_elements:
pssh_data_bytes = drm_system_element.find('./{urn:dashif:org:cpix}PSSH')
content_protection_data_bytes = drm_system_element.find('./{urn:dashif:org:cpix}ContentProtectionData')
content_protection_data_string = utils.decode_b64_bytes(content_protection_data_bytes.text)
pssh_in_cpd = ET.fromstring(content_protection_data_string)
# Assert that the PSSH in the ContentProtectionData is the same as the PSSH box
assert pssh_data_bytes.text == pssh_in_cpd.text, \
"Content in PSSH box and the requested content in ContentProtectionData are expected to be the same"
def test_dash_playready_pssh_cpd_no_rotation(playready_pssh_cpd_response):
root_cpix = ET.fromstring(playready_pssh_cpd_response)
drm_system_list_element = root_cpix.find('./{urn:dashif:org:cpix}DRMSystemList')
drm_system_elements = drm_system_list_element.findall('./{urn:dashif:org:cpix}DRMSystem')
for drm_system_element in drm_system_elements:
pssh_data_bytes = drm_system_element.find('./{urn:dashif:org:cpix}PSSH')
content_protection_data_bytes = drm_system_element.find('./{urn:dashif:org:cpix}ContentProtectionData')
content_protection_data_string = utils.decode_b64_bytes(content_protection_data_bytes.text)
cpd_xml = '<cpd>' + content_protection_data_string + '</cpd>'
cpd_root = ET.fromstring(cpd_xml)
pssh_in_cpd = cpd_root.find("./{urn:mpeg:cenc:2013}pssh")
# Assert that the PSSH in the ContentProtectionData is the same as the PSSH box
assert pssh_data_bytes.text == pssh_in_cpd.text, \
"Content in PSSH box and the requested content in ContentProtectionData are expected to be the same"
| 69.627615
| 170
| 0.778198
| 2,266
| 16,641
| 5.407767
| 0.0609
| 0.05386
| 0.044067
| 0.055819
| 0.939775
| 0.917333
| 0.883711
| 0.861351
| 0.85164
| 0.846091
| 0
| 0.007573
| 0.135088
| 16,641
| 239
| 171
| 69.627615
| 0.843813
| 0.004567
| 0
| 0.706186
| 0
| 0.015464
| 0.400869
| 0.142297
| 0
| 0
| 0
| 0
| 0.283505
| 1
| 0.051546
| false
| 0
| 0.020619
| 0
| 0.097938
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 90d99f68d719b343329f8e42320ef0cb06205a2b
| 4,364
| py
| Python
| PW_explorer/Input_Parsers/Telingo_Parser/Antlr_Files/Telingo_OutputListener.py
| idaks/PW-explorer
| 2ea90722924ed2c0a04805f1588f304affc36354
| [ "Apache-2.0" ] | 15
| 2017-07-11T13:34:22.000Z
| 2021-08-16T12:32:51.000Z
| PW_explorer/Input_Parsers/Telingo_Parser/Antlr_Files/Telingo_OutputListener.py
| idaks/PW-explorer
| 2ea90722924ed2c0a04805f1588f304affc36354
| [ "Apache-2.0" ] | 34
| 2018-10-26T14:39:47.000Z
| 2020-08-03T12:19:26.000Z
| PW_explorer/Input_Parsers/Telingo_Parser/Antlr_Files/Telingo_OutputListener.py
| idaks/PW-explorer
| 2ea90722924ed2c0a04805f1588f304affc36354
| [ "Apache-2.0" ] | 1
| 2017-08-09T05:04:56.000Z
| 2017-08-09T05:04:56.000Z
|
# Generated from PW_explorer/Input_Parsers/Telingo_Parser/Antlr_Files/Telingo_Output.g4 by ANTLR 4.7.1
from antlr4 import *

if __name__ is not None and "." in __name__:
    from .Telingo_OutputParser import Telingo_OutputParser
else:
    from Telingo_OutputParser import Telingo_OutputParser


# This class defines a complete listener for a parse tree produced by Telingo_OutputParser.
class Telingo_OutputListener(ParseTreeListener):

    # Enter a parse tree produced by Telingo_OutputParser#telingoOutput.
    def enterTelingoOutput(self, ctx:Telingo_OutputParser.TelingoOutputContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#telingoOutput.
    def exitTelingoOutput(self, ctx:Telingo_OutputParser.TelingoOutputContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#pw.
    def enterPw(self, ctx:Telingo_OutputParser.PwContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#pw.
    def exitPw(self, ctx:Telingo_OutputParser.PwContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#state_desc.
    def enterState_desc(self, ctx:Telingo_OutputParser.State_descContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#state_desc.
    def exitState_desc(self, ctx:Telingo_OutputParser.State_descContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#fact.
    def enterFact(self, ctx:Telingo_OutputParser.FactContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#fact.
    def exitFact(self, ctx:Telingo_OutputParser.FactContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#fact_content.
    def enterFact_content(self, ctx:Telingo_OutputParser.Fact_contentContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#fact_content.
    def exitFact_content(self, ctx:Telingo_OutputParser.Fact_contentContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#fact_text.
    def enterFact_text(self, ctx:Telingo_OutputParser.Fact_textContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#fact_text.
    def exitFact_text(self, ctx:Telingo_OutputParser.Fact_textContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#summary.
    def enterSummary(self, ctx:Telingo_OutputParser.SummaryContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#summary.
    def exitSummary(self, ctx:Telingo_OutputParser.SummaryContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#models.
    def enterModels(self, ctx:Telingo_OutputParser.ModelsContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#models.
    def exitModels(self, ctx:Telingo_OutputParser.ModelsContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#optimum.
    def enterOptimum(self, ctx:Telingo_OutputParser.OptimumContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#optimum.
    def exitOptimum(self, ctx:Telingo_OutputParser.OptimumContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#optimization.
    def enterOptimization(self, ctx:Telingo_OutputParser.OptimizationContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#optimization.
    def exitOptimization(self, ctx:Telingo_OutputParser.OptimizationContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#calls.
    def enterCalls(self, ctx:Telingo_OutputParser.CallsContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#calls.
    def exitCalls(self, ctx:Telingo_OutputParser.CallsContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#time.
    def enterTime(self, ctx:Telingo_OutputParser.TimeContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#time.
    def exitTime(self, ctx:Telingo_OutputParser.TimeContext):
        pass

    # Enter a parse tree produced by Telingo_OutputParser#cpuTime.
    def enterCpuTime(self, ctx:Telingo_OutputParser.CpuTimeContext):
        pass

    # Exit a parse tree produced by Telingo_OutputParser#cpuTime.
    def exitCpuTime(self, ctx:Telingo_OutputParser.CpuTimeContext):
        pass
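A hedged usage sketch for the generated listener: build the standard lexer → token stream → parser pipeline and walk the tree with antlr4's ParseTreeWalker. Telingo_OutputLexer is assumed to be the lexer ANTLR generated alongside this parser (only the parser is imported above):

from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker
from Telingo_OutputLexer import Telingo_OutputLexer  # assumed generated module

def walk_telingo_output(text, listener=None):
    # Lexer -> token stream -> parser, then walk the tree produced by the
    # grammar's entry rule (telingoOutput).
    lexer = Telingo_OutputLexer(InputStream(text))
    parser = Telingo_OutputParser(CommonTokenStream(lexer))
    tree = parser.telingoOutput()
    ParseTreeWalker().walk(listener or Telingo_OutputListener(), tree)
    return tree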
app/views.py | jerryz123/quizzinator | CC-BY-3.0 | 155 bytes | Python | 90e09cd66f2f7a0706cf6e269ca581daa35d2523
from app import app
from flask import render_template

# Handlers are disabled (kept inside a string); note that Flask's template
# helper is render_template, not return_template. Route paths are illustrative.
"""
@app.route('/')
def index():
    return render_template("index.html")

@app.route('/results')
def results():
    return render_template("results.html")
"""
mini_project/code_file_2.py | MagedMohamedTurk/Python-Test-Function | CNRI-Python | 169 bytes | Python | 2922d7098987f749e88677378732e3aa90d67e9c
def sum_string(first_name, last_name):
    """Combine first and last name into a single space-separated string.

    :returns: the combined first and last name
    """
    return first_name + ' ' + last_name
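In keeping with the repository's testing focus, a minimal pytest-style check (the test function name is hypothetical):

def test_sum_string():
    assert sum_string("Grace", "Hopper") == "Grace Hopper"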
elliot/recommender/latent_factor_models/MF2020/__init__.py | gategill/elliot | Apache-2.0 | 22 bytes | Python | 292f64bd0c5ed8f0670a08d0748f0c2c17cc8b01
from .MF import MF2020
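This one-line __init__ re-exports the model class so callers can import it from the package path rather than the internal module; a hypothetical usage:

from elliot.recommender.latent_factor_models.MF2020 import MF2020  # rather than from ...MF2020.MF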
src/manifests/routes.py | unikubehq/manifests | Apache-2.0 | 174 bytes | Python | 2931a379b4932d8c14a5dbd190b97eb55daf7a12
from sanic import Sanic
from manifests.generator import generate_manifests


def setup_routes(app: Sanic):
    app.add_route(generate_manifests, "/<environment_id:string>")
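A minimal sketch of wiring setup_routes into an application; the app name and port are illustrative assumptions, not taken from the repository:

app = Sanic("manifests")
setup_routes(app)

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)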
app/migrations/__init__.py | bmwant21/bmwlog | Unlicense | 128 bytes | Python | 2958f6c061e096954e683b40b4e25588e19012a2
from .migration001 import m_001
from .migration002 import m_002
from .migration003 import m_003
from .migration004 import m_004
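A hedged sketch of applying these sequentially numbered migrations in order, assuming each m_00X is a callable that performs its schema change (the migration interface itself is not shown in this file):

MIGRATIONS = [m_001, m_002, m_003, m_004]

def run_all_migrations():
    # Apply migrations strictly in numeric order; each entry is assumed to
    # be a callable that performs its change (an assumption for illustration).
    for migration in MIGRATIONS:
        migration()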
pypy/translator/microbench/pybench/Exceptions.py | camillobruni/pygirl (also mirrored in woodrow/pyoac) | MIT | 12,909 bytes | Python | 295c83a464115fab77ff6e8d66a3c4276268d96d
from pybench import Test
class TryRaiseExcept(Test):
version = 0.1
operations = 2 + 3
rounds = 60000
def test(self):
error = ValueError
for i in xrange(self.rounds):
try:
raise error
except:
pass
try:
raise error
except:
pass
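# Note: the 'raise error,"something"' form below is Python 2 syntax for
# raising an exception class with an argument; it is a SyntaxError in Python 3.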
try:
raise error,"something"
except:
pass
try:
raise error,"something"
except:
pass
try:
raise error,"something"
except:
pass
def calibrate(self):
error = ValueError
for i in xrange(self.rounds):
pass
class TryExcept(Test):
version = 0.1
operations = 15 * 10
rounds = 200000
def test(self):
for i in xrange(self.rounds):
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
try:
pass
except:
pass
def calibrate(self):
for i in xrange(self.rounds):
pass
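Each pybench Test subclass pairs a test() body with a calibrate() body that repeats only the loop scaffolding, letting the harness subtract loop overhead from the measurement. A rough, self-contained sketch of that idea (pybench's real harness is more elaborate; the dummy class is illustrative):

import time

class _DummyBench:
    # Stand-in with the same shape as the pybench Test subclasses above
    # (rounds, operations, test, calibrate); purely illustrative.
    rounds = 100000
    operations = 5

    def test(self):
        for _ in range(self.rounds):
            for _ in range(self.operations):
                pass

    def calibrate(self):
        for _ in range(self.rounds):
            pass

def per_operation_seconds(bench):
    # pybench's core idea: time test(), time calibrate() (the bare loop),
    # subtract the loop overhead, and normalise per simulated operation.
    start = time.time(); bench.test(); test_time = time.time() - start
    start = time.time(); bench.calibrate(); overhead = time.time() - start
    return (test_time - overhead) / (bench.rounds * bench.operations)

print(per_operation_seconds(_DummyBench()))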
tccli/services/scf/scf_client.py | tencentcloudapi-test/tencentcloud-cli | Apache-2.0 | 120,996 bytes | Python | 296521b4a47b633245ebc9ce156eaf1dab8cf5dc
# -*- coding: utf-8 -*-
import os
import sys
import six
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError, ClientError, ParamError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.scf.v20180416 import scf_client as scf_client_v20180416
from tencentcloud.scf.v20180416 import models as models_v20180416
from jmespath import search
import time
def doInvokeFunction(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.InvokeFunctionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.InvokeFunction(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
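# A hypothetical consolidation (not actual tccli code): every doXxx function
# in this file repeats the same credential/profile/output boilerplate and
# differs only in the request model and client method, both derivable from
# the action name. STS-role and waiter branches are omitted for brevity.
def do_action(action, args, parsed_globals):
    # 'action' is the API name, e.g. "InvokeFunction"; the request model and
    # the client method are both looked up from it by naming convention.
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(g_param[OptionsDefine.SecretId],
                                 g_param[OptionsDefine.SecretKey],
                                 g_param[OptionsDefine.Token])
    profile = ClientProfile(httpProfile=HttpProfile(reqMethod="POST",
                                                    endpoint=g_param[OptionsDefine.Endpoint]),
                            signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].ScfClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = getattr(MODELS_MAP[g_param[OptionsDefine.Version]], action + "Request")()
    model.from_json_string(json.dumps(args))
    rsp = getattr(client, action)(model)  # e.g. client.InvokeFunction(model)
    json_obj = json.loads(rsp.to_json_string())
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])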
def doDeleteFunction(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteFunctionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DeleteFunction(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateAlias(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.UpdateAliasRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.UpdateAlias(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListTriggers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ListTriggersRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ListTriggers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetFunctionEventInvokeConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetFunctionEventInvokeConfigRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetFunctionEventInvokeConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetLayerVersion(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetLayerVersionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetLayerVersion(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTrigger(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateTriggerRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateTrigger(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateNamespace(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateNamespaceRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateNamespace(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCopyFunction(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CopyFunctionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CopyFunction(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetFunctionLogs(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetFunctionLogsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetFunctionLogs(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListAliases(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ListAliasesRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ListAliases(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteTrigger(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteTriggerRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DeleteTrigger(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetProvisionedConcurrencyConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetProvisionedConcurrencyConfigRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetProvisionedConcurrencyConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateNamespace(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.UpdateNamespaceRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.UpdateNamespace(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteProvisionedConcurrencyConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteProvisionedConcurrencyConfigRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DeleteProvisionedConcurrencyConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishVersion(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PublishVersionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.PublishVersion(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteLayerVersion(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteLayerVersionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DeleteLayerVersion(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetFunction(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetFunctionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetFunction(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteAlias(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteAliasRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DeleteAlias(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetAsyncEventStatus(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetAsyncEventStatusRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetAsyncEventStatus(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteNamespace(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteNamespaceRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DeleteNamespace(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAlias(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateAliasRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateAlias(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doTerminateAsyncEvent(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.TerminateAsyncEventRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.TerminateAsyncEvent(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateFunctionEventInvokeConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.UpdateFunctionEventInvokeConfigRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.UpdateFunctionEventInvokeConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListVersionByFunction(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ListVersionByFunctionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ListVersionByFunction(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListLayers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ListLayersRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ListLayers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListAsyncEvents(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ListAsyncEventsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ListAsyncEvents(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListLayerVersions(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ListLayerVersionsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ListLayerVersions(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetAccount(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetAccountRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetAccount(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetReservedConcurrencyConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetReservedConcurrencyConfigRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetReservedConcurrencyConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListFunctions(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ListFunctionsRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ListFunctions(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateFunctionConfiguration(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.UpdateFunctionConfigurationRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.UpdateFunctionConfiguration(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPutReservedConcurrencyConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PutReservedConcurrencyConfigRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.PutReservedConcurrencyConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishLayerVersion(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PublishLayerVersionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.PublishLayerVersion(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteReservedConcurrencyConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteReservedConcurrencyConfigRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.DeleteReservedConcurrencyConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doInvoke(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.InvokeRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.Invoke(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetFunctionAddress(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetFunctionAddressRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetFunctionAddress(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPutProvisionedConcurrencyConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PutProvisionedConcurrencyConfigRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.PutProvisionedConcurrencyConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetAlias(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetAliasRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetAlias(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPutTotalConcurrencyConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PutTotalConcurrencyConfigRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.PutTotalConcurrencyConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateFunction(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateFunctionRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.CreateFunction(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doListNamespaces(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ListNamespacesRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.ListNamespaces(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateFunctionCode(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.UpdateFunctionCodeRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.UpdateFunctionCode(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timed out: waited for `%s` to become `%s`; last value was %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetRequestStatus(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
cred = credential.CVMRoleCredential()
elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
cred = credential.STSAssumeRoleCredential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
)
else:
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint],
proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.ScfClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetRequestStatusRequest()
model.from_json_string(json.dumps(args))
start_time = time.time()
while True:
rsp = client.GetRequestStatus(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # json.loads() rejects bytes before Python 3.6
if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
break
cur_time = time.time()
if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
(g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
else:
print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
CLIENT_MAP = {
"v20180416": scf_client_v20180416,
}
MODELS_MAP = {
"v20180416": models_v20180416,
}
ACTION_MAP = {
"InvokeFunction": doInvokeFunction,
"DeleteFunction": doDeleteFunction,
"UpdateAlias": doUpdateAlias,
"ListTriggers": doListTriggers,
"GetFunctionEventInvokeConfig": doGetFunctionEventInvokeConfig,
"GetLayerVersion": doGetLayerVersion,
"CreateTrigger": doCreateTrigger,
"CreateNamespace": doCreateNamespace,
"CopyFunction": doCopyFunction,
"GetFunctionLogs": doGetFunctionLogs,
"ListAliases": doListAliases,
"DeleteTrigger": doDeleteTrigger,
"GetProvisionedConcurrencyConfig": doGetProvisionedConcurrencyConfig,
"UpdateNamespace": doUpdateNamespace,
"DeleteProvisionedConcurrencyConfig": doDeleteProvisionedConcurrencyConfig,
"PublishVersion": doPublishVersion,
"DeleteLayerVersion": doDeleteLayerVersion,
"GetFunction": doGetFunction,
"DeleteAlias": doDeleteAlias,
"GetAsyncEventStatus": doGetAsyncEventStatus,
"DeleteNamespace": doDeleteNamespace,
"CreateAlias": doCreateAlias,
"TerminateAsyncEvent": doTerminateAsyncEvent,
"UpdateFunctionEventInvokeConfig": doUpdateFunctionEventInvokeConfig,
"ListVersionByFunction": doListVersionByFunction,
"ListLayers": doListLayers,
"ListAsyncEvents": doListAsyncEvents,
"ListLayerVersions": doListLayerVersions,
"GetAccount": doGetAccount,
"GetReservedConcurrencyConfig": doGetReservedConcurrencyConfig,
"ListFunctions": doListFunctions,
"UpdateFunctionConfiguration": doUpdateFunctionConfiguration,
"PutReservedConcurrencyConfig": doPutReservedConcurrencyConfig,
"PublishLayerVersion": doPublishLayerVersion,
"DeleteReservedConcurrencyConfig": doDeleteReservedConcurrencyConfig,
"Invoke": doInvoke,
"GetFunctionAddress": doGetFunctionAddress,
"PutProvisionedConcurrencyConfig": doPutProvisionedConcurrencyConfig,
"GetAlias": doGetAlias,
"PutTotalConcurrencyConfig": doPutTotalConcurrencyConfig,
"CreateFunction": doCreateFunction,
"ListNamespaces": doListNamespaces,
"UpdateFunctionCode": doUpdateFunctionCode,
"GetRequestStatus": doGetRequestStatus,
}
AVAILABLE_VERSION_LIST = [
"v20180416",
]
def action_caller():
return ACTION_MAP
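# Illustrative dispatch sketch (an assumption about the caller, not original
# code): the CLI resolves an action name through the map returned above and
# invokes the matching handler with the parsed arguments, e.g.
#
#     handler = action_caller()["GetRequestStatus"]
#     handler(args, parsed_globals)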
def parse_global_arg(parsed_globals):
g_param = parsed_globals
is_exist_profile = True
if not parsed_globals["profile"]:
is_exist_profile = False
g_param["profile"] = "default"
configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
conf = {}
cred = {}
if is_conf_exist:
conf = Utils.load_json_msg(conf_path)
if is_cred_exist:
cred = Utils.load_json_msg(cred_path)
if not (isinstance(conf, dict) and isinstance(cred, dict)):
raise ConfigurationError(
"file: %s or %s is not json format"
% (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
if OptionsDefine.Token not in cred:
cred[OptionsDefine.Token] = None
if not is_exist_profile:
if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
if os.environ.get(OptionsDefine.ENV_REGION):
conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
if os.environ.get(OptionsDefine.ENV_ROLE_ARN) and os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME):
cred[OptionsDefine.RoleArn] = os.environ.get(OptionsDefine.ENV_ROLE_ARN)
cred[OptionsDefine.RoleSessionName] = os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME)
for param in g_param.keys():
if g_param[param] is None:
if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
if param in cred:
g_param[param] = cred[param]
elif not g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
raise ConfigurationError("%s is invalid" % param)
elif param in [OptionsDefine.Region, OptionsDefine.Output]:
if param in conf:
g_param[param] = conf[param]
else:
raise ConfigurationError("%s is invalid" % param)
elif param.replace('_', '-') in [OptionsDefine.RoleArn, OptionsDefine.RoleSessionName]:
if param.replace('_', '-') in cred:
g_param[param] = cred[param.replace('_', '-')]
try:
if g_param[OptionsDefine.ServiceVersion]:
g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
else:
version = conf["scf"][OptionsDefine.Version]
g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
if g_param[OptionsDefine.Endpoint] is None:
g_param[OptionsDefine.Endpoint] = conf["scf"][OptionsDefine.Endpoint]
except Exception as err:
raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
if g_param[OptionsDefine.Waiter]:
param = eval(g_param[OptionsDefine.Waiter])
if 'expr' not in param:
raise Exception('`expr` in `--waiter` must be defined')
if 'to' not in param:
raise Exception('`to` in `--waiter` must be defined')
if 'timeout' not in param:
if 'waiter' in conf and 'timeout' in conf['waiter']:
param['timeout'] = conf['waiter']['timeout']
else:
param['timeout'] = 180
if 'interval' not in param:
if 'waiter' in conf and 'interval' in conf['waiter']:
param['interval'] = conf['waiter']['interval']
else:
                param['interval'] = 5
param['interval'] = min(param['interval'], param['timeout'])
g_param['OptionsDefine.WaiterInfo'] = param
    # json.load in Python 2 returns unicode for values read from the configuration file, so convert the type here
if six.PY2:
for key, value in g_param.items():
if isinstance(value, six.text_type):
g_param[key] = value.encode('utf-8')
return g_param
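# Illustrative sketch of the --waiter option consumed above (the field names
# come from the parsing code; the expression and target value are assumptions):
#
#     --waiter "{'expr': 'Response.Status', 'to': 'Active', 'timeout': 180, 'interval': 5}"
#
# `expr` is evaluated against each JSON response until it equals `to`;
# `timeout` defaults to 180 seconds, `interval` to 5 seconds, and the polling
# interval is clamped to the timeout.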
| 52.652742 | 155 | 0.678551 | 13,034 | 120,996 | 6.074574 | 0.023324 | 0.09859 | 0.298285 | 0.129294 | 0.915277 | 0.911981 | 0.91039 | 0.907939 | 0.904984 | 0.902458 | 0 | 0.00439 | 0.186725 | 120,996 | 2,297 | 156 | 52.675664 | 0.80022 | 0.004306 | 0 | 0.807404 | 0 | 0 | 0.139055 | 0.072965 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021556 | false | 0 | 0.007498 | 0.000469 | 0.029991 | 0.020619 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4640b0c5fe71ca398e3a8dd725ebed2fe4c260e0 | 6,367 | py | Python | mrpy/mr_utils/operators/prediction/centered_polynomial_interpolation.py | marc-nguessan/mrpy | 6fb0bce485234a45bb863f71bc2bdf0a22014de3 | ["BSD-3-Clause"] | 2 | 2020-01-06T10:48:44.000Z | 2020-01-09T20:07:08.000Z | mrpy/mr_utils/operators/prediction/centered_polynomial_interpolation.py | marc-nguessan/mrpy | 6fb0bce485234a45bb863f71bc2bdf0a22014de3 | ["BSD-3-Clause"] | 1 | 2020-01-09T20:08:50.000Z | 2020-01-09T20:11:20.000Z | mrpy/mr_utils/operators/prediction/centered_polynomial_interpolation.py | marc-nguessan/mrpy | 6fb0bce485234a45bb863f71bc2bdf0a22014de3 | ["BSD-3-Clause"] | null | null | null |
from __future__ import print_function, division
"""...
"""
import numpy as np
from six.moves import range
import config as cfg
from mrpy.mr_utils import mesh
# I still need to specify where these coefficients come from
coef = np.zeros(shape=(6, 5), dtype=float)  # plain Python float; np.float is removed in NumPy >= 1.24
coef[1, 0] = -1./8
coef[2, 0] = -22./128
coef[2, 1] = 3./128
coef[3, 0] = -201./1024
coef[3, 1] = 11./256
coef[3, 2] = -5./1024
coef[4, 0] = -3461./16384
coef[4, 1] = 949./16384
coef[4, 2] = -185./16384
coef[4, 3] = 35./32768
coef[5, 0] = -29011./131072
coef[5, 1] = 569./8192
coef[5, 2] = -4661./262144
coef[5, 3] = 49./16384
coef[5, 4] = -63./262144
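# coef[s, m-1] weights the m-th pair of centered neighbours for a prediction
# stencil of width s (2*s + 1 parent cells); e.g. coef[2] = (-22/128, 3/128)
# defines the 5-point rule.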
def compute_prediction_value(tree, index_parent, index_child):
"""...
"""
s = tree.stencil_prediction
if tree.dimension == 1:
i = tree.nindex_x[index_parent]
parent_level = tree.nlevel[index_parent]
p = tree.nindex_x[index_child] % 2
prediction_value = tree.nvalue[index_parent]
for m in range(1, s+1):
prediction_value = prediction_value + (-1)**p*coef[s, m-1]*(mesh.get_value(tree, parent_level, i+m) - mesh.get_value(tree, parent_level, i-m))
return prediction_value
if tree.dimension == 2:
i = tree.nindex_x[index_parent]
j = tree.nindex_y[index_parent]
parent_level = tree.nlevel[index_parent]
p = tree.nindex_x[index_child] % 2
q = tree.nindex_y[index_child] % 2
prediction_value = tree.nvalue[index_parent]
for m in range(1, s+1):
prediction_value = prediction_value + \
(-1)**p*coef[s, m-1]*(mesh.get_value(tree, parent_level, i+m, j) - mesh.get_value(tree, parent_level, i-m, j)) + \
(-1)**q*coef[s, m-1]*(mesh.get_value(tree, parent_level, i, j+m) - mesh.get_value(tree, parent_level, i, j-m))
temp = 0
for a in range(1, s+1):
foo = 0
for b in range(1, s+1):
foo = foo + coef[s, b-1]*(mesh.get_value(tree, parent_level, i+a, j+b) - \
mesh.get_value(tree, parent_level, i-a, j+b) - \
mesh.get_value(tree, parent_level, i+a, j-b) + \
mesh.get_value(tree, parent_level, i-a, j-b))
temp = temp + coef[s, a-1]*foo
prediction_value = prediction_value - (-1)**(p+q)*temp
return prediction_value
if tree.dimension == 3:
i = tree.nindex_x[index_parent]
j = tree.nindex_y[index_parent]
k = tree.nindex_z[index_parent]
parent_level = tree.nlevel[index_parent]
p = tree.nindex_x[index_child] % 2
q = tree.nindex_y[index_child] % 2
r = tree.nindex_z[index_child] % 2
prediction_value = tree.nvalue[index_parent]
for m in range(1, s+1):
prediction_value = prediction_value + \
(-1)**p*coef[s, m-1]*(mesh.get_value(tree, parent_level, i+m, j, k) - mesh.get_value(tree, parent_level, i-m, j, k)) + \
(-1)**q*coef[s, m-1]*(mesh.get_value(tree, parent_level, i, j+m, k) - mesh.get_value(tree, parent_level, i, j-m, k)) + \
(-1)**r*coef[s, m-1]*(mesh.get_value(tree, parent_level, i, j, k+m) - mesh.get_value(tree, parent_level, i, j, k-m))
temp = 0
for a in range(1, s+1):
foo = 0
for b in range(1, s+1):
foo = foo + coef[s, b-1]*(mesh.get_value(tree, parent_level, i+a, j+b, k) - \
mesh.get_value(tree, parent_level, i-a, j+b, k) - \
mesh.get_value(tree, parent_level, i+a, j-b, k) + \
mesh.get_value(tree, parent_level, i-a, j-b, k))
temp = temp + coef[s, a-1]*foo
prediction_value = prediction_value - (-1)**(p+q)*temp
temp = 0
for a in range(1, s+1):
foo = 0
for b in range(1, s+1):
foo = foo + coef[s, b-1]*(mesh.get_value(tree, parent_level, i+a, j, k+b) - \
mesh.get_value(tree, parent_level, i-a, j, k+b) - \
mesh.get_value(tree, parent_level, i+a, j, k-b) + \
mesh.get_value(tree, parent_level, i-a, j, k-b))
temp = temp + coef[s, a-1]*foo
prediction_value = prediction_value - (-1)**(p+r)*temp
temp = 0
for a in range(1, s+1):
foo = 0
for b in range(1, s+1):
foo = foo + coef[s, b-1]*(mesh.get_value(tree, parent_level, i, j+a, k+b) - \
mesh.get_value(tree, parent_level, i, j-a, k+b) - \
mesh.get_value(tree, parent_level, i, j+a, k-b) + \
mesh.get_value(tree, parent_level, i, j-a, k-b))
temp = temp + coef[s, a-1]*foo
prediction_value = prediction_value - (-1)**(q+r)*temp
temp = 0
for a in range(1, s+1):
bar = 0
for b in range(1, s+1):
foo = 0
for c in range(1, s+1):
foo = foo + coef[s, c-1]*(mesh.get_value(tree, parent_level, i+a, j+b, k+c) - \
mesh.get_value(tree, parent_level, i-a, j+b, k+c) - \
mesh.get_value(tree, parent_level, i+a, j-b, k+c) - \
mesh.get_value(tree, parent_level, i+a, j+b, k-c) + \
mesh.get_value(tree, parent_level, i-a, j-b, k+c) + \
mesh.get_value(tree, parent_level, i-a, j+b, k-c) + \
mesh.get_value(tree, parent_level, i+a, j-b, k-c) - \
mesh.get_value(tree, parent_level, i-a, j-b, k-c))
bar = bar + coef[s, b-1]*foo
temp = temp + coef[s, a-1]*bar
prediction_value = prediction_value + (-1)**(p+q+r)*temp
return prediction_value
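# Self-contained illustrative check (not part of mrpy): the 1-D rule above,
# rewritten for a plain array of parent-level values. `p` is the child parity
# (0 = left child, 1 = right child) and `s` the stencil width into `coef`.
def _predict_child_1d_sketch(parent, i, p, s=1):
    value = parent[i]
    for m in range(1, s + 1):
        # centered difference of the m-th neighbour pair, weighted by coef[s, m-1]
        value += (-1) ** p * coef[s, m - 1] * (parent[i + m] - parent[i - m])
    return value
# For s = 1 this gives parent[i] -/+ (parent[i+1] - parent[i-1]) / 8, which is
# exact whenever the parent data is affine in the cell index.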
| 43.02027 | 155 | 0.481703 | 928 | 6,367 | 3.15625 | 0.102371 | 0.122909 | 0.147491 | 0.196654 | 0.811881 | 0.811881 | 0.778081 | 0.766815 | 0.755548 | 0.722089 | 0 | 0.053229 | 0.377415 | 6,367 | 147 | 156 | 43.312925 | 0.685671 | 0.010052 | 0 | 0.434783 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.008696 | false | 0 | 0.043478 | 0 | 0.078261 | 0.008696 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
464e4b24b9785f288d4a1c64ebb9ea512b4ef252 | 12,759 | py | Python | synthtiger/layers/layer.py | moonbings/synthtiger | 87f6de877d8497d3f3b14d414bd3b0ea5f017175 | ["MIT"] | 1 | 2021-11-17T10:07:59.000Z | 2021-11-17T10:07:59.000Z | synthtiger/layers/layer.py | moonbings/synthtiger | 87f6de877d8497d3f3b14d414bd3b0ea5f017175 | ["MIT"] | null | null | null | synthtiger/layers/layer.py | moonbings/synthtiger | 87f6de877d8497d3f3b14d414bd3b0ea5f017175 | ["MIT"] | null | null | null |
"""
SynthTIGER
Copyright (c) 2021-present NAVER Corp.
MIT license
"""
from typing import Iterable
import cv2
import numpy as np
from synthtiger import utils
class Layer:
def __init__(self, image):
image = np.array(image, dtype=np.float32)
image = utils.add_alpha_channel(image)
height, width = image.shape[:2]
self.image = image
self.bbox = [0, 0, width, height]
def __add__(self, obj):
if isinstance(obj, Iterable):
layers = [self] + list(obj)
elif isinstance(obj, Group):
layers = [self] + list(obj.layers)
else:
layers = [self] + [obj]
group = Group(layers)
return group
def __sub__(self, obj):
if isinstance(obj, Iterable):
layers = filter(lambda layer: layer not in list(obj), [self])
elif isinstance(obj, Group):
layers = filter(lambda layer: layer not in list(obj.layers), [self])
else:
layers = filter(lambda layer: layer != obj, [self])
group = Group(layers)
return group
def copy(self):
layer = Layer(self.image)
layer.quad = self.quad
return layer
def output(self, bbox=None):
if bbox is None:
bbox = self.bbox
image = utils.create_image(bbox[2:])
utils.paste_image(self.image, image, self.quad - bbox[:2])
return image
def paste(self, layer, mode="normal"):
image = self.output()
utils.paste_image(layer.image, image, layer.quad - self.topleft, mode=mode)
layer = Layer(image)
layer.bbox = self.bbox
return layer
def erase(self, layer):
image = self.output()
utils.erase_image(layer.image, image, layer.quad - self.topleft)
layer = Layer(image)
layer.bbox = self.bbox
return layer
@property
def quad(self):
return np.array(self._quad)
@quad.setter
def quad(self, data):
self._quad = np.array(data, dtype=np.float32)
self._bbox = utils.to_bbox(self._quad)
@property
def bbox(self):
return np.array(self._bbox)
@bbox.setter
def bbox(self, data):
self._bbox = np.array(data, dtype=np.float32)
self._quad = utils.to_quad(self._bbox)
@property
def size(self):
return np.array(self.bbox[2:])
@size.setter
def size(self, data):
scale_x = data[0] / self.bbox[2] if self.bbox[2] > 0 else 0
scale_y = data[1] / self.bbox[3] if self.bbox[3] > 0 else 0
self.quad = self.bbox[:2] + (self.quad - self.bbox[:2]) * (scale_x, scale_y)
@property
def width(self):
return self.bbox[2]
@width.setter
def width(self, data):
self.size = (data, self.bbox[3])
@property
def height(self):
return self.bbox[3]
@height.setter
def height(self, data):
self.size = (self.bbox[2], data)
@property
def top(self):
return self.bbox[1]
@top.setter
def top(self, data):
self.quad += (0, data - self.bbox[1])
@property
def bottom(self):
return self.bbox[1] + self.bbox[3]
@bottom.setter
def bottom(self, data):
self.quad += (0, data - (self.bbox[1] + self.bbox[3]))
@property
def left(self):
return self.bbox[0]
@left.setter
def left(self, data):
self.quad += (data - self.bbox[0], 0)
@property
def right(self):
return self.bbox[0] + self.bbox[2]
@right.setter
def right(self, data):
self.quad += (data - (self.bbox[0] + self.bbox[2]), 0)
@property
def topleft(self):
return np.array(self.bbox[:2])
@topleft.setter
def topleft(self, data):
self.quad += data - self.bbox[:2]
@property
def topright(self):
return np.array(self.bbox[:2] + (self.bbox[2], 0))
@topright.setter
def topright(self, data):
self.quad += data - (self.bbox[:2] + (self.bbox[2], 0))
@property
def bottomleft(self):
return np.array(self.bbox[:2] + (0, self.bbox[3]))
@bottomleft.setter
def bottomleft(self, data):
self.quad += data - (self.bbox[:2] + (0, self.bbox[3]))
@property
def bottomright(self):
return np.array(self.bbox[:2] + self.bbox[2:])
@bottomright.setter
def bottomright(self, data):
self.quad += data - (self.bbox[:2] + self.bbox[2:])
@property
def midtop(self):
return np.mean(self.quad, axis=0) * (1, 0) + (0, self.bbox[1])
@midtop.setter
def midtop(self, data):
origin = np.mean(self.quad, axis=0) * (1, 0) + (0, self.bbox[1])
self.quad += data - origin
@property
def midbottom(self):
return np.mean(self.quad, axis=0) * (1, 0) + (0, self.bbox[1] + self.bbox[3])
@midbottom.setter
def midbottom(self, data):
origin = np.mean(self.quad, axis=0) * (1, 0) + (0, self.bbox[1] + self.bbox[3])
self.quad += data - origin
@property
def midleft(self):
return np.mean(self.quad, axis=0) * (0, 1) + (self.bbox[0], 0)
@midleft.setter
def midleft(self, data):
origin = np.mean(self.quad, axis=0) * (0, 1) + (self.bbox[0], 0)
self.quad += data - origin
@property
def midright(self):
return np.mean(self.quad, axis=0) * (0, 1) + (self.bbox[0] + self.bbox[2], 0)
@midright.setter
def midright(self, data):
origin = np.mean(self.quad, axis=0) * (0, 1) + (self.bbox[0] + self.bbox[2], 0)
self.quad += data - origin
@property
def center(self):
return np.mean(self.quad, axis=0)
@center.setter
def center(self, data):
origin = np.mean(self.quad, axis=0)
self.quad += data - origin
@property
def centerx(self):
return np.mean(self.quad, axis=0)[0]
@centerx.setter
def centerx(self, data):
origin = np.mean(self.quad, axis=0)[0]
self.quad += (data - origin, 0)
@property
def centery(self):
return np.mean(self.quad, axis=0)[1]
@centery.setter
def centery(self, data):
origin = np.mean(self.quad, axis=0)[1]
self.quad += (0, data - origin)
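# Illustrative usage sketch (assumed shapes and values, not part of the file):
#
#     layer = Layer(np.zeros((32, 64, 3)))  # 64x32 image; an alpha channel is added
#     layer.center = (100, 50)              # positional setters translate the quad
#     layer.size = (128, 64)                # size setters scale the quad about the bbox origin
#     patch = layer.output()                # render the layer into its own bbox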
class Group:
def __init__(self, obj):
if isinstance(obj, Iterable):
self.layers = list(obj)
elif isinstance(obj, Group):
self.layers = list(obj.layers)
else:
self.layers = [obj]
def __len__(self):
return len(self.layers)
def __getitem__(self, idx):
return self.layers[idx]
def __setitem__(self, idx, layer):
self.layers[idx] = layer
def __add__(self, obj):
if isinstance(obj, Iterable):
layers = self.layers + list(obj)
elif isinstance(obj, Group):
layers = self.layers + list(obj.layers)
else:
layers = self.layers + [obj]
group = Group(layers)
return group
def __sub__(self, obj):
if isinstance(obj, Iterable):
layers = filter(lambda layer: layer not in list(obj), self.layers)
elif isinstance(obj, Group):
layers = filter(lambda layer: layer not in list(obj.layers), self.layers)
else:
layers = filter(lambda layer: layer != obj, self.layers)
group = Group(layers)
return group
def copy(self):
layers = [layer.copy() for layer in self.layers]
group = Group(layers)
return group
def output(self, bbox=None):
if bbox is None:
bbox = self.bbox
image = utils.create_image(bbox[2:])
for layer in reversed(self.layers):
utils.paste_image(layer.image, image, layer.quad - bbox[:2])
return image
def merge(self):
layer = Layer(self.output())
layer.bbox = [*self.topleft, *layer.size]
return layer
@property
def quad(self):
return utils.merge_quad([layer.quad for layer in self.layers])
@quad.setter
def quad(self, data):
quad = np.array(data, dtype=np.float32)
matrix = cv2.getPerspectiveTransform(self.quad, quad)
for layer in self.layers:
quad = np.append(layer.quad, np.ones((4, 1)), axis=-1).dot(matrix.T)
layer.quad = (quad / quad[..., 2, np.newaxis])[..., :2]
@property
def bbox(self):
return utils.merge_bbox([layer.bbox for layer in self.layers])
@bbox.setter
def bbox(self, data):
bbox = np.array(data, dtype=np.float32)
quad = utils.to_quad(bbox)
matrix = cv2.getPerspectiveTransform(self.quad, quad)
for layer in self.layers:
quad = np.append(layer.quad, np.ones((4, 1)), axis=-1).dot(matrix.T)
layer.quad = (quad / quad[..., 2, np.newaxis])[..., :2]
@property
def size(self):
return np.array(self.bbox[2:])
@size.setter
def size(self, data):
scale_x = data[0] / self.bbox[2] if self.bbox[2] > 0 else 0
scale_y = data[1] / self.bbox[3] if self.bbox[3] > 0 else 0
self.quad = self.bbox[:2] + (self.quad - self.bbox[:2]) * (scale_x, scale_y)
@property
def width(self):
return self.bbox[2]
@width.setter
def width(self, data):
self.size = (data, self.bbox[3])
@property
def height(self):
return self.bbox[3]
@height.setter
def height(self, data):
self.size = (self.bbox[2], data)
@property
def top(self):
return self.bbox[1]
@top.setter
def top(self, data):
self.quad += (0, data - self.bbox[1])
@property
def bottom(self):
return self.bbox[1] + self.bbox[3]
@bottom.setter
def bottom(self, data):
self.quad += (0, data - (self.bbox[1] + self.bbox[3]))
@property
def left(self):
return self.bbox[0]
@left.setter
def left(self, data):
self.quad += (data - self.bbox[0], 0)
@property
def right(self):
return self.bbox[0] + self.bbox[2]
@right.setter
def right(self, data):
self.quad += (data - (self.bbox[0] + self.bbox[2]), 0)
@property
def topleft(self):
return np.array(self.bbox[:2])
@topleft.setter
def topleft(self, data):
self.quad += data - self.bbox[:2]
@property
def topright(self):
return np.array(self.bbox[:2] + (self.bbox[2], 0))
@topright.setter
def topright(self, data):
self.quad += data - (self.bbox[:2] + (self.bbox[2], 0))
@property
def bottomleft(self):
return np.array(self.bbox[:2] + (0, self.bbox[3]))
@bottomleft.setter
def bottomleft(self, data):
self.quad += data - (self.bbox[:2] + (0, self.bbox[3]))
@property
def bottomright(self):
return np.array(self.bbox[:2] + self.bbox[2:])
@bottomright.setter
def bottomright(self, data):
self.quad += data - (self.bbox[:2] + self.bbox[2:])
@property
def midtop(self):
return np.mean(self.quad, axis=0) * (1, 0) + (0, self.bbox[1])
@midtop.setter
def midtop(self, data):
origin = np.mean(self.quad, axis=0) * (1, 0) + (0, self.bbox[1])
self.quad += data - origin
@property
def midbottom(self):
return np.mean(self.quad, axis=0) * (1, 0) + (0, self.bbox[1] + self.bbox[3])
@midbottom.setter
def midbottom(self, data):
origin = np.mean(self.quad, axis=0) * (1, 0) + (0, self.bbox[1] + self.bbox[3])
self.quad += data - origin
@property
def midleft(self):
return np.mean(self.quad, axis=0) * (0, 1) + (self.bbox[0], 0)
@midleft.setter
def midleft(self, data):
origin = np.mean(self.quad, axis=0) * (0, 1) + (self.bbox[0], 0)
self.quad += data - origin
@property
def midright(self):
return np.mean(self.quad, axis=0) * (0, 1) + (self.bbox[0] + self.bbox[2], 0)
@midright.setter
def midright(self, data):
origin = np.mean(self.quad, axis=0) * (0, 1) + (self.bbox[0] + self.bbox[2], 0)
self.quad += data - origin
@property
def center(self):
return np.mean(self.quad, axis=0)
@center.setter
def center(self, data):
origin = np.mean(self.quad, axis=0)
self.quad += data - origin
@property
def centerx(self):
return np.mean(self.quad, axis=0)[0]
@centerx.setter
def centerx(self, data):
origin = np.mean(self.quad, axis=0)[0]
self.quad += (data - origin, 0)
@property
def centery(self):
return np.mean(self.quad, axis=0)[1]
@centery.setter
def centery(self, data):
origin = np.mean(self.quad, axis=0)[1]
self.quad += (0, data - origin)
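# Illustrative composition sketch (assumed layer objects, not part of the
# file): Layer and Group expose the same geometry interface, and `+` builds
# groups, so
#
#     group = fg_layer + bg_layer   # Layer.__add__ -> Group([fg_layer, bg_layer])
#     group.topleft = (0, 0)        # moves every member layer together
#     merged = group.merge()        # flatten the group into a single Layer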
| 26.470954 | 87 | 0.564543 | 1,750 | 12,759 | 4.078857 | 0.058857 | 0.122163 | 0.057999 | 0.054917 | 0.886663 | 0.876856 | 0.83763 | 0.807789 | 0.744887 | 0.732838 | 0 | 0.027187 | 0.285054 | 12,759 | 481 | 88 | 26.525988 | 0.755317 | 0.004781 | 0 | 0.841962 | 0 | 0 | 0.000473 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.26158 | false | 0 | 0.010899 | 0.114441 | 0.422343 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 9 |
3102f0d3628de748d84c316e820e3124075d5075 | 26,257 | py | Python | kratos/tests/test_time_averaging.py | lkusch/Kratos | e8072d8e24ab6f312765185b19d439f01ab7b27b | ["BSD-4-Clause"] | 778 | 2017-01-27T16:29:17.000Z | 2022-03-30T03:01:51.000Z | kratos/tests/test_time_averaging.py | lkusch/Kratos | e8072d8e24ab6f312765185b19d439f01ab7b27b | ["BSD-4-Clause"] | 6,634 | 2017-01-15T22:56:13.000Z | 2022-03-31T15:03:36.000Z | kratos/tests/test_time_averaging.py | lkusch/Kratos | e8072d8e24ab6f312765185b19d439f01ab7b27b | ["BSD-4-Clause"] | 224 | 2017-02-07T14:12:49.000Z | 2022-03-06T23:09:34.000Z |
import KratosMultiphysics as Kratos
from KratosMultiphysics.process_factory import KratosProcessFactory
import KratosMultiphysics.KratosUnittest as UnitTest
import random
import math
class TimeAveragingProcessTests(UnitTest.TestCase):
def testTimeAveragingNodalHistoricalAvgProcess(self):
self.__CreateModel()
settings = Kratos.Parameters(r'''
[
{
"kratos_module" : "KratosMultiphysics",
"python_module" : "process_factory",
"process_name" : "TimeAveragingProcess",
"Parameters" : {
"model_part_name" : "test",
"variables_list" : ["VELOCITY", "DENSITY"],
"averaged_variables_list" : ["VELOCITY", "DENSITY"],
"time_averaging_container" : "NodalHistorical",
"time_averaging_method" : "Average",
"integration_start_point_control_variable_name" : "TIME",
"integration_start_point_control_value" : 2.0
}
}
]''')
factory = KratosProcessFactory(self.model)
self.process_list = factory.ConstructListOfProcesses(settings)
for process in self.process_list:
process.Check()
for process in self.process_list:
process.ExecuteInitialize()
velocity_vector = []
density_vector = []
for _ in self.model_part.Nodes:
velocity_vector.append([0.0, 0.0, 0.0])
density_vector.append(0.0)
total_time = 0.0
current_time = 0.0
for _ in range(1, 10):
current_time += 0.5
self.model_part.CloneTimeStep(current_time)
self.model_part.ProcessInfo[Kratos.STEP] += 1
for process in self.process_list:
process.ExecuteInitializeSolutionStep()
for node_index, node in enumerate(self.model_part.Nodes):
velocity = node.GetSolutionStepValue(Kratos.VELOCITY)
temp_velocity = Kratos.Array3()
temp_velocity[0] = velocity[0] * current_time
temp_velocity[1] = velocity[1] * current_time
temp_velocity[2] = velocity[2] * current_time
node.SetSolutionStepValue(Kratos.VELOCITY, 0, temp_velocity)
density = node.GetSolutionStepValue(Kratos.DENSITY)
density = density * current_time
node.SetSolutionStepValue(Kratos.DENSITY, 0, density)
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
velocity_vector[node_index][0] += temp_velocity[0] * 0.5
velocity_vector[node_index][1] += temp_velocity[1] * 0.5
velocity_vector[node_index][2] += temp_velocity[2] * 0.5
density_vector[node_index] += density * 0.5
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
total_time += 0.5
for process in self.process_list:
process.ExecuteFinalizeSolutionStep()
for process in self.process_list:
process.ExecuteFinalize()
for node_index, node in enumerate(self.model_part.Nodes):
averaged_velocity = node.GetValue(Kratos.VELOCITY)
averaged_density = node.GetValue(Kratos.DENSITY)
self.assertAlmostEqual(averaged_velocity[0],
velocity_vector[node_index][0] / total_time,
12)
self.assertAlmostEqual(averaged_velocity[1],
velocity_vector[node_index][1] / total_time,
12)
self.assertAlmostEqual(averaged_velocity[2],
velocity_vector[node_index][2] / total_time,
12)
self.assertAlmostEqual(averaged_density,
density_vector[node_index] / total_time, 12)
def testTimeAveragingNodalNonHistoricalAvgProcess(self):
self.__CreateModel()
settings = Kratos.Parameters(r'''
[
{
"kratos_module" : "KratosMultiphysics",
"python_module" : "process_factory",
"process_name" : "TimeAveragingProcess",
"Parameters" : {
"model_part_name" : "test",
"variables_list" : ["VELOCITY", "DENSITY"],
"averaged_variables_list" : ["ACCELERATION", "TEMPERATURE"],
"time_averaging_container" : "NodalNonHistorical",
"time_averaging_method" : "Average",
"integration_start_point_control_variable_name" : "TIME",
"integration_start_point_control_value" : 2.0
}
}
]''')
factory = KratosProcessFactory(self.model)
self.process_list = factory.ConstructListOfProcesses(settings)
for process in self.process_list:
process.Check()
for process in self.process_list:
process.ExecuteInitialize()
velocity_vector = []
density_vector = []
for _ in self.model_part.Nodes:
velocity_vector.append([0.0, 0.0, 0.0])
density_vector.append(0.0)
total_time = 0.0
current_time = 0.0
for _ in range(1, 10):
current_time += 0.5
self.model_part.CloneTimeStep(current_time)
self.model_part.ProcessInfo[Kratos.STEP] += 1
for process in self.process_list:
process.ExecuteInitializeSolutionStep()
for node_index, node in enumerate(self.model_part.Nodes):
velocity = node.GetValue(Kratos.VELOCITY)
temp_velocity = Kratos.Array3()
temp_velocity[0] = velocity[0] * current_time
temp_velocity[1] = velocity[1] * current_time
temp_velocity[2] = velocity[2] * current_time
node.SetValue(Kratos.VELOCITY, temp_velocity)
density = node.GetValue(Kratos.DENSITY)
density = density * current_time
node.SetValue(Kratos.DENSITY, density)
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
velocity_vector[node_index][0] += temp_velocity[0] * 0.5
velocity_vector[node_index][1] += temp_velocity[1] * 0.5
velocity_vector[node_index][2] += temp_velocity[2] * 0.5
density_vector[node_index] += density * 0.5
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
total_time += 0.5
for process in self.process_list:
process.ExecuteFinalizeSolutionStep()
for process in self.process_list:
process.ExecuteFinalize()
for node_index, node in enumerate(self.model_part.Nodes):
averaged_velocity = node.GetValue(Kratos.ACCELERATION)
averaged_density = node.GetValue(Kratos.TEMPERATURE)
self.assertAlmostEqual(averaged_velocity[0],
velocity_vector[node_index][0] / total_time,
12)
self.assertAlmostEqual(averaged_velocity[1],
velocity_vector[node_index][1] / total_time,
12)
self.assertAlmostEqual(averaged_velocity[2],
velocity_vector[node_index][2] / total_time,
12)
self.assertAlmostEqual(averaged_density,
density_vector[node_index] / total_time, 12)
def testTimeAveragingElementalNonHistoricalAvgProcess(self):
self.__CreateModel()
settings = Kratos.Parameters(r'''
[
{
"kratos_module" : "KratosMultiphysics",
"python_module" : "process_factory",
"process_name" : "TimeAveragingProcess",
"Parameters" : {
"model_part_name" : "test",
"variables_list" : ["VELOCITY", "DENSITY"],
"averaged_variables_list" : ["ACCELERATION", "TEMPERATURE"],
"time_averaging_container" : "ElementalNonHistorical",
"time_averaging_method" : "Average",
"integration_start_point_control_variable_name" : "TIME",
"integration_start_point_control_value" : 2.0
}
}
]''')
factory = KratosProcessFactory(self.model)
self.process_list = factory.ConstructListOfProcesses(settings)
for process in self.process_list:
process.Check()
for process in self.process_list:
process.ExecuteInitialize()
velocity_vector = []
density_vector = []
for _ in self.model_part.Nodes:
velocity_vector.append([0.0, 0.0, 0.0])
density_vector.append(0.0)
total_time = 0.0
current_time = 0.0
for _ in range(1, 10):
current_time += 0.5
self.model_part.CloneTimeStep(current_time)
self.model_part.ProcessInfo[Kratos.STEP] += 1
for process in self.process_list:
process.ExecuteInitializeSolutionStep()
for element_index, element in enumerate(self.model_part.Elements):
velocity = element.GetValue(Kratos.VELOCITY)
temp_velocity = Kratos.Array3()
temp_velocity[0] = velocity[0] * current_time
temp_velocity[1] = velocity[1] * current_time
temp_velocity[2] = velocity[2] * current_time
element.SetValue(Kratos.VELOCITY, temp_velocity)
density = element.GetValue(Kratos.DENSITY)
density = density * current_time
element.SetValue(Kratos.DENSITY, density)
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
velocity_vector[element_index][0] += temp_velocity[0] * 0.5
velocity_vector[element_index][1] += temp_velocity[1] * 0.5
velocity_vector[element_index][2] += temp_velocity[2] * 0.5
density_vector[element_index] += density * 0.5
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
total_time += 0.5
for process in self.process_list:
process.ExecuteFinalizeSolutionStep()
for process in self.process_list:
process.ExecuteFinalize()
for element_index, element in enumerate(self.model_part.Elements):
averaged_velocity = element.GetValue(Kratos.ACCELERATION)
averaged_density = element.GetValue(Kratos.TEMPERATURE)
self.assertAlmostEqual(averaged_velocity[0],
velocity_vector[element_index][0] / total_time,
12)
self.assertAlmostEqual(averaged_velocity[1],
velocity_vector[element_index][1] / total_time,
12)
self.assertAlmostEqual(averaged_velocity[2],
velocity_vector[element_index][2] / total_time,
12)
self.assertAlmostEqual(averaged_density,
density_vector[element_index] / total_time, 12)
def testTimeAveragingNodalHistoricalRMSProcess(self):
self.__CreateModel()
settings = Kratos.Parameters(r'''
[
{
"kratos_module" : "KratosMultiphysics",
"python_module" : "process_factory",
"process_name" : "TimeAveragingProcess",
"Parameters" : {
"model_part_name" : "test",
"variables_list" : ["VELOCITY", "DENSITY"],
"averaged_variables_list" : ["VELOCITY", "DENSITY"],
"time_averaging_container" : "NodalHistorical",
"time_averaging_method" : "RootMeanSquare",
"integration_start_point_control_variable_name" : "TIME",
"integration_start_point_control_value" : 2.0
}
}
]''')
factory = KratosProcessFactory(self.model)
self.process_list = factory.ConstructListOfProcesses(settings)
for process in self.process_list:
process.Check()
for process in self.process_list:
process.ExecuteInitialize()
velocity_vector = []
density_vector = []
for _ in self.model_part.Nodes:
velocity_vector.append([0.0, 0.0, 0.0])
density_vector.append(0.0)
total_time = 0.0
current_time = 0.0
for _ in range(1, 10):
current_time += 0.5
self.model_part.CloneTimeStep(current_time)
self.model_part.ProcessInfo[Kratos.STEP] += 1
for process in self.process_list:
process.ExecuteInitializeSolutionStep()
for node_index, node in enumerate(self.model_part.Nodes):
velocity = node.GetSolutionStepValue(Kratos.VELOCITY)
temp_velocity = Kratos.Array3()
temp_velocity[0] = velocity[0] * current_time
temp_velocity[1] = velocity[1] * current_time
temp_velocity[2] = velocity[2] * current_time
node.SetSolutionStepValue(Kratos.VELOCITY, 0, temp_velocity)
density = node.GetSolutionStepValue(Kratos.DENSITY)
density = density * current_time
node.SetSolutionStepValue(Kratos.DENSITY, 0, density)
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
velocity_vector[node_index][0] += temp_velocity[0]**2 * 0.5
velocity_vector[node_index][1] += temp_velocity[1]**2 * 0.5
velocity_vector[node_index][2] += temp_velocity[2]**2 * 0.5
density_vector[node_index] += density**2 * 0.5
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
total_time += 0.5
for process in self.process_list:
process.ExecuteFinalizeSolutionStep()
for process in self.process_list:
process.ExecuteFinalize()
for node_index, node in enumerate(self.model_part.Nodes):
averaged_velocity = node.GetValue(Kratos.VELOCITY)
averaged_density = node.GetValue(Kratos.DENSITY)
self.assertAlmostEqual(averaged_velocity[0],
math.sqrt(velocity_vector[node_index][0] / total_time),
12)
self.assertAlmostEqual(averaged_velocity[1],
math.sqrt(velocity_vector[node_index][1] / total_time),
12)
self.assertAlmostEqual(averaged_velocity[2],
math.sqrt(velocity_vector[node_index][2] / total_time),
12)
self.assertAlmostEqual(averaged_density,
math.sqrt(density_vector[node_index] / total_time), 12)
def testTimeAveragingNodalNonHistoricalRMSProcess(self):
self.__CreateModel()
settings = Kratos.Parameters(r'''
[
{
"kratos_module" : "KratosMultiphysics",
"python_module" : "process_factory",
"process_name" : "TimeAveragingProcess",
"Parameters" : {
"model_part_name" : "test",
"variables_list" : ["VELOCITY", "DENSITY"],
"averaged_variables_list" : ["ACCELERATION", "TEMPERATURE"],
"time_averaging_container" : "NodalNonHistorical",
"time_averaging_method" : "RootMeanSquare",
"integration_start_point_control_variable_name" : "TIME",
"integration_start_point_control_value" : 2.0
}
}
]''')
factory = KratosProcessFactory(self.model)
self.process_list = factory.ConstructListOfProcesses(settings)
for process in self.process_list:
process.Check()
for process in self.process_list:
process.ExecuteInitialize()
velocity_vector = []
density_vector = []
for _ in self.model_part.Nodes:
velocity_vector.append([0.0, 0.0, 0.0])
density_vector.append(0.0)
total_time = 0.0
current_time = 0.0
for _ in range(1, 10):
current_time += 0.5
self.model_part.CloneTimeStep(current_time)
self.model_part.ProcessInfo[Kratos.STEP] += 1
for process in self.process_list:
process.ExecuteInitializeSolutionStep()
for node_index, node in enumerate(self.model_part.Nodes):
velocity = node.GetValue(Kratos.VELOCITY)
temp_velocity = Kratos.Array3()
temp_velocity[0] = velocity[0] * current_time
temp_velocity[1] = velocity[1] * current_time
temp_velocity[2] = velocity[2] * current_time
node.SetValue(Kratos.VELOCITY, temp_velocity)
density = node.GetValue(Kratos.DENSITY)
density = density * current_time
node.SetValue(Kratos.DENSITY, density)
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
velocity_vector[node_index][0] += temp_velocity[0]**2 * 0.5
velocity_vector[node_index][1] += temp_velocity[1]**2 * 0.5
velocity_vector[node_index][2] += temp_velocity[2]**2 * 0.5
density_vector[node_index] += density**2 * 0.5
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
total_time += 0.5
for process in self.process_list:
process.ExecuteFinalizeSolutionStep()
for process in self.process_list:
process.ExecuteFinalize()
for node_index, node in enumerate(self.model_part.Nodes):
averaged_velocity = node.GetValue(Kratos.ACCELERATION)
averaged_density = node.GetValue(Kratos.TEMPERATURE)
self.assertAlmostEqual(averaged_velocity[0],
math.sqrt(velocity_vector[node_index][0] / total_time),
12)
self.assertAlmostEqual(averaged_velocity[1],
math.sqrt(velocity_vector[node_index][1] / total_time),
12)
self.assertAlmostEqual(averaged_velocity[2],
math.sqrt(velocity_vector[node_index][2] / total_time),
12)
self.assertAlmostEqual(averaged_density,
math.sqrt(density_vector[node_index] / total_time), 12)
def testTimeAveragingElementalNonHistoricalRMSProcess(self):
self.__CreateModel()
settings = Kratos.Parameters(r'''
[
{
"kratos_module" : "KratosMultiphysics",
"python_module" : "process_factory",
"process_name" : "TimeAveragingProcess",
"Parameters" : {
"model_part_name" : "test",
"variables_list" : ["VELOCITY", "DENSITY"],
"averaged_variables_list" : ["ACCELERATION", "TEMPERATURE"],
"time_averaging_container" : "ElementalNonHistorical",
"time_averaging_method" : "RootMeanSquare",
"integration_start_point_control_variable_name" : "TIME",
"integration_start_point_control_value" : 2.0
}
}
]''')
factory = KratosProcessFactory(self.model)
self.process_list = factory.ConstructListOfProcesses(settings)
for process in self.process_list:
process.Check()
for process in self.process_list:
process.ExecuteInitialize()
velocity_vector = []
density_vector = []
for _ in self.model_part.Nodes:
velocity_vector.append([0.0, 0.0, 0.0])
density_vector.append(0.0)
total_time = 0.0
current_time = 0.0
for _ in range(1, 10):
current_time += 0.5
self.model_part.CloneTimeStep(current_time)
self.model_part.ProcessInfo[Kratos.STEP] += 1
for process in self.process_list:
process.ExecuteInitializeSolutionStep()
for element_index, element in enumerate(self.model_part.Elements):
velocity = element.GetValue(Kratos.VELOCITY)
temp_velocity = Kratos.Array3()
temp_velocity[0] = velocity[0] * current_time
temp_velocity[1] = velocity[1] * current_time
temp_velocity[2] = velocity[2] * current_time
element.SetValue(Kratos.VELOCITY, temp_velocity)
density = element.GetValue(Kratos.DENSITY)
density = density * current_time
element.SetValue(Kratos.DENSITY, density)
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
velocity_vector[element_index][0] += temp_velocity[0]**2 * 0.5
velocity_vector[element_index][1] += temp_velocity[1]**2 * 0.5
velocity_vector[element_index][2] += temp_velocity[2]**2 * 0.5
density_vector[element_index] += density**2 * 0.5
if (self.model_part.ProcessInfo[Kratos.TIME] >= 2.0):
total_time += 0.5
for process in self.process_list:
process.ExecuteFinalizeSolutionStep()
for process in self.process_list:
process.ExecuteFinalize()
for element_index, element in enumerate(self.model_part.Elements):
averaged_velocity = element.GetValue(Kratos.ACCELERATION)
averaged_density = element.GetValue(Kratos.TEMPERATURE)
self.assertAlmostEqual(averaged_velocity[0],
math.sqrt(velocity_vector[element_index][0] / total_time),
12)
self.assertAlmostEqual(averaged_velocity[1],
math.sqrt(velocity_vector[element_index][1] / total_time),
12)
self.assertAlmostEqual(averaged_velocity[2],
math.sqrt(velocity_vector[element_index][2] / total_time),
12)
self.assertAlmostEqual(averaged_density,
math.sqrt(density_vector[element_index] / total_time), 12)
def __CreateModel(self):
self.model = Kratos.Model()
self.model_part = self.model.CreateModelPart("test")
self.model_part.AddNodalSolutionStepVariable(Kratos.VELOCITY)
self.model_part.AddNodalSolutionStepVariable(Kratos.DENSITY)
self.model_part.CreateNewNode(1, 0.0, 0.0, 0.0)
self.model_part.CreateNewNode(2, 1.0, 0.0, 0.0)
self.model_part.CreateNewNode(3, 2.0, 0.0, 0.0)
self.model_part.CreateNewNode(4, 2.0, 1.0, 0.0)
self.model_part.CreateNewNode(5, 1.5, 1.0, 0.0)
self.model_part.CreateNewNode(6, 1.0, 1.0, 0.0)
self.model_part.AddProperties(Kratos.Properties(1))
self.model_part.CreateNewElement("Element2D3N", 1, [1,2,6], self.model_part.GetProperties()[1])
self.model_part.CreateNewElement("Element2D3N", 2, [2,5,6], self.model_part.GetProperties()[1])
self.model_part.CreateNewElement("Element2D3N", 3, [2,3,5], self.model_part.GetProperties()[1])
self.model_part.CreateNewElement("Element2D3N", 4, [3,4,5], self.model_part.GetProperties()[1])
for node in self.model_part.Nodes:
vector = Kratos.Vector(3)
vector[0] = random.random()
vector[1] = random.random()
vector[2] = random.random()
if (node.SolutionStepsDataHas(Kratos.VELOCITY)):
node.SetSolutionStepValue(Kratos.VELOCITY, 0, vector)
scalar = random.random()
if (node.SolutionStepsDataHas(Kratos.DENSITY)):
node.SetSolutionStepValue(Kratos.DENSITY, 0, scalar)
vector = Kratos.Vector(3)
vector[0] = random.random()
vector[1] = random.random()
vector[2] = random.random()
node.SetValue(Kratos.VELOCITY, vector)
scalar = random.random()
node.SetValue(Kratos.DENSITY, scalar)
for elem in self.model_part.Elements:
vector[0] = random.random()
vector[1] = random.random()
vector[2] = random.random()
elem.SetValue(Kratos.VELOCITY, vector)
scalar = random.random()
elem.SetValue(Kratos.DENSITY, scalar)
if __name__ == '__main__':
UnitTest.main()
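# Summary of what the assertions above verify (commentary, not test code):
# with dt = 0.5 and integration starting once TIME >= 2.0, the process is
# expected to accumulate a rectangle-rule time integral, so
#
#     "Average"        ->  avg(u) = sum_k(u_k * dt) / sum_k(dt)
#     "RootMeanSquare" ->  rms(u) = sqrt(sum_k(u_k**2 * dt) / sum_k(dt))
#
# which the tests mirror by hand in velocity_vector / density_vector.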
| 44.203704 | 103 | 0.545797 | 2,432 | 26,257 | 5.662829 | 0.044408 | 0.045745 | 0.058525 | 0.034853 | 0.944017 | 0.926953 | 0.916134 | 0.909454 | 0.898925 | 0.891083 | 0 | 0.027801 | 0.361618 | 26,257 | 594 | 104 | 44.203704 | 0.793819 | 0 | 0 | 0.835391 | 0 | 0 | 0.20051 | 0.050118 | 0 | 0 | 0 | 0 | 0.049383 | 1 | 0.014403 | false | 0 | 0.010288 | 0 | 0.026749 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
730518fec80a515479bb81163da466a091b8d46b | 15,161 | py | Python | mainapp/migrations/0049_auto_20150810_1505.py | vesellov/callfeed.net | 88a12f244dac1bcc170c3028d86e53618b33aa6a | ["MIT"] | 1 | 2015-11-17T08:44:23.000Z | 2015-11-17T08:44:23.000Z | mainapp/migrations/0049_auto_20150810_1505.py | vesellov/callfeed.net | 88a12f244dac1bcc170c3028d86e53618b33aa6a | ["MIT"] | null | null | null | mainapp/migrations/0049_auto_20150810_1505.py | vesellov/callfeed.net | 88a12f244dac1bcc170c3028d86e53618b33aa6a | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mainapp', '0048_auto_20150809_1021'),
]
operations = [
migrations.AlterField(
model_name='widget',
name='operator_incoming_number',
field=models.CharField(default=b'callfeed', max_length=8, verbose_name=b'\xd0\x9e\xd1\x82\xd0\xbe\xd0\xb1\xd1\x80\xd0\xb0\xd0\xb6\xd0\xb0\xd0\xb5\xd0\xbc\xd1\x8b\xd0\xb9 \xd0\xb2\xd1\x85\xd0\xbe\xd0\xb4\xd1\x8f\xd1\x89\xd0\xb8\xd0\xb9 \xd0\xbd\xd0\xbe\xd0\xbc\xd0\xb5\xd1\x80 \xd1\x83 \xd0\xbe\xd0\xbf\xd0\xb5\xd1\x80\xd0\xb0\xd1\x82\xd0\xbe\xd1\x80\xd0\xb0', choices=[(b'callfeed', b'CallFeed.NET'), (b'client', b'\xd0\x9a\xd0\xbb\xd0\xb8\xd0\xb5\xd0\xbd\xd1\x82')]),
preserve_default=True,
),
migrations.AlterField(
model_name='widget',
name='settings',
field=models.CharField(default=b'{"text_timeoff_start": "<span style=\\"color: #FAD468;\\">\xd0\x97\xd0\xb4\xd1\x80\xd0\xb0\xd0\xb2\xd1\x81\xd1\x82\xd0\xb2\xd1\x83\xd0\xb9\xd1\x82\xd0\xb5!</span><br/>\xd0\x9a \xd1\x81\xd0\xbe\xd0\xb6\xd0\xb0\xd0\xbb\xd0\xb5\xd0\xbd\xd0\xb8\xd1\x8e \xd0\xbd\xd0\xb0\xd1\x88 \xd1\x80\xd0\xb0\xd0\xb1\xd0\xbe\xd1\x87\xd0\xb8\xd0\xb9 \xd0\xb4\xd0\xb5\xd0\xbd\xd1\x8c \xd1\x83\xd0\xb6\xd0\xb5 \xd0\xb7\xd0\xb0\xd0\xba\xd0\xbe\xd0\xbd\xd1\x87\xd0\xb8\xd0\xbb\xd1\x81\xd1\x8f. \xd0\x9f\xd0\xbe\xd0\xb6\xd0\xb0\xd0\xbb\xd1\x83\xd0\xb9\xd1\x81\xd1\x82\xd0\xb0 \xd0\xbe\xd1\x81\xd1\x82\xd0\xb0\xd0\xb2\xd1\x8c\xd1\x82\xd0\xb5 \xd0\x92\xd0\xb0\xd1\x88 \xd0\xbd\xd0\xbe\xd0\xbc\xd0\xb5\xd1\x80 \xd1\x82\xd0\xb5\xd0\xbb\xd0\xb5\xd1\x84\xd0\xbe\xd0\xbd\xd0\xb0 \xd0\xb8 \xd0\xb2\xd1\x8b\xd0\xb1\xd0\xb5\xd1\x80\xd0\xb8\xd1\x82\xd0\xb5 \xd1\x83\xd0\xb4\xd0\xbe\xd0\xb1\xd0\xbd\xd0\xbe\xd0\xb5 \xd0\xb2\xd1\x80\xd0\xb5\xd0\xbc\xd1\x8f \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xba\xd0\xb0.", "text_link_order": "\xd0\x92\xd1\x8b\xd0\xb1\xd1\x80\xd0\xb0\xd1\x82\xd1\x8c \xd1\x83\xd0\xb4\xd0\xbe\xd0\xb1\xd0\xbd\xd0\xbe\xd0\xb5 \xd0\xb2\xd1\x80\xd0\xb5\xd0\xbc\xd1\x8f \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xba\xd0\xb0", "text_message_sent": "\xd0\xa1\xd0\xbf\xd0\xb0\xd1\x81\xd0\xb8\xd0\xb1\xd0\xbe \xd0\xb7\xd0\xb0 \xd0\xb2\xd0\xb0\xd1\x88\xd0\xb5 \xd1\x81\xd0\xbe\xd0\xbe\xd0\xb1\xd1\x89\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5!<br/>\xd0\x9c\xd1\x8b \xd0\xbe\xd0\xb1\xd1\x8f\xd0\xb7\xd0\xb0\xd1\x82\xd0\xb5\xd0\xbb\xd1\x8c\xd0\xbd\xd0\xbe \xd1\x81\xd0\xb2\xd1\x8f\xd0\xb6\xd0\xb5\xd0\xbc\xd1\x81\xd1\x8f \xd1\x81 \xd0\xb2\xd0\xb0\xd0\xbc\xd0\xb8 \xd0\xb2 \xd0\xb1\xd0\xbb\xd0\xb8\xd0\xb6\xd0\xb0\xd0\xb9\xd1\x88\xd0\xb5\xd0\xb5 \xd0\xb2\xd1\x80\xd0\xb5\xd0\xbc\xd1\x8f.", "param_content_border_radius": 15, "text_dial_connected": "\xd0\x9e\xd0\xb6\xd0\xb8\xd0\xb4\xd0\xb0\xd0\xb9\xd1\x82\xd0\xb5 \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xba\xd0\xb0!<br/>\xd0\x98\xd0\xb4\xd0\xb5\xd1\x82 \xd0\xbd\xd0\xb0\xd0\xb1\xd0\xbe\xd1\x80 \xd0\xbd\xd0\xbe\xd0\xbc\xd0\xb5\xd1\x80\xd0\xb0 \xd0\xbe\xd0\xbf\xd0\xb5\xd1\x80\xd0\xb0\xd1\x82\xd0\xbe\xd1\x80\xd0\xb0...", "text_send_message_done": "\xd0\xa1\xd0\xbf\xd0\xb0\xd1\x81\xd0\xb8\xd0\xb1\xd0\xbe \xd0\xb7\xd0\xb0 \xd0\x92\xd0\xb0\xd1\x88\xd0\xb5 \xd1\x81\xd0\xbe\xd0\xbe\xd0\xb1\xd1\x89\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5!<br/>\xd0\x9c\xd1\x8b \xd0\xbe\xd0\xb1\xd1\x8f\xd0\xb7\xd0\xb0\xd1\x82\xd0\xb5\xd0\xbb\xd1\x8c\xd0\xbd\xd0\xbe \xd1\x81\xd0\xb2\xd1\x8f\xd0\xb6\xd0\xb5\xd0\xbc\xd1\x81\xd1\x8f \xd1\x81 \xd0\x92\xd0\xb0\xd0\xbc\xd0\xb8 \xd0\xb2 \xd0\xb1\xd0\xbb\xd0\xb8\xd0\xb6\xd0\xb0\xd0\xb9\xd1\x88\xd0\xb5\xd0\xb5 \xd0\xb2\xd1\x80\xd0\xb5\xd0\xbc\xd1\x8f.", "text_link_message_go_back": "\xd0\x97\xd0\xb0\xd0\xba\xd0\xb0\xd0\xb7\xd0\xb0\xd1\x82\xd1\x8c \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xbe\xd0\xba", "color_font_scondary_global": "#CCDCDC", "param_total_max_width": 290, "text_timeoff_done": "\xd0\xa1\xd0\xbf\xd0\xb0\xd1\x81\xd0\xb8\xd0\xb1\xd0\xbe!<br/>\xd0\x9c\xd1\x8b \xd0\xbe\xd0\xb1\xd1\x8f\xd0\xb7\xd0\xb0\xd1\x82\xd0\xb5\xd0\xbb\xd1\x8c\xd0\xbd\xd0\xbe \xd0\xbf\xd0\xb5\xd1\x80\xd0\xb5\xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xb8\xd0\xbc \xd0\x92\xd0\xb0\xd0\xbc \xd0\xb2 \xd1\x83\xd0\xba\xd0\xb0\xd0\xb7\xd0\xb0\xd0\xbd\xd0\xbd\xd0\xbe\xd0\xb5 \xd0\xb2\xd1\x80\xd0\xb5\xd0\xbc\xd1\x8f.", "color_opacity_inputs": "1", "text_dial_late": "\xd0\x9e\xd0\xbf\xd0\xb5\xd1\x80\xd0\xb0\xd1\x82\xd0\xbe\xd1\x80 \xd0\xbd\xd0\xb5 \xd1\x83\xd1\x81\xd0\xbf\xd0\xb5\xd0\xbb 
\xd0\xbf\xd0\xbe\xd0\xb4\xd0\xbd\xd1\x8f\xd1\x82\xd1\x8c \xd1\x82\xd1\x80\xd1\x83\xd0\xb1\xd0\xba\xd1\x83.", "param_button_width": 230, "countdown_from": 15, "text_order_failed": "\xd0\x98\xd0\xb7\xd0\xb2\xd0\xb8\xd0\xbd\xd0\xb8\xd1\x82\xd0\xb5, \xd1\x81\xd0\xb5\xd1\x80\xd0\xb2\xd0\xb8\xd1\x81 \xd0\xb2 \xd0\xb4\xd0\xb0\xd0\xbd\xd0\xbd\xd0\xbe\xd0\xb9 \xd0\xbc\xd0\xbe\xd0\xbc\xd0\xb5\xd0\xbd\xd1\x82 \xd0\xbd\xd0\xb5 \xd0\xb4\xd0\xbe\xd1\x81\xd1\x82\xd1\x83\xd0\xbf\xd0\xb5\xd0\xbd.<br/>\xd0\x9f\xd1\x80\xd0\xbe\xd1\x81\xd0\xb8\xd0\xbc \xd0\xbf\xd1\x80\xd0\xbe\xd1\x89\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5 \xd0\xb7\xd0\xb0 \xd0\xb4\xd0\xbe\xd1\x81\xd1\x82\xd0\xb0\xd0\xb2\xd0\xbb\xd0\xb5\xd0\xbd\xd0\xbd\xd1\x8b\xd0\xb5 \xd0\xbd\xd0\xb5\xd1\x83\xd0\xb4\xd0\xbe\xd0\xb1\xd1\x81\xd1\x82\xd0\xb2\xd0\xb0.", "color_background_image_global": "", "flag_button_text_animated": false, "text_link_send_message": "\xd0\x9d\xd0\xb0\xd0\xbf\xd0\xb8\xd1\x81\xd0\xb0\xd1\x82\xd1\x8c \xd1\x81\xd0\xbe\xd0\xbe\xd0\xb1\xd1\x89\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5", "param_root_position_bottom": "10px", "flag_phone_field": true, "font_family1": "", "param_timeoff_sent_height": 170, "color_font_global": "#fff", "param_message_sent_height": 170, "text_message_failed": "\xd0\x98\xd0\xb7\xd0\xb2\xd0\xb8\xd0\xbd\xd0\xb8\xd1\x82\xd0\xb5, \xd1\x81\xd0\xb5\xd1\x80\xd0\xb2\xd0\xb8\xd1\x81 \xd0\xb2 \xd0\xb4\xd0\xb0\xd0\xbd\xd0\xbd\xd0\xbe\xd0\xb9 \xd0\xbc\xd0\xbe\xd0\xbc\xd0\xb5\xd0\xbd\xd1\x82 \xd0\xbd\xd0\xb5 \xd0\xb4\xd0\xbe\xd1\x81\xd1\x82\xd1\x83\xd0\xbf\xd0\xb5\xd0\xbd.<br/>\xd0\x9f\xd1\x80\xd0\xbe\xd1\x81\xd0\xb8\xd0\xbc \xd0\xbf\xd1\x80\xd0\xbe\xd1\x89\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5 \xd0\xb7\xd0\xb0 \xd0\xb4\xd0\xbe\xd1\x81\xd1\x82\xd0\xb0\xd0\xb2\xd0\xbb\xd0\xb5\xd0\xbd\xd0\xbd\xd1\x8b\xd0\xb5 \xd0\xbd\xd0\xb5\xd1\x83\xd0\xb4\xd0\xbe\xd0\xb1\xd1\x81\xd1\x82\xd0\xb2\xd0\xb0.", "text_message_start": "\xd0\x9f\xd0\xbe\xd1\x81\xd1\x82\xd0\xb0\xd1\x80\xd0\xb0\xd0\xb5\xd0\xbc\xd1\x81\xd1\x8f \xd0\xbe\xd1\x82\xd0\xb2\xd0\xb5\xd1\x82\xd0\xb8\xd1\x82\xd1\x8c \xd0\xbd\xd0\xb0 \xd0\x92\xd0\xb0\xd1\x88 \xd0\xb2\xd0\xbe\xd0\xbf\xd1\x80\xd0\xbe\xd1\x81 \xd0\xba\xd0\xb0\xd0\xba \xd0\xbc\xd0\xbe\xd0\xb6\xd0\xbd\xd0\xbe \xd1\x81\xd0\xba\xd0\xbe\xd1\x80\xd0\xb5\xd0\xb5", "text_dial_success": "\xd0\xa1\xd0\xbe\xd0\xb5\xd0\xb4\xd0\xb8\xd0\xbd\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5 \xd1\x83\xd1\x81\xd1\x82\xd0\xb0\xd0\xbd\xd0\xbe\xd0\xb2\xd0\xbb\xd0\xb5\xd0\xbd\xd0\xbe!<br/>\xd0\x92\xd0\xbe\xd0\xb7\xd1\x8c\xd0\xbc\xd0\xb8\xd1\x82\xd0\xb5 \xd1\x82\xd1\x80\xd1\x83\xd0\xb1\xd0\xba\xd1\x83.", "param_main_height": 340, "cookie_ttl_seconds": 3600, "color_font_link": "#BCBCAC", "param_main_button_border_radius": 19, "flag_name_field_obligatory": false, "color_background_global": "#4e5a5d", "font_global_index": 0, "text_order_start": "\xd0\x92\xd1\x8b\xd0\xb1\xd0\xb5\xd1\x80\xd0\xb8\xd1\x82\xd0\xb5 \xd1\x83\xd0\xb4\xd0\xbe\xd0\xb1\xd0\xbd\xd0\xbe\xd0\xb5 \xd0\xb2\xd1\x80\xd0\xb5\xd0\xbc\xd1\x8f \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xba\xd0\xb0", "color_opacity_main_button": ".9", "flag_email_field_obligatory": true, "text_dial_dropped": "\xd0\x9f\xd0\xbe\xd1\x85\xd0\xbe\xd0\xb6\xd0\xb5 \xd1\x87\xd1\x82\xd0\xbe \xd0\xb2\xd1\x8b \xd0\xbd\xd0\xb5 \xd0\xbf\xd0\xbe\xd0\xb4\xd0\xbd\xd1\x8f\xd0\xbb\xd0\xb8 \xd1\x82\xd1\x80\xd1\x83\xd0\xb1\xd0\xba\xd1\x83 \xd0\xb8\xd0\xbb\xd0\xb8 \xd1\x81\xd0\xb1\xd1\x80\xd0\xbe\xd1\x81\xd0\xb8\xd0\xbb\xd0\xb8 \xd0\xb2\xd1\x8b\xd0\xb7\xd0\xbe\xd0\xb2.", "param_dial_height": 270, "param_order_sent_height": 210, "param_total_max_height": 448, 
"text_dial_out_of_balance": "\xd0\x98\xd0\xb7\xd0\xb2\xd0\xb8\xd0\xbd\xd0\xb8\xd1\x82\xd0\xb5, \xd1\x81\xd0\xb5\xd1\x80\xd0\xb2\xd0\xb8\xd1\x81 \xd0\xb2 \xd0\xb4\xd0\xb0\xd0\xbd\xd0\xbd\xd0\xbe\xd0\xb9 \xd0\xbc\xd0\xbe\xd0\xbc\xd0\xb5\xd0\xbd\xd1\x82 \xd0\xbd\xd0\xb5 \xd0\xb4\xd0\xbe\xd1\x81\xd1\x82\xd1\x83\xd0\xbf\xd0\xb5\xd0\xbd.<br/>\xd0\x9f\xd1\x80\xd0\xbe\xd1\x81\xd0\xb8\xd0\xbc \xd0\xbf\xd1\x80\xd0\xbe\xd1\x89\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5 \xd0\xb7\xd0\xb0 \xd0\xb4\xd0\xbe\xd1\x81\xd1\x82\xd0\xb0\xd0\xb2\xd0\xbb\xd0\xb5\xd0\xbd\xd0\xbd\xd1\x8b\xd0\xb5 \xd0\xbd\xd0\xb5\xd1\x83\xd0\xb4\xd0\xbe\xd0\xb1\xd1\x81\xd1\x82\xd0\xb2\xd0\xb0.", "text_link_go_back": "\xd0\x9d\xd0\xb0\xd0\xb7\xd0\xb0\xd0\xb4", "param_order_height": 320, "param_manager_panel_height": 50, "text_main": "<span style=\\"color: #FAD468;\\">\xd0\x97\xd0\xb4\xd1\x80\xd0\xb0\xd0\xb2\xd1\x81\xd1\x82\xd0\xb2\xd1\x83\xd0\xb9\xd1\x82\xd0\xb5!</span><br/>\xd0\x9f\xd0\xbe\xd0\xbb\xd1\x83\xd1\x87\xd0\xb8\xd1\x82\xd1\x8c 25% \xd1\x81\xd0\xba\xd0\xb8\xd0\xb4\xd0\xba\xd1\x83 \xd0\xbd\xd0\xb0 \xd0\xbb\xd1\x8e\xd0\xb1\xd0\xbe\xd0\xb9 \xd1\x82\xd0\xbe\xd0\xb2\xd0\xb0\xd1\x80 \xd0\xbd\xd0\xb0 \xd0\xbd\xd0\xb0\xd1\x88\xd0\xb5\xd0\xbc \xd1\x81\xd0\xb0\xd0\xb9\xd1\x82\xd0\xb5 \xd0\xbe\xd1\x87\xd0\xb5\xd0\xbd\xd1\x8c \xd0\xbb\xd0\xb5\xd0\xb3\xd0\xba\xd0\xbe!<br/>\xd0\x9f\xd1\x80\xd0\xbe\xd1\x81\xd1\x82\xd0\xbe \xd0\xb7\xd0\xb0\xd0\xba\xd0\xb0\xd0\xb6\xd0\xb8\xd1\x82\xd0\xb5 \xd0\xbe\xd0\xb1\xd1\x80\xd0\xb0\xd1\x82\xd0\xbd\xd1\x8b\xd0\xb9 \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xbe\xd0\xba \xd0\xbf\xd1\x80\xd1\x8f\xd0\xbc\xd0\xbe \xd1\x81\xd0\xb5\xd0\xb9\xd1\x87\xd0\xb0\xd1\x81.", "flag_button_visible": true, "flag_phone_field_obligatory": true, "text_dial_finished": "\xd0\xa1\xd0\xbe\xd0\xb5\xd0\xb4\xd0\xb8\xd0\xbd\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5 \xd0\xb7\xd0\xb0\xd0\xb2\xd0\xb5\xd1\x80\xd1\x88\xd0\xb5\xd0\xbd\xd0\xbe.", "text_free_done": "\xd0\xa1\xd0\xbf\xd0\xb0\xd1\x81\xd0\xb8\xd0\xb1\xd0\xbe!<br/>\xd0\x9c\xd1\x8b \xd0\xbe\xd0\xb1\xd1\x8f\xd0\xb7\xd0\xb0\xd1\x82\xd0\xb5\xd0\xbb\xd1\x8c\xd0\xbd\xd0\xbe \xd0\xbf\xd0\xb5\xd1\x80\xd0\xb5\xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xb8\xd0\xbc \xd0\x92\xd0\xb0\xd0\xbc \xd0\xb2 \xd1\x83\xd0\xba\xd0\xb0\xd0\xb7\xd0\xb0\xd0\xbd\xd0\xbd\xd0\xbe\xd0\xb5 \xd0\xb2\xd1\x80\xd0\xb5\xd0\xbc\xd1\x8f.", "flag_email_field": false, "flag_name_field": false, "param_free_sent_height": 170, "color_background_inputs": "#ccc", "text_free_start": "<span style=\\"color: #FAD468;\\">\xd0\x97\xd0\xb4\xd1\x80\xd0\xb0\xd0\xb2\xd1\x81\xd1\x82\xd0\xb2\xd1\x83\xd0\xb9\xd1\x82\xd0\xb5!</span><br/>\xd0\x9e\xd1\x81\xd1\x82\xd0\xb0\xd0\xb2\xd1\x8c\xd1\x82\xd0\xb5 \xd0\x92\xd0\xb0\xd1\x88 \xd0\xbd\xd0\xbe\xd0\xbc\xd0\xb5\xd1\x80 \xd1\x82\xd0\xb5\xd0\xbb\xd0\xb5\xd1\x84\xd0\xbe\xd0\xbd\xd0\xb0 \xd0\xb8 \xd0\xb2\xd1\x8b\xd0\xb1\xd0\xb5\xd1\x80\xd0\xb8\xd1\x82color_background_image_global\xd0\xb5 \xd1\x83\xd0\xb4\xd0\xbe\xd0\xb1\xd0\xbd\xd0\xbe\xd0\xb5 \xd0\xb2\xd1\x80\xd0\xb5\xd0\xbc\xd1\x8f \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xba\xd0\xb0. 
\xd0\x9c\xd1\x8b \xd0\xbf\xd0\xb5\xd1\x80\xd0\xb5\xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xb8\xd0\xbc \xd0\xb8 \xd0\xbf\xd1\x80\xd0\xbe\xd0\xba\xd0\xbe\xd0\xbd\xd1\x81\xd1\x83\xd0\xbb\xd1\x8c\xd1\x82\xd0\xb8\xd1\x80\xd1\x83\xd0\xb5\xd0\xbc \xd0\xbf\xd0\xbe \xd0\xb2\xd1\x81\xd0\xb5\xd0\xbc \xd0\xb2\xd0\xbe\xd0\xbf\xd1\x80\xd0\xbe\xd1\x81\xd0\xb0\xd0\xbc.", "param_z_index_global": 9999, "text_dial_failed": "\xd0\x98\xd0\xb7\xd0\xb2\xd0\xb8\xd0\xbd\xd0\xb8\xd1\x82\xd0\xb5, \xd1\x81\xd0\xb5\xd1\x80\xd0\xb2\xd0\xb8\xd1\x81 \xd0\xb2 \xd0\xb4\xd0\xb0\xd0\xbd\xd0\xbd\xd0\xbe\xd0\xb9 \xd0\xbc\xd0\xbe\xd0\xbc\xd0\xb5\xd0\xbd\xd1\x82 \xd0\xbd\xd0\xb5 \xd0\xb4\xd0\xbe\xd1\x81\xd1\x82\xd1\x83\xd0\xbf\xd0\xb5\xd0\xbd.<br/>\xd0\x9f\xd1\x80\xd0\xbe\xd1\x81\xd0\xb8\xd0\xbc \xd0\xbf\xd1\x80\xd0\xbe\xd1\x89\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5 \xd0\xb7\xd0\xb0 \xd0\xb4\xd0\xbe\xd1\x81\xd1\x82\xd0\xb0\xd0\xb2\xd0\xbb\xd0\xb5\xd0\xbd\xd0\xbd\xd1\x8b\xd0\xb5 \xd0\xbd\xd0\xb5\xd1\x83\xd0\xb4\xd0\xbe\xd0\xb1\xd1\x81\xd1\x82\xd0\xb2\xd0\xb0.", "param_free_height": 300, "param_root_position_left": "initial", "param_font1": "Arial", "text_dial_calling": "\xd0\x9e\xd0\xb6\xd0\xb8\xd0\xb4\xd0\xb0\xd0\xb9\xd1\x82\xd0\xb5 \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xba\xd0\xb0!<br/>\xd0\x9e\xd0\xbf\xd0\xb5\xd1\x80\xd0\xb0\xd1\x82\xd0\xbe\xd1\x80 \xd0\xbd\xd0\xb0 \xd1\x81\xd0\xb2\xd1\x8f\xd0\xb7\xd0\xb8, \xd0\xbf\xd1\x80\xd0\xbe\xd0\xb8\xd0\xb7\xd0\xb2\xd0\xbe\xd0\xb4\xd0\xb8\xd1\x82\xd1\x81\xd1\x8f \xd0\xbd\xd0\xb0\xd0\xb1\xd0\xbe\xd1\x80 \xd0\xb2\xd0\xb2\xd0\xb5\xd0\xb4\xd0\xb5\xd0\xbd\xd0\xbd\xd0\xbe\xd0\xb3\xd0\xbe \xd0\xbd\xd0\xbe\xd0\xbc\xd0\xb5\xd1\x80\xd0\xb0 \xd1\x82\xd0\xb5\xd0\xbb\xd0\xb5\xd1\x84\xd0\xbe\xd0\xbd\xd0\xb0.", "text_dial_refused": "\xd0\x9e\xd0\xbf\xd0\xb5\xd1\x80\xd0\xb0\xd1\x82\xd0\xbe\xd1\x80 \xd0\xbd\xd0\xb5 \xd0\xb4\xd0\xbe\xd1\x81\xd1\x82\xd1\x83\xd0\xbf\xd0\xb5\xd0\xbd, \xd0\xb8\xd0\xbb\xd0\xb8 \xd0\xb7\xd0\xb0\xd0\xbd\xd1\x8f\xd1\x82 \xd0\xb2 \xd0\xb4\xd0\xb0\xd0\xbd\xd0\xbd\xd1\x8b\xd0\xb9 \xd0\xbc\xd0\xbe\xd0\xbc\xd0\xb5\xd0\xbd\xd1\x82.<br/>\xd0\x9f\xd0\xbe\xd0\xbf\xd1\x80\xd0\xbe\xd0\xb1\xd1\x83\xd0\xb9\xd1\x82\xd0\xb5 \xd1\x87\xd1\x83\xd1\x82\xd1\x8c \xd0\xbf\xd0\xbe\xd0\xb7\xd0\xb6\xd0\xb5.", "param_font_size_inputs": "14px", "text_dial_start": "\xd0\x9e\xd0\xb6\xd0\xb8\xd0\xb4\xd0\xb0\xd0\xb9\xd1\x82\xd0\xb5 \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xba\xd0\xb0!<br/>\xd0\x9f\xd1\x80\xd0\xbe\xd0\xb8\xd0\xb7\xd0\xb2\xd0\xbe\xd0\xb4\xd0\xb8\xd1\x82\xd1\x81\xd1\x8f \xd1\x81\xd0\xbe\xd0\xb5\xd0\xb4\xd0\xb8\xd0\xbd\xd0\xb5\xd0\xbd\xd0\xb8\xd0\xb5 \xd1\x81 \xd1\x81\xd0\xb5\xd1\x80\xd0\xb2\xd0\xb5\xd1\x80\xd0\xbe\xd0\xbc...", "color_opacity_call_panel": "1", "fonts": [{"URL": "http://.../", "Name": "Arial", "Family": "Arial"}, {"URL": "http://.../", "Name": "Times New Roman", "Family": "TNR"}], "param_timeoff_height": 280, "color_opacity_call_button": "1", "text_button": "\xd0\xbe\xd0\xb1\xd1\x80\xd0\xb0\xd1\x82\xd0\xbd\xd1\x8b\xd0\xb9 \xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xbe\xd0\xba", "controllers": {"hash_checker": {"keyword": "callfeed"}, "delayed_popup": {"delay": 7000}}, "font_global_size": "12px", "param_message_height": 400, "flag_is_operator_shown_in_widget": true, "color_opacity_global": ".9", "text_dial_ready": "\xd0\x9f\xd0\xbe\xd0\xb4\xd0\xb3\xd0\xbe\xd1\x82\xd0\xbe\xd0\xb2\xd0\xba\xd0\xb0 \xd1\x81\xd0\xbe\xd0\xb5\xd0\xb4\xd0\xb8\xd0\xbd\xd0\xb5\xd0\xbd\xd0\xb8\xd1\x8f...", "param_root_position_right": "20px", "position": "fixed", "param_button_height": 38, "text_order_done": 
"\xd0\xa1\xd0\xbf\xd0\xb0\xd1\x81\xd0\xb8\xd0\xb1\xd0\xbe!<br/>\xd0\x9c\xd1\x8b \xd0\xbe\xd0\xb1\xd1\x8f\xd0\xb7\xd0\xb0\xd1\x82\xd0\xb5\xd0\xbb\xd1\x8c\xd0\xbd\xd0\xbe \xd0\xbf\xd0\xb5\xd1\x80\xd0\xb5\xd0\xb7\xd0\xb2\xd0\xbe\xd0\xbd\xd0\xb8\xd0\xbc \xd0\x92\xd0\xb0\xd0\xbc \xd0\xb2 \xd1\x83\xd0\xba\xd0\xb0\xd0\xb7\xd0\xb0\xd0\xbd\xd0\xbd\xd0\xbe\xd0\xb5 \xd0\xb2\xd1\x80\xd0\xb5\xd0\xbc\xd1\x8f.", "flag_disable_on_mobiles": false, "font_url1": ""}', max_length=5000),
preserve_default=True,
),
]
| 561.518519
| 14,136
| 0.724161
| 3,301
| 15,161
| 3.263254
| 0.063617
| 0.088563
| 0.089398
| 0.040104
| 0.806814
| 0.72837
| 0.685852
| 0.642406
| 0.622354
| 0.604066
| 0
| 0.196449
| 0.037728
| 15,161
| 26
| 14,137
| 583.115385
| 0.541915
| 0.001385
| 0
| 0.4
| 0
| 0.15
| 0.958581
| 0.824151
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.25
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 7316cd559cb5600fd07a0f0339d9a62e16084570
| 112,885
| py
| Python
| napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/__init__.py
| ckishimo/napalm-yang
| 8f2bd907bd3afcde3c2f8e985192de74748baf6c
| ["Apache-2.0"]
| 64
| 2016-10-20T15:47:18.000Z
| 2021-11-11T11:57:32.000Z
| napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/__init__.py
| ckishimo/napalm-yang
| 8f2bd907bd3afcde3c2f8e985192de74748baf6c
| ["Apache-2.0"]
| 126
| 2016-10-05T10:36:14.000Z
| 2019-05-15T08:43:23.000Z
| napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/__init__.py
| ckishimo/napalm-yang
| 8f2bd907bd3afcde3c2f8e985192de74748baf6c
| ["Apache-2.0"]
| 63
| 2016-11-07T15:23:08.000Z
| 2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
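# Note added for clarity (not generated code): this shim lets the module run
# on both interpreter lines. Under PY3, "long = int" keeps the many
# RestrictedClassType(base_type=long, ...) calls below valid, and importing
# builtins as __builtin__ preserves the __builtin__.property(...) attribute
# definitions at the bottom of each class.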
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/system-level-counters/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: The container defines a list of system counters for the IS.
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__corrupted_lsps",
"__database_overloads",
"__manual_address_drop_from_areas",
"__exceed_max_seq_nums",
"__seq_num_skips",
"__own_lsp_purges",
"__id_len_mismatch",
"__part_changes",
"__max_area_address_mismatches",
"__auth_fails",
"__spf_runs",
"__auth_type_fails",
"__lsp_errors",
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__corrupted_lsps = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="corrupted-lsps",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__database_overloads = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="database-overloads",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__manual_address_drop_from_areas = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="manual-address-drop-from-areas",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__exceed_max_seq_nums = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="exceed-max-seq-nums",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__seq_num_skips = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="seq-num-skips",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__own_lsp_purges = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="own-lsp-purges",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__id_len_mismatch = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="id-len-mismatch",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__part_changes = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="part-changes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__max_area_address_mismatches = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="max-area-address-mismatches",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__auth_fails = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__spf_runs = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="spf-runs",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__auth_type_fails = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-type-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__lsp_errors = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="lsp-errors",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"system-level-counters",
"state",
]
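# Illustrative example (assumed usage): an unparented instance returns the
# literal list above, so "/" + "/".join(state()._path()) evaluates to
# "/network-instances/network-instance/protocols/protocol/isis/levels/level/system-level-counters/state";
# a parented instance prepends its ancestors' path segments instead.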
def _get_corrupted_lsps(self):
"""
Getter method for corrupted_lsps, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/corrupted_lsps (yang:counter32)
YANG Description: Number of corrupted in-memory LSPs detected. LSPs received from the
wire with a bad checksum are silently dropped and not counted. LSPs
received from the wire with parse errors are counted by lsp-errors. MIB
Entry: SysCorrLSPs.
"""
return self.__corrupted_lsps
def _set_corrupted_lsps(self, v, load=False):
"""
Setter method for corrupted_lsps, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/corrupted_lsps (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_corrupted_lsps is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_corrupted_lsps() directly.
YANG Description: Number of corrupted in-memory LSPs detected. LSPs received from the
wire with a bad checksum are silently dropped and not counted. LSPs
received from the wire with parse errors are counted by lsp-errors. MIB
Entry: SysCorrLSPs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="corrupted-lsps",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """corrupted_lsps must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="corrupted-lsps", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__corrupted_lsps = t
if hasattr(self, "_set"):
self._set()
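# Hypothetical call sequence for this setter (values are made up):
#
#   s = state()
#   s._set_corrupted_lsps(42)       # accepted, coerced to the restricted long
#   s._set_corrupted_lsps(2 ** 32)  # outside 0..4294967295, so the try block
#                                   # fails and the ValueError above is raised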
def _unset_corrupted_lsps(self):
self.__corrupted_lsps = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="corrupted-lsps",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_database_overloads(self):
"""
Getter method for database_overloads, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/database_overloads (yang:counter32)
YANG Description: Number of times the database has become
overloaded.
MIB entry: SysLSPL(Level)DbaseOloads.
"""
return self.__database_overloads
def _set_database_overloads(self, v, load=False):
"""
Setter method for database_overloads, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/database_overloads (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_database_overloads is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_database_overloads() directly.
YANG Description: Number of times the database has become
overloaded.
MIB entry: SysLSPL(Level)DbaseOloads.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="database-overloads",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """database_overloads must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="database-overloads", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__database_overloads = t
if hasattr(self, "_set"):
self._set()
def _unset_database_overloads(self):
self.__database_overloads = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="database-overloads",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_manual_address_drop_from_areas(self):
"""
Getter method for manual_address_drop_from_areas, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/manual_address_drop_from_areas (yang:counter32)
YANG Description: Number of times a manual address has been dropped from area.
MIB Entry: SysManAddrDropFromAreas.
"""
return self.__manual_address_drop_from_areas
def _set_manual_address_drop_from_areas(self, v, load=False):
"""
Setter method for manual_address_drop_from_areas, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/manual_address_drop_from_areas (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_manual_address_drop_from_areas is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_manual_address_drop_from_areas() directly.
YANG Description: Number of times a manual address has been dropped from area.
MIB Entry: SysManAddrDropFromAreas.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="manual-address-drop-from-areas",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """manual_address_drop_from_areas must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="manual-address-drop-from-areas", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__manual_address_drop_from_areas = t
if hasattr(self, "_set"):
self._set()
def _unset_manual_address_drop_from_areas(self):
self.__manual_address_drop_from_areas = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="manual-address-drop-from-areas",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_exceed_max_seq_nums(self):
"""
Getter method for exceed_max_seq_nums, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/exceed_max_seq_nums (yang:counter32)
YANG Description: The number of times the system has attempted to exceed the maximum
sequence number. MIB Entry: SysAttmptToExMaxSeqNums.
"""
return self.__exceed_max_seq_nums
def _set_exceed_max_seq_nums(self, v, load=False):
"""
Setter method for exceed_max_seq_nums, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/exceed_max_seq_nums (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_exceed_max_seq_nums is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_exceed_max_seq_nums() directly.
YANG Description: The number of times the system has attempted to exceed the maximum
sequence number. MIB Entry: SysAttmptToExMaxSeqNums.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="exceed-max-seq-nums",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """exceed_max_seq_nums must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="exceed-max-seq-nums", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__exceed_max_seq_nums = t
if hasattr(self, "_set"):
self._set()
def _unset_exceed_max_seq_nums(self):
self.__exceed_max_seq_nums = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="exceed-max-seq-nums",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_seq_num_skips(self):
"""
Getter method for seq_num_skips, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/seq_num_skips (yang:counter32)
YANG Description: Number of times a sequence number skip has occurred. MIB Entry:
SysSeqNumSkips.
"""
return self.__seq_num_skips
def _set_seq_num_skips(self, v, load=False):
"""
Setter method for seq_num_skips, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/seq_num_skips (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_seq_num_skips is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_seq_num_skips() directly.
YANG Description: Number of times a sequence number skip has occurred. MIB Entry:
SysSeqNumSkips.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="seq-num-skips",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """seq_num_skips must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="seq-num-skips", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__seq_num_skips = t
if hasattr(self, "_set"):
self._set()
def _unset_seq_num_skips(self):
self.__seq_num_skips = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="seq-num-skips",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_own_lsp_purges(self):
"""
Getter method for own_lsp_purges, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/own_lsp_purges (yang:counter32)
YANG Description: Number of times a zero-aged copy of the system's
own LSP is received from some other node.
MIB Entry: isisSysOwnLSPPurges.
"""
return self.__own_lsp_purges
def _set_own_lsp_purges(self, v, load=False):
"""
Setter method for own_lsp_purges, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/own_lsp_purges (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_own_lsp_purges is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_own_lsp_purges() directly.
YANG Description: Number of times a zero-aged copy of the system's
own LSP is received from some other node.
MIB Entry: isisSysOwnLSPPurges.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="own-lsp-purges",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """own_lsp_purges must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="own-lsp-purges", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__own_lsp_purges = t
if hasattr(self, "_set"):
self._set()
def _unset_own_lsp_purges(self):
self.__own_lsp_purges = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="own-lsp-purges",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_id_len_mismatch(self):
"""
Getter method for id_len_mismatch, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/id_len_mismatch (yang:counter32)
YANG Description: Number of times a PDU is received with a different value for ID field
length from that of the receiving system. MIB Entry:
isisSysIDFieldLenMismatches.
"""
return self.__id_len_mismatch
def _set_id_len_mismatch(self, v, load=False):
"""
Setter method for id_len_mismatch, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/id_len_mismatch (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_id_len_mismatch is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_id_len_mismatch() directly.
YANG Description: Number of times a PDU is received with a different value for ID field
length from that of the receiving system. MIB Entry:
isisSysIDFieldLenMismatches.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="id-len-mismatch",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """id_len_mismatch must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="id-len-mismatch", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__id_len_mismatch = t
if hasattr(self, "_set"):
self._set()
def _unset_id_len_mismatch(self):
self.__id_len_mismatch = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="id-len-mismatch",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_part_changes(self):
"""
Getter method for part_changes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/part_changes (yang:counter32)
YANG Description: The number of partition changes detected. MIB Entry: SysPartChanges.
"""
return self.__part_changes
def _set_part_changes(self, v, load=False):
"""
Setter method for part_changes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/part_changes (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_part_changes is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_part_changes() directly.
YANG Description: The number of partition changes detected. MIB Entry: SysPartChanges.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="part-changes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """part_changes must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="part-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__part_changes = t
if hasattr(self, "_set"):
self._set()
def _unset_part_changes(self):
self.__part_changes = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="part-changes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_max_area_address_mismatches(self):
"""
Getter method for max_area_address_mismatches, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/max_area_address_mismatches (yang:counter32)
YANG Description: Number of times a PDU is received with a different value for
MaximumAreaAddresses from that of the receiving system. MIB Entry:
SysMaxAreaAddrMismatches.
"""
return self.__max_area_address_mismatches
def _set_max_area_address_mismatches(self, v, load=False):
"""
Setter method for max_area_address_mismatches, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/max_area_address_mismatches (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_max_area_address_mismatches is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_max_area_address_mismatches() directly.
YANG Description: Number of times a PDU is received with a different value for
MaximumAreaAddresses from that of the receiving system. MIB Entry:
SysMaxAreaAddrMismatches.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="max-area-address-mismatches",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """max_area_address_mismatches must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-area-address-mismatches", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__max_area_address_mismatches = t
if hasattr(self, "_set"):
self._set()
def _unset_max_area_address_mismatches(self):
self.__max_area_address_mismatches = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="max-area-address-mismatches",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_auth_fails(self):
"""
Getter method for auth_fails, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/auth_fails (yang:counter32)
YANG Description: The number of authentication key failures.
MIB Entry: SysAuthFails.
"""
return self.__auth_fails
def _set_auth_fails(self, v, load=False):
"""
Setter method for auth_fails, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/auth_fails (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_auth_fails is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_auth_fails() directly.
YANG Description: The number of authentication key failures.
MIB Entry: SysAuthFails.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """auth_fails must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="auth-fails", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__auth_fails = t
if hasattr(self, "_set"):
self._set()
def _unset_auth_fails(self):
self.__auth_fails = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_spf_runs(self):
"""
Getter method for spf_runs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/spf_runs (yang:counter32)
YANG Description: The number of times SPF was run at this level.
"""
return self.__spf_runs
def _set_spf_runs(self, v, load=False):
"""
Setter method for spf_runs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/spf_runs (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_spf_runs is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_spf_runs() directly.
YANG Description: The number of times SPF was run at this level.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="spf-runs",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """spf_runs must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="spf-runs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__spf_runs = t
if hasattr(self, "_set"):
self._set()
def _unset_spf_runs(self):
self.__spf_runs = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="spf-runs",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_auth_type_fails(self):
"""
Getter method for auth_type_fails, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/auth_type_fails (yang:counter32)
YANG Description: The number of authentication type mismatches.
"""
return self.__auth_type_fails
def _set_auth_type_fails(self, v, load=False):
"""
Setter method for auth_type_fails, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/auth_type_fails (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_auth_type_fails is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_auth_type_fails() directly.
YANG Description: The number of authentication type mismatches.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-type-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """auth_type_fails must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="auth-type-fails", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__auth_type_fails = t
if hasattr(self, "_set"):
self._set()
def _unset_auth_type_fails(self):
self.__auth_type_fails = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-type-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_lsp_errors(self):
"""
Getter method for lsp_errors, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/lsp_errors (yang:counter32)
YANG Description: The number of received LSPs with errors.
"""
return self.__lsp_errors
def _set_lsp_errors(self, v, load=False):
"""
Setter method for lsp_errors, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/lsp_errors (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_errors is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_errors() directly.
YANG Description: The number of received LSPs with errors.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="lsp-errors",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """lsp_errors must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__lsp_errors = t
if hasattr(self, "_set"):
self._set()
def _unset_lsp_errors(self):
self.__lsp_errors = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="lsp-errors",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
corrupted_lsps = __builtin__.property(_get_corrupted_lsps)
database_overloads = __builtin__.property(_get_database_overloads)
manual_address_drop_from_areas = __builtin__.property(
_get_manual_address_drop_from_areas
)
exceed_max_seq_nums = __builtin__.property(_get_exceed_max_seq_nums)
seq_num_skips = __builtin__.property(_get_seq_num_skips)
own_lsp_purges = __builtin__.property(_get_own_lsp_purges)
id_len_mismatch = __builtin__.property(_get_id_len_mismatch)
part_changes = __builtin__.property(_get_part_changes)
max_area_address_mismatches = __builtin__.property(_get_max_area_address_mismatches)
auth_fails = __builtin__.property(_get_auth_fails)
spf_runs = __builtin__.property(_get_spf_runs)
auth_type_fails = __builtin__.property(_get_auth_type_fails)
lsp_errors = __builtin__.property(_get_lsp_errors)
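# Editorial note: every property above is built with a getter only, since
# all leaves carry is_config=False. Plain attribute assignment such as
# "state().spf_runs = 1" therefore raises AttributeError; backends populate
# values through the private _set_* methods documented above.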
_pyangbind_elements = OrderedDict(
[
("corrupted_lsps", corrupted_lsps),
("database_overloads", database_overloads),
("manual_address_drop_from_areas", manual_address_drop_from_areas),
("exceed_max_seq_nums", exceed_max_seq_nums),
("seq_num_skips", seq_num_skips),
("own_lsp_purges", own_lsp_purges),
("id_len_mismatch", id_len_mismatch),
("part_changes", part_changes),
("max_area_address_mismatches", max_area_address_mismatches),
("auth_fails", auth_fails),
("spf_runs", spf_runs),
("auth_type_fails", auth_type_fails),
("lsp_errors", lsp_errors),
]
)
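# Minimal read-out sketch (assumed usage; values depend on what a backend
# has loaded into the instance):
#
#   s = state()
#   for leaf in s._pyangbind_elements:
#       print(leaf, getattr(s, leaf))
#
# This walks the OrderedDict just defined and prints each counter property.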
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/system-level-counters/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: The container defines a list of system counters for the IS.
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__corrupted_lsps",
"__database_overloads",
"__manual_address_drop_from_areas",
"__exceed_max_seq_nums",
"__seq_num_skips",
"__own_lsp_purges",
"__id_len_mismatch",
"__part_changes",
"__max_area_address_mismatches",
"__auth_fails",
"__spf_runs",
"__auth_type_fails",
"__lsp_errors",
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__corrupted_lsps = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="corrupted-lsps",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__database_overloads = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="database-overloads",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__manual_address_drop_from_areas = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="manual-address-drop-from-areas",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__exceed_max_seq_nums = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="exceed-max-seq-nums",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__seq_num_skips = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="seq-num-skips",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__own_lsp_purges = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="own-lsp-purges",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__id_len_mismatch = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="id-len-mismatch",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__part_changes = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="part-changes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__max_area_address_mismatches = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="max-area-address-mismatches",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__auth_fails = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__spf_runs = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="spf-runs",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__auth_type_fails = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-type-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
self.__lsp_errors = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="lsp-errors",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"system-level-counters",
"state",
]
def _get_corrupted_lsps(self):
"""
Getter method for corrupted_lsps, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/corrupted_lsps (yang:counter32)
YANG Description: Number of corrupted in-memory LSPs detected. LSPs received from the
wire with a bad checksum are silently dropped and not counted. LSPs
received from the wire with parse errors are counted by lsp-errors. MIB
Entry: SysCorrLSPs.
"""
return self.__corrupted_lsps
def _set_corrupted_lsps(self, v, load=False):
"""
Setter method for corrupted_lsps, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/corrupted_lsps (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_corrupted_lsps is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_corrupted_lsps() directly.
YANG Description: Number of corrupted in-memory LSPs detected. LSPs received from the
wire with a bad checksum are silently dropped and not counted. LSPs
received from the wire with parse errors are counted by lsp-errors. MIB
Entry: SysCorrLSPs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="corrupted-lsps",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """corrupted_lsps must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="corrupted-lsps", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__corrupted_lsps = t
if hasattr(self, "_set"):
self._set()
def _unset_corrupted_lsps(self):
self.__corrupted_lsps = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="corrupted-lsps",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_database_overloads(self):
"""
Getter method for database_overloads, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/database_overloads (yang:counter32)
YANG Description: Number of times the database has become
overloaded.
MIB entry: SysLSPL(Level)DbaseOloads.
"""
return self.__database_overloads
def _set_database_overloads(self, v, load=False):
"""
Setter method for database_overloads, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/database_overloads (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_database_overloads is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_database_overloads() directly.
YANG Description: Number of times the database has become
overloaded.
MIB entry: SysLSPL(Level)DbaseOloads.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="database-overloads",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """database_overloads must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="database-overloads", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__database_overloads = t
if hasattr(self, "_set"):
self._set()
def _unset_database_overloads(self):
self.__database_overloads = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="database-overloads",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_manual_address_drop_from_areas(self):
"""
Getter method for manual_address_drop_from_areas, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/manual_address_drop_from_areas (yang:counter32)
YANG Description: Number of times a manual address has been dropped from the area.
MIB Entry: SysManAddrDropFromAreas.
"""
return self.__manual_address_drop_from_areas
def _set_manual_address_drop_from_areas(self, v, load=False):
"""
Setter method for manual_address_drop_from_areas, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/manual_address_drop_from_areas (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_manual_address_drop_from_areas is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_manual_address_drop_from_areas() directly.
YANG Description: Number of times a manual address has been dropped from the area.
MIB Entry: SysManAddrDropFromAreas.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="manual-address-drop-from-areas",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """manual_address_drop_from_areas must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="manual-address-drop-from-areas", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__manual_address_drop_from_areas = t
if hasattr(self, "_set"):
self._set()
def _unset_manual_address_drop_from_areas(self):
self.__manual_address_drop_from_areas = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="manual-address-drop-from-areas",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_exceed_max_seq_nums(self):
"""
Getter method for exceed_max_seq_nums, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/exceed_max_seq_nums (yang:counter32)
YANG Description: The number of times the system has attempted to exceed the maximum
sequence number. MIB Entry: SysAttmptToExMaxSeqNums.
"""
return self.__exceed_max_seq_nums
def _set_exceed_max_seq_nums(self, v, load=False):
"""
Setter method for exceed_max_seq_nums, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/exceed_max_seq_nums (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_exceed_max_seq_nums is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_exceed_max_seq_nums() directly.
YANG Description: The number of times the system has attempted to exceed the maximum
sequence number. MIB Entry: SysAttmptToExMaxSeqNums.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="exceed-max-seq-nums",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """exceed_max_seq_nums must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="exceed-max-seq-nums", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__exceed_max_seq_nums = t
if hasattr(self, "_set"):
self._set()
def _unset_exceed_max_seq_nums(self):
self.__exceed_max_seq_nums = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="exceed-max-seq-nums",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_seq_num_skips(self):
"""
Getter method for seq_num_skips, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/seq_num_skips (yang:counter32)
YANG Description: Number of times a sequence number skip has occurred. MIB Entry:
SysSeqNumSkips.
"""
return self.__seq_num_skips
def _set_seq_num_skips(self, v, load=False):
"""
Setter method for seq_num_skips, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/seq_num_skips (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_seq_num_skips is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_seq_num_skips() directly.
YANG Description: Number of times a sequence number skip has occurred. MIB Entry:
SysSeqNumSkips.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="seq-num-skips",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """seq_num_skips must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="seq-num-skips", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__seq_num_skips = t
if hasattr(self, "_set"):
self._set()
def _unset_seq_num_skips(self):
self.__seq_num_skips = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="seq-num-skips",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_own_lsp_purges(self):
"""
Getter method for own_lsp_purges, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/own_lsp_purges (yang:counter32)
YANG Description: Number of times a zero-aged copy of the system's
own LSP is received from some other node.
MIB Entry: isisSysOwnLSPPurges.
"""
return self.__own_lsp_purges
def _set_own_lsp_purges(self, v, load=False):
"""
Setter method for own_lsp_purges, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/own_lsp_purges (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_own_lsp_purges is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_own_lsp_purges() directly.
YANG Description: Number of times a zero-aged copy of the system's
own LSP is received from some other node.
MIB Entry: isisSysOwnLSPPurges.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="own-lsp-purges",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """own_lsp_purges must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="own-lsp-purges", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__own_lsp_purges = t
if hasattr(self, "_set"):
self._set()
def _unset_own_lsp_purges(self):
self.__own_lsp_purges = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="own-lsp-purges",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_id_len_mismatch(self):
"""
Getter method for id_len_mismatch, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/id_len_mismatch (yang:counter32)
YANG Description: Number of times a PDU is received with a different value for ID field
length from that of the receiving system. MIB Entry:
isisSysIDFieldLenMismatches.
"""
return self.__id_len_mismatch
def _set_id_len_mismatch(self, v, load=False):
"""
Setter method for id_len_mismatch, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/id_len_mismatch (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_id_len_mismatch is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_id_len_mismatch() directly.
YANG Description: Number of times a PDU is received with a different value for ID field
length from that of the receiving system. MIB Entry:
isisSysIDFieldLenMismatches.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="id-len-mismatch",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """id_len_mismatch must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="id-len-mismatch", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__id_len_mismatch = t
if hasattr(self, "_set"):
self._set()
def _unset_id_len_mismatch(self):
self.__id_len_mismatch = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="id-len-mismatch",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_part_changes(self):
"""
Getter method for part_changes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/part_changes (yang:counter32)
YANG Description: The number of partition changes detected. MIB Entry: SysPartChanges.
"""
return self.__part_changes
def _set_part_changes(self, v, load=False):
"""
Setter method for part_changes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/part_changes (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_part_changes is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_part_changes() directly.
YANG Description: The number of partition changes detected. MIB Entry: SysPartChanges.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="part-changes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """part_changes must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="part-changes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__part_changes = t
if hasattr(self, "_set"):
self._set()
def _unset_part_changes(self):
self.__part_changes = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="part-changes",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_max_area_address_mismatches(self):
"""
Getter method for max_area_address_mismatches, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/max_area_address_mismatches (yang:counter32)
YANG Description: Number of times a PDU is received with a different value for
MaximumAreaAddresses from that of the receiving system. MIB Entry:
SysMaxAreaAddrMismatches.
"""
return self.__max_area_address_mismatches
def _set_max_area_address_mismatches(self, v, load=False):
"""
Setter method for max_area_address_mismatches, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/max_area_address_mismatches (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_max_area_address_mismatches is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_max_area_address_mismatches() directly.
YANG Description: Number of times a PDU is received with a different value for
MaximumAreaAddresses from that of the receiving system. MIB Entry:
SysMaxAreaAddrMismatches.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="max-area-address-mismatches",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """max_area_address_mismatches must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-area-address-mismatches", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__max_area_address_mismatches = t
if hasattr(self, "_set"):
self._set()
def _unset_max_area_address_mismatches(self):
self.__max_area_address_mismatches = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="max-area-address-mismatches",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_auth_fails(self):
"""
Getter method for auth_fails, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/auth_fails (yang:counter32)
YANG Description: The number of authentication key failures.
MIB Entry: SysAuthFails.
"""
return self.__auth_fails
def _set_auth_fails(self, v, load=False):
"""
Setter method for auth_fails, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/auth_fails (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_auth_fails is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_auth_fails() directly.
YANG Description: The number of authentication key failures.
MIB Entry: SysAuthFails.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """auth_fails must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="auth-fails", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__auth_fails = t
if hasattr(self, "_set"):
self._set()
def _unset_auth_fails(self):
self.__auth_fails = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_spf_runs(self):
"""
Getter method for spf_runs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/spf_runs (yang:counter32)
YANG Description: The number of times SPF was run at this level.
"""
return self.__spf_runs
def _set_spf_runs(self, v, load=False):
"""
Setter method for spf_runs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/spf_runs (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_spf_runs is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_spf_runs() directly.
YANG Description: The number of times SPF was run at this level.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="spf-runs",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """spf_runs must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="spf-runs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__spf_runs = t
if hasattr(self, "_set"):
self._set()
def _unset_spf_runs(self):
self.__spf_runs = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="spf-runs",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_auth_type_fails(self):
"""
Getter method for auth_type_fails, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/auth_type_fails (yang:counter32)
YANG Description: The number of authentication type mismatches.
"""
return self.__auth_type_fails
def _set_auth_type_fails(self, v, load=False):
"""
Setter method for auth_type_fails, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/auth_type_fails (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_auth_type_fails is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_auth_type_fails() directly.
YANG Description: The number of authentication type mismatches.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-type-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """auth_type_fails must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="auth-type-fails", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__auth_type_fails = t
if hasattr(self, "_set"):
self._set()
def _unset_auth_type_fails(self):
self.__auth_type_fails = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="auth-type-fails",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
def _get_lsp_errors(self):
"""
Getter method for lsp_errors, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/lsp_errors (yang:counter32)
YANG Description: The number of received LSPs with errors.
"""
return self.__lsp_errors
def _set_lsp_errors(self, v, load=False):
"""
Setter method for lsp_errors, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/system_level_counters/state/lsp_errors (yang:counter32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_errors is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lsp_errors() directly.
YANG Description: The number of received LSPs with errors.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="lsp-errors",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """lsp_errors must be of a type compatible with yang:counter32""",
"defined-type": "yang:counter32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:counter32', is_config=False)""",
}
)
self.__lsp_errors = t
if hasattr(self, "_set"):
self._set()
def _unset_lsp_errors(self):
self.__lsp_errors = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="lsp-errors",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:counter32",
is_config=False,
)
corrupted_lsps = __builtin__.property(_get_corrupted_lsps)
database_overloads = __builtin__.property(_get_database_overloads)
manual_address_drop_from_areas = __builtin__.property(
_get_manual_address_drop_from_areas
)
exceed_max_seq_nums = __builtin__.property(_get_exceed_max_seq_nums)
seq_num_skips = __builtin__.property(_get_seq_num_skips)
own_lsp_purges = __builtin__.property(_get_own_lsp_purges)
id_len_mismatch = __builtin__.property(_get_id_len_mismatch)
part_changes = __builtin__.property(_get_part_changes)
max_area_address_mismatches = __builtin__.property(_get_max_area_address_mismatches)
auth_fails = __builtin__.property(_get_auth_fails)
spf_runs = __builtin__.property(_get_spf_runs)
auth_type_fails = __builtin__.property(_get_auth_type_fails)
lsp_errors = __builtin__.property(_get_lsp_errors)
_pyangbind_elements = OrderedDict(
[
("corrupted_lsps", corrupted_lsps),
("database_overloads", database_overloads),
("manual_address_drop_from_areas", manual_address_drop_from_areas),
("exceed_max_seq_nums", exceed_max_seq_nums),
("seq_num_skips", seq_num_skips),
("own_lsp_purges", own_lsp_purges),
("id_len_mismatch", id_len_mismatch),
("part_changes", part_changes),
("max_area_address_mismatches", max_area_address_mismatches),
("auth_fails", auth_fails),
("spf_runs", spf_runs),
("auth_type_fails", auth_type_fails),
("lsp_errors", lsp_errors),
]
)
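# A minimal usage sketch for the generated class above, following the
# pyangbind pattern its docstrings describe: every config-false leaf is
# exposed as a read-only property, and backends populate it through the
# private _set_* method. `state` below stands for an instance of the
# generated system-level-counters state class; the name is illustrative.

# Populate counters through the private setters, as the docstrings direct:
state._set_spf_runs(12)
state._set_corrupted_lsps(0)

# Read them back through the public read-only properties:
print(state.spf_runs)

# Values are checked against yang:counter32 (range 0..4294967295);
# out-of-range input raises the ValueError constructed in the setter:
try:
    state._set_spf_runs(2 ** 32)
except ValueError as err:
    print(err.args[0]["error-string"])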
avg_line_length: 43.601777 | max_line_length: 460 | alphanum_fraction: 0.6083
[remaining per-file quality-signal columns omitted]

hexsha: 732e7afcbb8e13f1c38a1ab371dcbf0af5523e66 | size: 20753 | ext: py | lang: Python
max_stars_repo_path: tests/tabulation/test_library_slicing.py
max_stars_repo_name: sandialabs/Spitfire
max_stars_repo_head_hexsha: 65670e3ba5d1ccb4ac72524b77957706345c5bf6
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 11
max_stars_repo_stars_event: 2020-03-20T02:10:17.000Z to 2021-12-14T10:08:09.000Z
max_issues_count: 18 | max_issues_repo_issues_event: 2020-03-18T18:58:56.000Z to 2021-12-21T02:35:35.000Z
max_forks_count: 2 | max_forks_repo_forks_event: 2021-05-31T17:24:56.000Z to 2021-06-20T05:27:41.000Z
import unittest
from spitfire import Library, Dimension
from spitfire.chemistry.library import LibraryIndexError
import numpy as np
from copy import copy, deepcopy
machine_epsilon = np.finfo(float).eps
class Slice1D(unittest.TestCase):
def test_full(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 16)))
l1['f'] = np.exp(l1.x_grid)
l1.extra_attributes['name'] = 'my_library_name'
l2 = l1[:]
self.assertTrue(np.all(np.abs(l1.x_grid - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid.shape)
self.assertTrue(l2.size == l1.x_grid.size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
def test_partial(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 16)))
l1['f'] = np.exp(l1.x_grid)
l1.extra_attributes['name'] = 'my_library_name'
n1 = 2
n2 = 8
l2 = l1[n1:n2]
self.assertTrue(np.all(np.abs(l1.x_grid[n1:n2] - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'][n1:n2] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid[n1:n2].shape)
self.assertTrue(l2.size == l1.x_grid[n1:n2].size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
def test_single(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 16)))
l1['f'] = np.exp(l1.x_grid)
l1.extra_attributes['name'] = 'my_library_name'
n1 = 2
n2 = n1 + 1
l2 = l1[n1:n2]
self.assertTrue(np.all(np.abs(l1.x_grid[n1:n2] - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'][n1:n2] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid[n1:n2].shape)
self.assertTrue(l2.size == l1.x_grid[n1:n2].size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
def test_copy(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 16)))
l1['f'] = np.exp(l1.x_grid)
l1.extra_attributes['name'] = 'my_library_name'
l2 = copy(l1)
l3 = deepcopy(l1)
l4 = Library.deepcopy(l1)
self.assertTrue(np.all(np.abs(l1.x_grid - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.x_grid - l3.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l3['f']) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.x_grid - l4.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l4['f']) < 10. * machine_epsilon))
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
def test_invalid_number(self):
    l1 = Library(Dimension('x', np.linspace(0, 1, 16)))
    with self.assertRaises(LibraryIndexError):
        l1[:, :]
def test_multiple_nonslice_args_1(self):
    l1 = Library(Dimension('x', np.linspace(0, 1, 16)))
    with self.assertRaises(LibraryIndexError):
        l1['f', :]
def test_multiple_nonslice_args_2(self):
    l1 = Library(Dimension('x', np.linspace(0, 1, 16)))
    with self.assertRaises(LibraryIndexError):
        l1[:, 'g']
def test_multiple_nonslice_args_3(self):
    l1 = Library(Dimension('x', np.linspace(0, 1, 16)))
    with self.assertRaises(LibraryIndexError):
        l1['f', 'g']
class Slice2D(unittest.TestCase):
def test_full(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 2)), Dimension('y', np.linspace(-1, 1, 3)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid)
l1.extra_attributes['name'] = 'my_library_name'
l2 = l1[:, :]
self.assertTrue(np.all(np.abs(l1.x_grid - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.y_grid - l2.y_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid.shape)
self.assertTrue(l2.size == l1.x_grid.size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
l3 = l1[:]
self.assertTrue(np.all(np.abs(l1.x_grid - l3.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.y_grid - l3.y_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l3['f']) < 10. * machine_epsilon))
self.assertTrue(l3.shape == l1.x_grid.shape)
self.assertTrue(l3.size == l1.x_grid.size)
self.assertTrue(l1.extra_attributes['name'] == l3.extra_attributes['name'])
def test_partial(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 10)), Dimension('y', np.linspace(-1, 1, 10)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid)
l1.extra_attributes['name'] = 'my_library_name'
n1x = 2
n2x = 8
n1y = 1
n2y = -1
l2 = l1[n1x:n2x, n1y:n2y]
self.assertTrue(np.all(np.abs(l1.x_grid[n1x:n2x, n1y:n2y] - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'][n1x:n2x, n1y:n2y] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid[n1x:n2x, n1y:n2y].shape)
self.assertTrue(l2.size == l1.x_grid[n1x:n2x, n1y:n2y].size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
def test_single(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 10)), Dimension('y', np.linspace(-1, 1, 10)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid)
l1.extra_attributes['name'] = 'my_library_name'
n1x = 2
n2x = n1x + 1
n1y = 1
n2y = -1
l2 = l1[n1x:n2x, n1y:n2y]
self.assertTrue(np.all(np.abs(l1.x_grid[n1x:n2x, n1y:n2y] - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'][n1x:n2x, n1y:n2y] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid[n1x:n2x, n1y:n2y].shape)
self.assertTrue(l2.size == l1.x_grid[n1x:n2x, n1y:n2y].size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
def test_squeeze(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 10)), Dimension('y', np.linspace(-1, 1, 10)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid)
l1.extra_attributes['name'] = 'my_library_name'
iy = 3
l3 = Library.squeeze(l1[:, iy])
self.assertTrue(np.all(np.abs(l1['f'][:, iy] - l3['f']) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(np.squeeze(l1.x_grid[:, iy]) - l3.x_grid) < 10. * machine_epsilon))
self.assertTrue(l1.extra_attributes['name'] == l3.extra_attributes['name'])
def test_copy(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 10)), Dimension('y', np.linspace(-1, 1, 10)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid)
l1.extra_attributes['name'] = 'my_library_name'
l2 = copy(l1)
l3 = Library.copy(l1)
self.assertTrue(np.all(np.abs(l1.x_grid - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.y_grid - l2.y_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.x_grid - l3.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.y_grid - l3.y_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l3['f']) < 10. * machine_epsilon))
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
self.assertTrue(l1.extra_attributes['name'] == l3.extra_attributes['name'])
def test_invalid_number_3(self):
    l1 = Library(Dimension('x', np.linspace(0, 1, 2)), Dimension('y', np.linspace(-1, 1, 3)))
    with self.assertRaises(LibraryIndexError):
        l1[:, :, :]
class Slice3D(unittest.TestCase):
def test_full(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 2)),
Dimension('y', np.linspace(-1, 1, 3)),
Dimension('z', np.logspace(-1, 1, 4)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1.extra_attributes['name'] = 'my_library_name'
l2 = l1[:, :, :]
self.assertTrue(np.all(np.abs(l1.x_grid - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.y_grid - l2.y_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.z_grid - l2.z_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid.shape)
self.assertTrue(l2.size == l1.x_grid.size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
l3 = l1[:]
self.assertTrue(np.all(np.abs(l1.x_grid - l3.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.y_grid - l3.y_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.z_grid - l3.z_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l3['f']) < 10. * machine_epsilon))
self.assertTrue(l3.shape == l1.x_grid.shape)
self.assertTrue(l3.size == l1.x_grid.size)
self.assertTrue(l1.extra_attributes['name'] == l3.extra_attributes['name'])
def test_partial(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 10)),
Dimension('y', np.linspace(-1, 1, 4)),
Dimension('z', np.logspace(-1, 1, 7)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1.extra_attributes['name'] = 'my_library_name'
n1x = 2
n2x = 8
n1y = 1
n2y = -1
n1z = 3
n2z = 5
l2 = l1[n1x:n2x, n1y:n2y, n1z:n2z]
self.assertTrue(np.all(np.abs(l1.x_grid[n1x:n2x, n1y:n2y, n1z:n2z] - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'][n1x:n2x, n1y:n2y, n1z:n2z] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid[n1x:n2x, n1y:n2y, n1z:n2z].shape)
self.assertTrue(l2.size == l1.x_grid[n1x:n2x, n1y:n2y, n1z:n2z].size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
def test_single(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 10)),
Dimension('y', np.linspace(-1, 1, 4)),
Dimension('z', np.logspace(-1, 1, 7)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1.extra_attributes['name'] = 'my_library_name'
n1x = 2
n2x = 8
n1y = 1
n2y = -1
n1z = 3
n2z = n1z + 1
l2 = l1[n1x:n2x, n1y:n2y, n1z:n2z]
self.assertTrue(np.all(np.abs(l1.x_grid[n1x:n2x, n1y:n2y, n1z:n2z] - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'][n1x:n2x, n1y:n2y, n1z:n2z] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid[n1x:n2x, n1y:n2y, n1z:n2z].shape)
self.assertTrue(l2.size == l1.x_grid[n1x:n2x, n1y:n2y, n1z:n2z].size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
def test_single_internal(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 10)),
Dimension('y', np.linspace(-1, 1, 4)),
Dimension('z', np.logspace(-1, 1, 7)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1.extra_attributes['name'] = 'my_library_name'
n1x = 2
n2x = 8
n1y = 1
n2y = n1y + 1
n1z = 3
n2z = 6
l2 = l1[n1x:n2x, n1y:n2y, n1z:n2z]
self.assertTrue(np.all(np.abs(l1.x_grid[n1x:n2x, n1y:n2y, n1z:n2z] - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'][n1x:n2x, n1y:n2y, n1z:n2z] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(l2.shape == l1.x_grid[n1x:n2x, n1y:n2y, n1z:n2z].shape)
self.assertTrue(l2.size == l1.x_grid[n1x:n2x, n1y:n2y, n1z:n2z].size)
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
def test_squeeze(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 10)),
Dimension('y', np.linspace(-1, 1, 4)),
Dimension('z', np.logspace(-1, 1, 7)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1.extra_attributes['name'] = 'my_library_name'
iy = 2
l3 = Library.squeeze(l1[:, iy, :])
self.assertTrue(np.all(np.abs(np.squeeze(l1.x_grid[:, iy, :]) - l3.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(np.squeeze(l1.z_grid[:, iy, :]) - l3.z_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(np.squeeze(l1['f'][:, iy, :]) - l3['f']) < 10. * machine_epsilon))
self.assertTrue(l1.extra_attributes['name'] == l3.extra_attributes['name'])
def test_copy(self):
l1 = Library(Dimension('x', np.linspace(0, 1, 10)),
Dimension('y', np.linspace(-1, 1, 4)),
Dimension('z', np.logspace(-1, 1, 7)))
l1['f'] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1.extra_attributes['name'] = 'my_library_name'
l2 = copy(l1)
l3 = Library.copy(l1)
self.assertTrue(np.all(np.abs(l1.x_grid - l2.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.y_grid - l2.y_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.z_grid - l2.z_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l2['f']) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.x_grid - l3.x_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.y_grid - l3.y_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1.z_grid - l3.z_grid) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1['f'] - l3['f']) < 10. * machine_epsilon))
self.assertTrue(l1.extra_attributes['name'] == l2.extra_attributes['name'])
self.assertTrue(l1.extra_attributes['name'] == l3.extra_attributes['name'])
def test_view(self):
slices = (slice(0, None, None), slice(1, 3, None), slice(1, -3, None))
l1 = Library(Dimension('x', np.linspace(0, 1, 10)),
Dimension('y', np.linspace(-1, 1, 4)),
Dimension('z', np.logspace(-1, 1, 7)))
gold_float = 0.5
gold_array = np.exp(-2. * l1.x_grid[slices])
fvals = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
# start with float argument
g = gold_float
# set slice of original array
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
fvals[slices] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
# set slice of library property
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
l1['f'][slices] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
# set property of library slice
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
l1[slices]['f'] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
# set slice of library view property
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
l2['f'][:, :, :] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
# set property of library view
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
l2['f'] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
# repeat with numpy array argument
g = gold_array
# set slice of original array
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
fvals[slices] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
# set slice of library property
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
l1['f'][slices] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
# set property of library slice
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
l1[slices]['f'] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
# set slice of library view property
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
l2['f'][:, :, :] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
# set property of library view
fvals[:, :, :] = np.exp(l1.x_grid) * np.cos(l1.y_grid) * np.log(l1.z_grid)
l1['f'] = fvals
l2 = l1[slices]
l2['f'] = g
self.assertTrue(np.all(np.abs(l1['f'][slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l1[slices]['f'] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(fvals[slices] - g) < 10. * machine_epsilon))
self.assertTrue(np.all(np.abs(l2['f'] - g) < 10. * machine_epsilon))
if __name__ == '__main__':
unittest.main()
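# A standalone sketch of the slicing semantics the tests above pin down,
# assuming spitfire is installed. Slicing a Library slices every
# dimension grid and every property together, and (as test_view shows)
# the slice is a view into the parent library's arrays.
import numpy as np
from spitfire import Library, Dimension

lib = Library(Dimension('x', np.linspace(0, 1, 16)))
lib['f'] = np.exp(lib.x_grid)

sub = lib[2:8]                        # slice the library along x
assert sub.x_grid.shape == lib.x_grid[2:8].shape

sub['f'][:] = 0.                      # writing through the view ...
assert np.all(lib['f'][2:8] == 0.)    # ... writes into the parent's data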
avg_line_length: 49.767386 | max_line_length: 113 | alphanum_fraction: 0.578037
[remaining per-file quality-signal columns omitted]

hexsha: b4563e33ad48c0c25ea1377735af0ceca7f724ae | size: 6888 | ext: py | lang: Python
max_stars_repo_path: riak/tests/test_search.py
max_stars_repo_name: albeus/riak-python-client
max_stars_repo_head_hexsha: 51bf875f1f5e394d45540a3850a8453db0951c40
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
# -*- coding: utf-8 -*-
import sys
# Use the unittest2 backport on interpreters older than 2.7; comparing
# version_info tuples avoids the pitfalls of string comparison.
if sys.version_info < (2, 7):
    unittest = __import__('unittest2')
else:
    import unittest
from . import SKIP_SEARCH
class EnableSearchTests(object):
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_bucket_search_enabled(self):
bucket = self.client.bucket(self.bucket_name)
self.assertFalse(bucket.search_enabled())
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_enable_search_commit_hook(self):
bucket = self.client.bucket(self.search_bucket)
bucket.clear_properties()
self.assertFalse(self.create_client().
bucket(self.search_bucket).
search_enabled())
bucket.enable_search()
self.assertTrue(self.create_client().
bucket(self.search_bucket).
search_enabled())
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_disable_search_commit_hook(self):
bucket = self.client.bucket(self.search_bucket)
bucket.clear_properties()
bucket.enable_search()
self.assertTrue(self.create_client().bucket(self.search_bucket)
.search_enabled())
bucket.disable_search()
self.assertFalse(self.create_client().bucket(self.search_bucket)
.search_enabled())
bucket.enable_search()
class SolrSearchTests(object):
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_add_document_to_index(self):
self.client.fulltext_add(self.search_bucket,
[{"id": "doc", "username": "tony"}])
results = self.client.fulltext_search(self.search_bucket,
"username:tony")
self.assertEquals("tony", results['docs'][0]['username'])
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_add_multiple_documents_to_index(self):
self.client.fulltext_add(
self.search_bucket,
[{"id": "dizzy", "username": "dizzy"},
{"id": "russell", "username": "russell"}])
results = self.client.fulltext_search(
self.search_bucket, "username:russell OR username:dizzy")
self.assertEquals(2, len(results['docs']))
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_delete_documents_from_search_by_id(self):
self.client.fulltext_add(
self.search_bucket,
[{"id": "dizzy", "username": "dizzy"},
{"id": "russell", "username": "russell"}])
self.client.fulltext_delete(self.search_bucket, docs=["dizzy"])
results = self.client.fulltext_search(
self.search_bucket, "username:russell OR username:dizzy")
self.assertEquals(1, len(results['docs']))
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_delete_documents_from_search_by_query(self):
self.client.fulltext_add(
self.search_bucket,
[{"id": "dizzy", "username": "dizzy"},
{"id": "russell", "username": "russell"}])
self.client.fulltext_delete(
self.search_bucket,
queries=["username:dizzy", "username:russell"])
results = self.client.fulltext_search(
self.search_bucket, "username:russell OR username:dizzy")
self.assertEquals(0, len(results['docs']))
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_delete_documents_from_search_by_query_and_id(self):
self.client.fulltext_add(
self.search_bucket,
[{"id": "dizzy", "username": "dizzy"},
{"id": "russell", "username": "russell"}])
self.client.fulltext_delete(
self.search_bucket,
docs=["dizzy"],
queries=["username:russell"])
results = self.client.fulltext_search(
self.search_bucket,
"username:russell OR username:dizzy")
self.assertEquals(0, len(results['docs']))
class SearchTests(object):
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_solr_search_from_bucket(self):
bucket = self.client.bucket(self.search_bucket)
bucket.new("user", {"username": "roidrage"}).store()
results = bucket.search("username:roidrage")
self.assertEquals(1, len(results['docs']))
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_solr_search_with_params_from_bucket(self):
bucket = self.client.bucket(self.search_bucket)
bucket.new("user", {"username": "roidrage"}).store()
results = bucket.search("username:roidrage", wt="xml")
self.assertEquals(1, len(results['docs']))
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_solr_search_with_params(self):
bucket = self.client.bucket(self.search_bucket)
bucket.new("user", {"username": "roidrage"}).store()
results = self.client.fulltext_search(
self.search_bucket,
"username:roidrage", wt="xml")
self.assertEquals(1, len(results['docs']))
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_solr_search(self):
bucket = self.client.bucket(self.search_bucket)
bucket.new("user", {"username": "roidrage"}).store()
results = self.client.fulltext_search(self.search_bucket,
"username:roidrage")
self.assertEquals(1, len(results["docs"]))
@unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined')
def test_search_integration(self):
# Create some objects to search across...
bucket = self.client.bucket(self.search_bucket)
bucket.new("one", {"foo": "one", "bar": "red"}).store()
bucket.new("two", {"foo": "two", "bar": "green"}).store()
bucket.new("three", {"foo": "three", "bar": "blue"}).store()
bucket.new("four", {"foo": "four", "bar": "orange"}).store()
bucket.new("five", {"foo": "five", "bar": "yellow"}).store()
# Run some operations...
results = self.client.fulltext_search(self.search_bucket,
"foo:one OR foo:two")
if len(results) == 0:
    print("\n\nNot running test \"testSearchIntegration()\".\n")
    print("""Please ensure that you have installed the Riak
Search hook on bucket \"searchbucket\" by running
\"bin/search-cmd install searchbucket\".\n\n""")
    return
self.assertEqual(len(results['docs']), 2)
query = "(foo:one OR foo:two OR foo:three OR foo:four) AND\
(NOT bar:green)"
results = self.client.fulltext_search(self.search_bucket, query)
self.assertEqual(len(results['docs']), 3)
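# A minimal sketch of the client calls exercised by the mixins above,
# assuming a reachable Riak node with Riak Search enabled; the bucket
# name below is a placeholder.
from riak import RiakClient

client = RiakClient()
search_bucket = 'searchbucket'

# Index documents, query them back, then delete by id and by query:
client.fulltext_add(search_bucket, [{"id": "doc", "username": "tony"}])
results = client.fulltext_search(search_bucket, "username:tony")
print(len(results['docs']))
client.fulltext_delete(search_bucket, docs=["doc"],
                       queries=["username:tony"])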
avg_line_length: 43.594937 | max_line_length: 72 | alphanum_fraction: 0.615418
[remaining per-file quality-signal columns omitted]

hexsha: b4568f4028bab3e71d0e911d6c035adeb64dc4eb | size: 46069 | ext: py | lang: Python
max_stars_repo_path: terrascript/aws/r.py
max_stars_repo_name: bkez322/python-terrascript
max_stars_repo_head_hexsha: 7779a9d0c65b7f4b463746c84a4f181dd895a849
max_stars_repo_licenses: ["BSD-2-Clause"] | max_stars_count: 4
max_stars_repo_stars_event: 2022-02-07T21:08:14.000Z to 2022-03-03T04:41:28.000Z
max_issues_count: null
max_forks_count: 2 | max_forks_repo_forks_event: 2022-02-06T01:49:42.000Z to 2022-02-08T14:15:00.000Z
# terrascript/aws/r.py
import terrascript
class aws_accessanalyzer_analyzer(terrascript.Resource):
pass
class aws_acm_certificate(terrascript.Resource):
pass
class aws_acm_certificate_validation(terrascript.Resource):
pass
class aws_acmpca_certificate_authority(terrascript.Resource):
pass
class aws_ami(terrascript.Resource):
pass
class aws_ami_copy(terrascript.Resource):
pass
class aws_ami_from_instance(terrascript.Resource):
pass
class aws_ami_launch_permission(terrascript.Resource):
pass
class aws_api_gateway_account(terrascript.Resource):
pass
class aws_api_gateway_api_key(terrascript.Resource):
pass
class aws_api_gateway_authorizer(terrascript.Resource):
pass
class aws_api_gateway_base_path_mapping(terrascript.Resource):
pass
class aws_api_gateway_client_certificate(terrascript.Resource):
pass
class aws_api_gateway_deployment(terrascript.Resource):
pass
class aws_api_gateway_documentation_part(terrascript.Resource):
pass
class aws_api_gateway_documentation_version(terrascript.Resource):
pass
class aws_api_gateway_domain_name(terrascript.Resource):
pass
class aws_api_gateway_gateway_response(terrascript.Resource):
pass
class aws_api_gateway_integration(terrascript.Resource):
pass
class aws_api_gateway_integration_response(terrascript.Resource):
pass
class aws_api_gateway_method(terrascript.Resource):
pass
class aws_api_gateway_method_response(terrascript.Resource):
pass
class aws_api_gateway_method_settings(terrascript.Resource):
pass
class aws_api_gateway_model(terrascript.Resource):
pass
class aws_api_gateway_request_validator(terrascript.Resource):
pass
class aws_api_gateway_resource(terrascript.Resource):
pass
class aws_api_gateway_rest_api(terrascript.Resource):
pass
class aws_api_gateway_rest_api_policy(terrascript.Resource):
pass
class aws_api_gateway_stage(terrascript.Resource):
pass
class aws_api_gateway_usage_plan(terrascript.Resource):
pass
class aws_api_gateway_usage_plan_key(terrascript.Resource):
pass
class aws_api_gateway_vpc_link(terrascript.Resource):
pass
class aws_apigatewayv2_api(terrascript.Resource):
pass
class aws_apigatewayv2_api_mapping(terrascript.Resource):
pass
class aws_apigatewayv2_authorizer(terrascript.Resource):
pass
class aws_apigatewayv2_deployment(terrascript.Resource):
pass
class aws_apigatewayv2_domain_name(terrascript.Resource):
pass
class aws_apigatewayv2_integration(terrascript.Resource):
pass
class aws_apigatewayv2_integration_response(terrascript.Resource):
pass
class aws_apigatewayv2_model(terrascript.Resource):
pass
class aws_apigatewayv2_route(terrascript.Resource):
pass
class aws_apigatewayv2_route_response(terrascript.Resource):
pass
class aws_apigatewayv2_stage(terrascript.Resource):
pass
class aws_apigatewayv2_vpc_link(terrascript.Resource):
pass
class aws_app_cookie_stickiness_policy(terrascript.Resource):
pass
class aws_appautoscaling_target(terrascript.Resource):
pass
class aws_appautoscaling_policy(terrascript.Resource):
pass
class aws_appautoscaling_scheduled_action(terrascript.Resource):
pass
class aws_appmesh_gateway_route(terrascript.Resource):
pass
class aws_appmesh_mesh(terrascript.Resource):
pass
class aws_appmesh_route(terrascript.Resource):
pass
class aws_appmesh_virtual_gateway(terrascript.Resource):
pass
class aws_appmesh_virtual_node(terrascript.Resource):
pass
class aws_appmesh_virtual_router(terrascript.Resource):
pass
class aws_appmesh_virtual_service(terrascript.Resource):
pass
class aws_appsync_api_key(terrascript.Resource):
pass
class aws_appsync_datasource(terrascript.Resource):
pass
class aws_appsync_function(terrascript.Resource):
pass
class aws_appsync_graphql_api(terrascript.Resource):
pass
class aws_appsync_resolver(terrascript.Resource):
pass
class aws_athena_database(terrascript.Resource):
pass
class aws_athena_named_query(terrascript.Resource):
pass
class aws_athena_workgroup(terrascript.Resource):
pass
class aws_autoscaling_attachment(terrascript.Resource):
pass
class aws_autoscaling_group(terrascript.Resource):
pass
class aws_autoscaling_lifecycle_hook(terrascript.Resource):
pass
class aws_autoscaling_notification(terrascript.Resource):
pass
class aws_autoscaling_policy(terrascript.Resource):
pass
class aws_autoscaling_schedule(terrascript.Resource):
pass
class aws_autoscalingplans_scaling_plan(terrascript.Resource):
pass
class aws_backup_global_settings(terrascript.Resource):
pass
class aws_backup_plan(terrascript.Resource):
pass
class aws_backup_region_settings(terrascript.Resource):
pass
class aws_backup_selection(terrascript.Resource):
pass
class aws_backup_vault(terrascript.Resource):
pass
class aws_backup_vault_notifications(terrascript.Resource):
pass
class aws_backup_vault_policy(terrascript.Resource):
pass
class aws_budgets_budget(terrascript.Resource):
pass
class aws_cloud9_environment_ec2(terrascript.Resource):
pass
class aws_cloudformation_stack(terrascript.Resource):
pass
class aws_cloudformation_stack_set(terrascript.Resource):
pass
class aws_cloudformation_stack_set_instance(terrascript.Resource):
pass
class aws_cloudfront_cache_policy(terrascript.Resource):
pass
class aws_cloudfront_distribution(terrascript.Resource):
pass
class aws_cloudfront_origin_access_identity(terrascript.Resource):
pass
class aws_cloudfront_origin_request_policy(terrascript.Resource):
pass
class aws_cloudfront_public_key(terrascript.Resource):
pass
class aws_cloudfront_realtime_log_config(terrascript.Resource):
pass
class aws_cloudtrail(terrascript.Resource):
pass
class aws_cloudwatch_event_bus(terrascript.Resource):
pass
class aws_cloudwatch_event_permission(terrascript.Resource):
pass
class aws_cloudwatch_event_rule(terrascript.Resource):
pass
class aws_cloudwatch_event_target(terrascript.Resource):
pass
class aws_cloudwatch_event_archive(terrascript.Resource):
pass
class aws_cloudwatch_log_destination(terrascript.Resource):
pass
class aws_cloudwatch_log_destination_policy(terrascript.Resource):
pass
class aws_cloudwatch_log_group(terrascript.Resource):
pass
class aws_cloudwatch_log_metric_filter(terrascript.Resource):
pass
class aws_cloudwatch_log_resource_policy(terrascript.Resource):
pass
class aws_cloudwatch_log_stream(terrascript.Resource):
pass
class aws_cloudwatch_log_subscription_filter(terrascript.Resource):
pass
class aws_config_aggregate_authorization(terrascript.Resource):
pass
class aws_config_config_rule(terrascript.Resource):
pass
class aws_config_configuration_aggregator(terrascript.Resource):
pass
class aws_config_configuration_recorder(terrascript.Resource):
pass
class aws_config_configuration_recorder_status(terrascript.Resource):
pass
class aws_config_conformance_pack(terrascript.Resource):
pass
class aws_config_delivery_channel(terrascript.Resource):
pass
class aws_config_organization_custom_rule(terrascript.Resource):
pass
class aws_config_organization_managed_rule(terrascript.Resource):
pass
class aws_config_remediation_configuration(terrascript.Resource):
pass
class aws_cognito_identity_pool(terrascript.Resource):
pass
class aws_cognito_identity_pool_roles_attachment(terrascript.Resource):
pass
class aws_cognito_identity_provider(terrascript.Resource):
pass
class aws_cognito_resource_server(terrascript.Resource):
pass
class aws_cognito_user_group(terrascript.Resource):
pass
class aws_cognito_user_pool(terrascript.Resource):
pass
class aws_cognito_user_pool_client(terrascript.Resource):
pass
class aws_cognito_user_pool_domain(terrascript.Resource):
pass
class aws_cognito_user_pool_ui_customization(terrascript.Resource):
pass
class aws_cloudhsm_v2_cluster(terrascript.Resource):
pass
class aws_cloudhsm_v2_hsm(terrascript.Resource):
pass
class aws_cloudwatch_composite_alarm(terrascript.Resource):
pass
class aws_cloudwatch_metric_alarm(terrascript.Resource):
pass
class aws_cloudwatch_dashboard(terrascript.Resource):
pass
class aws_codedeploy_app(terrascript.Resource):
pass
class aws_codedeploy_deployment_config(terrascript.Resource):
pass
class aws_codedeploy_deployment_group(terrascript.Resource):
pass
class aws_codecommit_repository(terrascript.Resource):
pass
class aws_codecommit_trigger(terrascript.Resource):
pass
class aws_codeartifact_domain(terrascript.Resource):
pass
class aws_codeartifact_domain_permissions_policy(terrascript.Resource):
pass
class aws_codeartifact_repository(terrascript.Resource):
pass
class aws_codeartifact_repository_permissions_policy(terrascript.Resource):
pass
class aws_codebuild_project(terrascript.Resource):
pass
class aws_codebuild_report_group(terrascript.Resource):
pass
class aws_codebuild_source_credential(terrascript.Resource):
pass
class aws_codebuild_webhook(terrascript.Resource):
pass
class aws_codepipeline(terrascript.Resource):
pass
class aws_codepipeline_webhook(terrascript.Resource):
pass
class aws_codestarconnections_connection(terrascript.Resource):
pass
class aws_codestarnotifications_notification_rule(terrascript.Resource):
pass
class aws_cur_report_definition(terrascript.Resource):
pass
class aws_customer_gateway(terrascript.Resource):
pass
class aws_datapipeline_pipeline(terrascript.Resource):
pass
class aws_datasync_agent(terrascript.Resource):
pass
class aws_datasync_location_efs(terrascript.Resource):
pass
class aws_datasync_location_fsx_windows_file_system(terrascript.Resource):
pass
class aws_datasync_location_nfs(terrascript.Resource):
pass
class aws_datasync_location_s3(terrascript.Resource):
pass
class aws_datasync_location_smb(terrascript.Resource):
pass
class aws_datasync_task(terrascript.Resource):
pass
class aws_dax_cluster(terrascript.Resource):
pass
class aws_dax_parameter_group(terrascript.Resource):
pass
class aws_dax_subnet_group(terrascript.Resource):
pass
class aws_db_cluster_snapshot(terrascript.Resource):
pass
class aws_db_event_subscription(terrascript.Resource):
pass
class aws_db_instance(terrascript.Resource):
pass
class aws_db_instance_role_association(terrascript.Resource):
pass
class aws_db_option_group(terrascript.Resource):
pass
class aws_db_parameter_group(terrascript.Resource):
pass
class aws_db_proxy(terrascript.Resource):
pass
class aws_db_proxy_default_target_group(terrascript.Resource):
pass
class aws_db_proxy_target(terrascript.Resource):
pass
class aws_db_security_group(terrascript.Resource):
pass
class aws_db_snapshot(terrascript.Resource):
pass
class aws_db_subnet_group(terrascript.Resource):
pass
class aws_devicefarm_project(terrascript.Resource):
pass
class aws_directory_service_directory(terrascript.Resource):
pass
class aws_directory_service_conditional_forwarder(terrascript.Resource):
pass
class aws_directory_service_log_subscription(terrascript.Resource):
pass
class aws_dlm_lifecycle_policy(terrascript.Resource):
pass
class aws_dms_certificate(terrascript.Resource):
pass
class aws_dms_endpoint(terrascript.Resource):
pass
class aws_dms_event_subscription(terrascript.Resource):
pass
class aws_dms_replication_instance(terrascript.Resource):
pass
class aws_dms_replication_subnet_group(terrascript.Resource):
pass
class aws_dms_replication_task(terrascript.Resource):
pass
class aws_docdb_cluster(terrascript.Resource):
pass
class aws_docdb_cluster_instance(terrascript.Resource):
pass
class aws_docdb_cluster_parameter_group(terrascript.Resource):
pass
class aws_docdb_cluster_snapshot(terrascript.Resource):
pass
class aws_docdb_subnet_group(terrascript.Resource):
pass
class aws_dx_bgp_peer(terrascript.Resource):
pass
class aws_dx_connection(terrascript.Resource):
pass
class aws_dx_connection_association(terrascript.Resource):
pass
class aws_dx_gateway(terrascript.Resource):
pass
class aws_dx_gateway_association(terrascript.Resource):
pass
class aws_dx_gateway_association_proposal(terrascript.Resource):
pass
class aws_dx_hosted_private_virtual_interface(terrascript.Resource):
pass
class aws_dx_hosted_private_virtual_interface_accepter(terrascript.Resource):
pass
class aws_dx_hosted_public_virtual_interface(terrascript.Resource):
pass
class aws_dx_hosted_public_virtual_interface_accepter(terrascript.Resource):
pass
class aws_dx_hosted_transit_virtual_interface(terrascript.Resource):
pass
class aws_dx_hosted_transit_virtual_interface_accepter(terrascript.Resource):
pass
class aws_dx_lag(terrascript.Resource):
pass
class aws_dx_private_virtual_interface(terrascript.Resource):
pass
class aws_dx_public_virtual_interface(terrascript.Resource):
pass
class aws_dx_transit_virtual_interface(terrascript.Resource):
pass
class aws_dynamodb_table(terrascript.Resource):
pass
class aws_dynamodb_table_item(terrascript.Resource):
pass
class aws_dynamodb_global_table(terrascript.Resource):
pass
class aws_ebs_default_kms_key(terrascript.Resource):
pass
class aws_ebs_encryption_by_default(terrascript.Resource):
pass
class aws_ebs_snapshot(terrascript.Resource):
pass
class aws_ebs_snapshot_copy(terrascript.Resource):
pass
class aws_ebs_volume(terrascript.Resource):
pass
class aws_ec2_availability_zone_group(terrascript.Resource):
pass
class aws_ec2_capacity_reservation(terrascript.Resource):
pass
class aws_ec2_carrier_gateway(terrascript.Resource):
pass
class aws_ec2_client_vpn_authorization_rule(terrascript.Resource):
pass
class aws_ec2_client_vpn_endpoint(terrascript.Resource):
pass
class aws_ec2_client_vpn_network_association(terrascript.Resource):
pass
class aws_ec2_client_vpn_route(terrascript.Resource):
pass
class aws_ec2_fleet(terrascript.Resource):
pass
class aws_ec2_local_gateway_route(terrascript.Resource):
pass
class aws_ec2_local_gateway_route_table_vpc_association(terrascript.Resource):
pass
class aws_ec2_managed_prefix_list(terrascript.Resource):
pass
class aws_ec2_tag(terrascript.Resource):
pass
class aws_ec2_traffic_mirror_filter(terrascript.Resource):
pass
class aws_ec2_traffic_mirror_filter_rule(terrascript.Resource):
pass
class aws_ec2_traffic_mirror_target(terrascript.Resource):
pass
class aws_ec2_traffic_mirror_session(terrascript.Resource):
pass
class aws_ec2_transit_gateway(terrascript.Resource):
pass
class aws_ec2_transit_gateway_peering_attachment(terrascript.Resource):
pass
class aws_ec2_transit_gateway_peering_attachment_accepter(terrascript.Resource):
pass
class aws_ec2_transit_gateway_prefix_list_reference(terrascript.Resource):
pass
class aws_ec2_transit_gateway_route(terrascript.Resource):
pass
class aws_ec2_transit_gateway_route_table(terrascript.Resource):
pass
class aws_ec2_transit_gateway_route_table_association(terrascript.Resource):
pass
class aws_ec2_transit_gateway_route_table_propagation(terrascript.Resource):
pass
class aws_ec2_transit_gateway_vpc_attachment(terrascript.Resource):
pass
class aws_ec2_transit_gateway_vpc_attachment_accepter(terrascript.Resource):
pass
class aws_ecr_lifecycle_policy(terrascript.Resource):
pass
class aws_ecrpublic_repository(terrascript.Resource):
pass
class aws_ecr_repository(terrascript.Resource):
pass
class aws_ecr_repository_policy(terrascript.Resource):
pass
class aws_ecs_capacity_provider(terrascript.Resource):
pass
class aws_ecs_cluster(terrascript.Resource):
pass
class aws_ecs_service(terrascript.Resource):
pass
class aws_ecs_task_definition(terrascript.Resource):
pass
class aws_efs_access_point(terrascript.Resource):
pass
class aws_efs_file_system(terrascript.Resource):
pass
class aws_efs_file_system_policy(terrascript.Resource):
pass
class aws_efs_mount_target(terrascript.Resource):
pass
class aws_egress_only_internet_gateway(terrascript.Resource):
pass
class aws_eip(terrascript.Resource):
pass
class aws_eip_association(terrascript.Resource):
pass
class aws_eks_cluster(terrascript.Resource):
pass
class aws_eks_fargate_profile(terrascript.Resource):
pass
class aws_eks_node_group(terrascript.Resource):
pass
class aws_elasticache_cluster(terrascript.Resource):
pass
class aws_elasticache_global_replication_group(terrascript.Resource):
pass
class aws_elasticache_parameter_group(terrascript.Resource):
pass
class aws_elasticache_replication_group(terrascript.Resource):
pass
class aws_elasticache_security_group(terrascript.Resource):
pass
class aws_elasticache_subnet_group(terrascript.Resource):
pass
class aws_elastic_beanstalk_application(terrascript.Resource):
pass
class aws_elastic_beanstalk_application_version(terrascript.Resource):
pass
class aws_elastic_beanstalk_configuration_template(terrascript.Resource):
pass
class aws_elastic_beanstalk_environment(terrascript.Resource):
pass
class aws_elasticsearch_domain(terrascript.Resource):
pass
class aws_elasticsearch_domain_policy(terrascript.Resource):
pass
class aws_elastictranscoder_pipeline(terrascript.Resource):
pass
class aws_elastictranscoder_preset(terrascript.Resource):
pass
class aws_elb(terrascript.Resource):
pass
class aws_elb_attachment(terrascript.Resource):
pass
class aws_emr_cluster(terrascript.Resource):
pass
class aws_emr_instance_group(terrascript.Resource):
pass
class aws_emr_instance_fleet(terrascript.Resource):
pass
class aws_emr_managed_scaling_policy(terrascript.Resource):
pass
class aws_emr_security_configuration(terrascript.Resource):
pass
class aws_flow_log(terrascript.Resource):
pass
class aws_fsx_lustre_file_system(terrascript.Resource):
pass
class aws_fsx_windows_file_system(terrascript.Resource):
pass
class aws_fms_admin_account(terrascript.Resource):
pass
class aws_fms_policy(terrascript.Resource):
pass
class aws_gamelift_alias(terrascript.Resource):
pass
class aws_gamelift_build(terrascript.Resource):
pass
class aws_gamelift_fleet(terrascript.Resource):
pass
class aws_gamelift_game_session_queue(terrascript.Resource):
pass
class aws_glacier_vault(terrascript.Resource):
pass
class aws_glacier_vault_lock(terrascript.Resource):
pass
class aws_globalaccelerator_accelerator(terrascript.Resource):
pass
class aws_globalaccelerator_endpoint_group(terrascript.Resource):
pass
class aws_globalaccelerator_listener(terrascript.Resource):
pass
class aws_glue_catalog_database(terrascript.Resource):
pass
class aws_glue_catalog_table(terrascript.Resource):
pass
class aws_glue_classifier(terrascript.Resource):
pass
class aws_glue_connection(terrascript.Resource):
pass
class aws_glue_dev_endpoint(terrascript.Resource):
pass
class aws_glue_crawler(terrascript.Resource):
pass
class aws_glue_data_catalog_encryption_settings(terrascript.Resource):
pass
class aws_glue_job(terrascript.Resource):
pass
class aws_glue_ml_transform(terrascript.Resource):
pass
class aws_glue_partition(terrascript.Resource):
pass
class aws_glue_registry(terrascript.Resource):
pass
class aws_glue_resource_policy(terrascript.Resource):
pass
class aws_glue_schema(terrascript.Resource):
pass
class aws_glue_security_configuration(terrascript.Resource):
pass
class aws_glue_trigger(terrascript.Resource):
pass
class aws_glue_user_defined_function(terrascript.Resource):
pass
class aws_glue_workflow(terrascript.Resource):
pass
class aws_guardduty_detector(terrascript.Resource):
pass
class aws_guardduty_filter(terrascript.Resource):
pass
class aws_guardduty_invite_accepter(terrascript.Resource):
pass
class aws_guardduty_ipset(terrascript.Resource):
pass
class aws_guardduty_member(terrascript.Resource):
pass
class aws_guardduty_organization_admin_account(terrascript.Resource):
pass
class aws_guardduty_organization_configuration(terrascript.Resource):
pass
class aws_guardduty_publishing_destination(terrascript.Resource):
pass
class aws_guardduty_threatintelset(terrascript.Resource):
pass
class aws_iam_access_key(terrascript.Resource):
pass
class aws_iam_account_alias(terrascript.Resource):
pass
class aws_iam_account_password_policy(terrascript.Resource):
pass
class aws_iam_group_policy(terrascript.Resource):
pass
class aws_iam_group(terrascript.Resource):
pass
class aws_iam_group_membership(terrascript.Resource):
pass
class aws_iam_group_policy_attachment(terrascript.Resource):
pass
class aws_iam_instance_profile(terrascript.Resource):
pass
class aws_iam_openid_connect_provider(terrascript.Resource):
pass
class aws_iam_policy(terrascript.Resource):
pass
class aws_iam_policy_attachment(terrascript.Resource):
pass
class aws_iam_role_policy_attachment(terrascript.Resource):
pass
class aws_iam_role_policy(terrascript.Resource):
pass
class aws_iam_role(terrascript.Resource):
pass
class aws_iam_saml_provider(terrascript.Resource):
pass
class aws_iam_server_certificate(terrascript.Resource):
pass
class aws_iam_service_linked_role(terrascript.Resource):
pass
class aws_iam_user_group_membership(terrascript.Resource):
pass
class aws_iam_user_policy_attachment(terrascript.Resource):
pass
class aws_iam_user_policy(terrascript.Resource):
pass
class aws_iam_user_ssh_key(terrascript.Resource):
pass
class aws_iam_user(terrascript.Resource):
pass
class aws_iam_user_login_profile(terrascript.Resource):
pass
class aws_imagebuilder_component(terrascript.Resource):
pass
class aws_imagebuilder_distribution_configuration(terrascript.Resource):
pass
class aws_imagebuilder_image(terrascript.Resource):
pass
class aws_imagebuilder_image_pipeline(terrascript.Resource):
pass
class aws_imagebuilder_image_recipe(terrascript.Resource):
pass
class aws_imagebuilder_infrastructure_configuration(terrascript.Resource):
pass
class aws_inspector_assessment_target(terrascript.Resource):
pass
class aws_inspector_assessment_template(terrascript.Resource):
pass
class aws_inspector_resource_group(terrascript.Resource):
pass
class aws_instance(terrascript.Resource):
pass
class aws_internet_gateway(terrascript.Resource):
pass
class aws_iot_certificate(terrascript.Resource):
pass
class aws_iot_policy(terrascript.Resource):
pass
class aws_iot_policy_attachment(terrascript.Resource):
pass
class aws_iot_thing(terrascript.Resource):
pass
class aws_iot_thing_principal_attachment(terrascript.Resource):
pass
class aws_iot_thing_type(terrascript.Resource):
pass
class aws_iot_topic_rule(terrascript.Resource):
pass
class aws_iot_role_alias(terrascript.Resource):
pass
class aws_key_pair(terrascript.Resource):
pass
class aws_kinesis_analytics_application(terrascript.Resource):
pass
class aws_kinesisanalyticsv2_application(terrascript.Resource):
pass
class aws_kinesis_firehose_delivery_stream(terrascript.Resource):
pass
class aws_kinesis_stream(terrascript.Resource):
pass
class aws_kinesis_video_stream(terrascript.Resource):
pass
class aws_kms_alias(terrascript.Resource):
pass
class aws_kms_external_key(terrascript.Resource):
pass
class aws_kms_grant(terrascript.Resource):
pass
class aws_kms_key(terrascript.Resource):
pass
class aws_kms_ciphertext(terrascript.Resource):
pass
class aws_lakeformation_data_lake_settings(terrascript.Resource):
pass
class aws_lakeformation_permissions(terrascript.Resource):
pass
class aws_lakeformation_resource(terrascript.Resource):
pass
class aws_lambda_alias(terrascript.Resource):
pass
class aws_lambda_code_signing_config(terrascript.Resource):
pass
class aws_lambda_event_source_mapping(terrascript.Resource):
pass
class aws_lambda_function_event_invoke_config(terrascript.Resource):
pass
class aws_lambda_function(terrascript.Resource):
pass
class aws_lambda_layer_version(terrascript.Resource):
pass
class aws_lambda_permission(terrascript.Resource):
pass
class aws_lambda_provisioned_concurrency_config(terrascript.Resource):
pass
class aws_launch_configuration(terrascript.Resource):
pass
class aws_launch_template(terrascript.Resource):
pass
class aws_lex_bot(terrascript.Resource):
pass
class aws_lex_bot_alias(terrascript.Resource):
pass
class aws_lex_intent(terrascript.Resource):
pass
class aws_lex_slot_type(terrascript.Resource):
pass
class aws_licensemanager_association(terrascript.Resource):
pass
class aws_licensemanager_license_configuration(terrascript.Resource):
pass
class aws_lightsail_domain(terrascript.Resource):
pass
class aws_lightsail_instance(terrascript.Resource):
pass
class aws_lightsail_key_pair(terrascript.Resource):
pass
class aws_lightsail_static_ip(terrascript.Resource):
pass
class aws_lightsail_static_ip_attachment(terrascript.Resource):
pass
class aws_lb_cookie_stickiness_policy(terrascript.Resource):
pass
class aws_load_balancer_policy(terrascript.Resource):
pass
class aws_load_balancer_backend_server_policy(terrascript.Resource):
pass
class aws_load_balancer_listener_policy(terrascript.Resource):
pass
class aws_lb_ssl_negotiation_policy(terrascript.Resource):
pass
class aws_macie_member_account_association(terrascript.Resource):
pass
class aws_macie_s3_bucket_association(terrascript.Resource):
pass
class aws_main_route_table_association(terrascript.Resource):
pass
class aws_mq_broker(terrascript.Resource):
pass
class aws_mq_configuration(terrascript.Resource):
pass
class aws_media_convert_queue(terrascript.Resource):
pass
class aws_media_package_channel(terrascript.Resource):
pass
class aws_media_store_container(terrascript.Resource):
pass
class aws_media_store_container_policy(terrascript.Resource):
pass
class aws_msk_cluster(terrascript.Resource):
pass
class aws_msk_configuration(terrascript.Resource):
pass
class aws_msk_scram_secret_association(terrascript.Resource):
pass
class aws_nat_gateway(terrascript.Resource):
pass
class aws_network_acl(terrascript.Resource):
pass
class aws_default_network_acl(terrascript.Resource):
pass
class aws_neptune_cluster(terrascript.Resource):
pass
class aws_neptune_cluster_instance(terrascript.Resource):
pass
class aws_neptune_cluster_parameter_group(terrascript.Resource):
pass
class aws_neptune_cluster_snapshot(terrascript.Resource):
pass
class aws_neptune_event_subscription(terrascript.Resource):
pass
class aws_neptune_parameter_group(terrascript.Resource):
pass
class aws_neptune_subnet_group(terrascript.Resource):
pass
class aws_network_acl_rule(terrascript.Resource):
pass
class aws_network_interface(terrascript.Resource):
pass
class aws_network_interface_attachment(terrascript.Resource):
pass
class aws_networkfirewall_firewall(terrascript.Resource):
pass
class aws_networkfirewall_firewall_policy(terrascript.Resource):
pass
class aws_networkfirewall_logging_configuration(terrascript.Resource):
pass
class aws_networkfirewall_resource_policy(terrascript.Resource):
pass
class aws_networkfirewall_rule_group(terrascript.Resource):
pass
class aws_opsworks_application(terrascript.Resource):
pass
class aws_opsworks_stack(terrascript.Resource):
pass
class aws_opsworks_java_app_layer(terrascript.Resource):
pass
class aws_opsworks_haproxy_layer(terrascript.Resource):
pass
class aws_opsworks_static_web_layer(terrascript.Resource):
pass
class aws_opsworks_php_app_layer(terrascript.Resource):
pass
class aws_opsworks_rails_app_layer(terrascript.Resource):
pass
class aws_opsworks_nodejs_app_layer(terrascript.Resource):
pass
class aws_opsworks_memcached_layer(terrascript.Resource):
pass
class aws_opsworks_mysql_layer(terrascript.Resource):
pass
class aws_opsworks_ganglia_layer(terrascript.Resource):
pass
class aws_opsworks_custom_layer(terrascript.Resource):
pass
class aws_opsworks_instance(terrascript.Resource):
pass
class aws_opsworks_user_profile(terrascript.Resource):
pass
class aws_opsworks_permission(terrascript.Resource):
pass
class aws_opsworks_rds_db_instance(terrascript.Resource):
pass
class aws_organizations_organization(terrascript.Resource):
pass
class aws_organizations_account(terrascript.Resource):
pass
class aws_organizations_policy(terrascript.Resource):
pass
class aws_organizations_policy_attachment(terrascript.Resource):
pass
class aws_organizations_organizational_unit(terrascript.Resource):
pass
class aws_placement_group(terrascript.Resource):
pass
class aws_prometheus_workspace(terrascript.Resource):
pass
class aws_proxy_protocol_policy(terrascript.Resource):
pass
class aws_qldb_ledger(terrascript.Resource):
pass
class aws_quicksight_group(terrascript.Resource):
pass
class aws_quicksight_user(terrascript.Resource):
pass
class aws_ram_principal_association(terrascript.Resource):
pass
class aws_ram_resource_association(terrascript.Resource):
pass
class aws_ram_resource_share(terrascript.Resource):
pass
class aws_ram_resource_share_accepter(terrascript.Resource):
pass
class aws_rds_cluster(terrascript.Resource):
pass
class aws_rds_cluster_endpoint(terrascript.Resource):
pass
class aws_rds_cluster_instance(terrascript.Resource):
pass
class aws_rds_cluster_parameter_group(terrascript.Resource):
pass
class aws_rds_global_cluster(terrascript.Resource):
pass
class aws_redshift_cluster(terrascript.Resource):
pass
class aws_redshift_security_group(terrascript.Resource):
pass
class aws_redshift_parameter_group(terrascript.Resource):
pass
class aws_redshift_subnet_group(terrascript.Resource):
pass
class aws_redshift_snapshot_copy_grant(terrascript.Resource):
pass
class aws_redshift_snapshot_schedule(terrascript.Resource):
pass
class aws_redshift_snapshot_schedule_association(terrascript.Resource):
pass
class aws_redshift_event_subscription(terrascript.Resource):
pass
class aws_resourcegroups_group(terrascript.Resource):
pass
class aws_route53_delegation_set(terrascript.Resource):
pass
class aws_route53_hosted_zone_dnssec(terrascript.Resource):
pass
class aws_route53_key_signing_key(terrascript.Resource):
pass
class aws_route53_query_log(terrascript.Resource):
pass
class aws_route53_record(terrascript.Resource):
pass
class aws_route53_zone_association(terrascript.Resource):
pass
class aws_route53_vpc_association_authorization(terrascript.Resource):
pass
class aws_route53_zone(terrascript.Resource):
pass
class aws_route53_health_check(terrascript.Resource):
pass
class aws_route53_resolver_dnssec_config(terrascript.Resource):
pass
class aws_route53_resolver_endpoint(terrascript.Resource):
pass
class aws_route53_resolver_query_log_config(terrascript.Resource):
pass
class aws_route53_resolver_query_log_config_association(terrascript.Resource):
pass
class aws_route53_resolver_rule_association(terrascript.Resource):
pass
class aws_route53_resolver_rule(terrascript.Resource):
pass
class aws_route(terrascript.Resource):
pass
class aws_route_table(terrascript.Resource):
pass
class aws_default_route_table(terrascript.Resource):
pass
class aws_route_table_association(terrascript.Resource):
pass
class aws_sagemaker_app(terrascript.Resource):
pass
class aws_sagemaker_app_image_config(terrascript.Resource):
pass
class aws_sagemaker_code_repository(terrascript.Resource):
pass
class aws_sagemaker_domain(terrascript.Resource):
pass
class aws_sagemaker_endpoint(terrascript.Resource):
pass
class aws_sagemaker_endpoint_configuration(terrascript.Resource):
pass
class aws_sagemaker_feature_group(terrascript.Resource):
pass
class aws_sagemaker_image(terrascript.Resource):
pass
class aws_sagemaker_image_version(terrascript.Resource):
pass
class aws_sagemaker_model(terrascript.Resource):
pass
class aws_sagemaker_model_package_group(terrascript.Resource):
pass
class aws_sagemaker_notebook_instance_lifecycle_configuration(terrascript.Resource):
pass
class aws_sagemaker_notebook_instance(terrascript.Resource):
pass
class aws_sagemaker_user_profile(terrascript.Resource):
pass
class aws_secretsmanager_secret(terrascript.Resource):
pass
class aws_secretsmanager_secret_policy(terrascript.Resource):
pass
class aws_secretsmanager_secret_version(terrascript.Resource):
pass
class aws_secretsmanager_secret_rotation(terrascript.Resource):
pass
class aws_ses_active_receipt_rule_set(terrascript.Resource):
pass
class aws_ses_domain_identity(terrascript.Resource):
pass
class aws_ses_domain_identity_verification(terrascript.Resource):
pass
class aws_ses_domain_dkim(terrascript.Resource):
pass
class aws_ses_domain_mail_from(terrascript.Resource):
pass
class aws_ses_email_identity(terrascript.Resource):
pass
class aws_ses_identity_policy(terrascript.Resource):
pass
class aws_ses_receipt_filter(terrascript.Resource):
pass
class aws_ses_receipt_rule(terrascript.Resource):
pass
class aws_ses_receipt_rule_set(terrascript.Resource):
pass
class aws_ses_configuration_set(terrascript.Resource):
pass
class aws_ses_event_destination(terrascript.Resource):
pass
class aws_ses_identity_notification_topic(terrascript.Resource):
pass
class aws_ses_template(terrascript.Resource):
pass
class aws_s3_access_point(terrascript.Resource):
pass
class aws_s3_account_public_access_block(terrascript.Resource):
pass
class aws_s3_bucket(terrascript.Resource):
pass
class aws_s3_bucket_analytics_configuration(terrascript.Resource):
pass
class aws_s3_bucket_policy(terrascript.Resource):
pass
class aws_s3_bucket_public_access_block(terrascript.Resource):
pass
class aws_s3_bucket_object(terrascript.Resource):
pass
class aws_s3_bucket_ownership_controls(terrascript.Resource):
pass
class aws_s3_bucket_notification(terrascript.Resource):
pass
class aws_s3_bucket_metric(terrascript.Resource):
pass
class aws_s3_bucket_inventory(terrascript.Resource):
pass
class aws_s3_object_copy(terrascript.Resource):
pass
class aws_s3control_bucket(terrascript.Resource):
pass
class aws_s3control_bucket_policy(terrascript.Resource):
pass
class aws_s3control_bucket_lifecycle_configuration(terrascript.Resource):
pass
class aws_s3outposts_endpoint(terrascript.Resource):
pass
class aws_security_group(terrascript.Resource):
pass
class aws_network_interface_sg_attachment(terrascript.Resource):
pass
class aws_default_security_group(terrascript.Resource):
pass
class aws_security_group_rule(terrascript.Resource):
pass
class aws_securityhub_account(terrascript.Resource):
pass
class aws_securityhub_action_target(terrascript.Resource):
pass
class aws_securityhub_invite_accepter(terrascript.Resource):
pass
class aws_securityhub_member(terrascript.Resource):
pass
class aws_securityhub_organization_admin_account(terrascript.Resource):
pass
class aws_securityhub_product_subscription(terrascript.Resource):
pass
class aws_securityhub_standards_subscription(terrascript.Resource):
pass
class aws_servicecatalog_portfolio(terrascript.Resource):
pass
class aws_service_discovery_http_namespace(terrascript.Resource):
pass
class aws_service_discovery_private_dns_namespace(terrascript.Resource):
pass
class aws_service_discovery_public_dns_namespace(terrascript.Resource):
pass
class aws_service_discovery_service(terrascript.Resource):
pass
class aws_servicequotas_service_quota(terrascript.Resource):
pass
class aws_shield_protection(terrascript.Resource):
pass
class aws_signer_signing_job(terrascript.Resource):
pass
class aws_signer_signing_profile(terrascript.Resource):
pass
class aws_signer_signing_profile_permission(terrascript.Resource):
pass
class aws_simpledb_domain(terrascript.Resource):
pass
class aws_ssm_activation(terrascript.Resource):
pass
class aws_ssm_association(terrascript.Resource):
pass
class aws_ssm_document(terrascript.Resource):
pass
class aws_ssm_maintenance_window(terrascript.Resource):
pass
class aws_ssm_maintenance_window_target(terrascript.Resource):
pass
class aws_ssm_maintenance_window_task(terrascript.Resource):
pass
class aws_ssm_patch_baseline(terrascript.Resource):
pass
class aws_ssm_patch_group(terrascript.Resource):
pass
class aws_ssm_parameter(terrascript.Resource):
pass
class aws_ssm_resource_data_sync(terrascript.Resource):
pass
class aws_ssoadmin_account_assignment(terrascript.Resource):
pass
class aws_ssoadmin_managed_policy_attachment(terrascript.Resource):
pass
class aws_ssoadmin_permission_set(terrascript.Resource):
pass
class aws_ssoadmin_permission_set_inline_policy(terrascript.Resource):
pass
class aws_storagegateway_cache(terrascript.Resource):
pass
class aws_storagegateway_cached_iscsi_volume(terrascript.Resource):
pass
class aws_storagegateway_gateway(terrascript.Resource):
pass
class aws_storagegateway_nfs_file_share(terrascript.Resource):
pass
class aws_storagegateway_smb_file_share(terrascript.Resource):
pass
class aws_storagegateway_stored_iscsi_volume(terrascript.Resource):
pass
class aws_storagegateway_tape_pool(terrascript.Resource):
pass
class aws_storagegateway_upload_buffer(terrascript.Resource):
pass
class aws_storagegateway_working_storage(terrascript.Resource):
pass
class aws_spot_datafeed_subscription(terrascript.Resource):
pass
class aws_spot_instance_request(terrascript.Resource):
pass
class aws_spot_fleet_request(terrascript.Resource):
pass
class aws_sqs_queue(terrascript.Resource):
pass
class aws_sqs_queue_policy(terrascript.Resource):
pass
class aws_snapshot_create_volume_permission(terrascript.Resource):
pass
class aws_sns_platform_application(terrascript.Resource):
pass
class aws_sns_sms_preferences(terrascript.Resource):
pass
class aws_sns_topic(terrascript.Resource):
pass
class aws_sns_topic_policy(terrascript.Resource):
pass
class aws_sns_topic_subscription(terrascript.Resource):
pass
class aws_sfn_activity(terrascript.Resource):
pass
class aws_sfn_state_machine(terrascript.Resource):
pass
class aws_default_subnet(terrascript.Resource):
pass
class aws_subnet(terrascript.Resource):
pass
class aws_swf_domain(terrascript.Resource):
pass
class aws_synthetics_canary(terrascript.Resource):
pass
class aws_transfer_server(terrascript.Resource):
pass
class aws_transfer_ssh_key(terrascript.Resource):
pass
class aws_transfer_user(terrascript.Resource):
pass
class aws_volume_attachment(terrascript.Resource):
pass
class aws_vpc_dhcp_options_association(terrascript.Resource):
pass
class aws_default_vpc_dhcp_options(terrascript.Resource):
pass
class aws_vpc_dhcp_options(terrascript.Resource):
pass
class aws_vpc_peering_connection(terrascript.Resource):
pass
class aws_vpc_peering_connection_accepter(terrascript.Resource):
pass
class aws_vpc_peering_connection_options(terrascript.Resource):
pass
class aws_default_vpc(terrascript.Resource):
pass
class aws_vpc(terrascript.Resource):
pass
class aws_vpc_endpoint(terrascript.Resource):
pass
class aws_vpc_endpoint_connection_notification(terrascript.Resource):
pass
class aws_vpc_endpoint_route_table_association(terrascript.Resource):
pass
class aws_vpc_endpoint_subnet_association(terrascript.Resource):
pass
class aws_vpc_endpoint_service(terrascript.Resource):
pass
class aws_vpc_endpoint_service_allowed_principal(terrascript.Resource):
pass
class aws_vpc_ipv4_cidr_block_association(terrascript.Resource):
pass
class aws_vpn_connection(terrascript.Resource):
pass
class aws_vpn_connection_route(terrascript.Resource):
pass
class aws_vpn_gateway(terrascript.Resource):
pass
class aws_vpn_gateway_attachment(terrascript.Resource):
pass
class aws_vpn_gateway_route_propagation(terrascript.Resource):
pass
class aws_waf_byte_match_set(terrascript.Resource):
pass
class aws_waf_ipset(terrascript.Resource):
pass
class aws_waf_rate_based_rule(terrascript.Resource):
pass
class aws_waf_regex_match_set(terrascript.Resource):
pass
class aws_waf_regex_pattern_set(terrascript.Resource):
pass
class aws_waf_rule(terrascript.Resource):
pass
class aws_waf_rule_group(terrascript.Resource):
pass
class aws_waf_size_constraint_set(terrascript.Resource):
pass
class aws_waf_web_acl(terrascript.Resource):
pass
class aws_waf_xss_match_set(terrascript.Resource):
pass
class aws_waf_sql_injection_match_set(terrascript.Resource):
pass
class aws_waf_geo_match_set(terrascript.Resource):
pass
class aws_wafregional_byte_match_set(terrascript.Resource):
pass
class aws_wafregional_geo_match_set(terrascript.Resource):
pass
class aws_wafregional_ipset(terrascript.Resource):
pass
class aws_wafregional_rate_based_rule(terrascript.Resource):
pass
class aws_wafregional_regex_match_set(terrascript.Resource):
pass
class aws_wafregional_regex_pattern_set(terrascript.Resource):
pass
class aws_wafregional_rule(terrascript.Resource):
pass
class aws_wafregional_rule_group(terrascript.Resource):
pass
class aws_wafregional_size_constraint_set(terrascript.Resource):
pass
class aws_wafregional_sql_injection_match_set(terrascript.Resource):
pass
class aws_wafregional_xss_match_set(terrascript.Resource):
pass
class aws_wafregional_web_acl(terrascript.Resource):
pass
class aws_wafregional_web_acl_association(terrascript.Resource):
pass
class aws_wafv2_ip_set(terrascript.Resource):
pass
class aws_wafv2_regex_pattern_set(terrascript.Resource):
pass
class aws_wafv2_rule_group(terrascript.Resource):
pass
class aws_wafv2_web_acl(terrascript.Resource):
pass
class aws_wafv2_web_acl_association(terrascript.Resource):
pass
class aws_wafv2_web_acl_logging_configuration(terrascript.Resource):
pass
class aws_worklink_fleet(terrascript.Resource):
pass
class aws_worklink_website_certificate_authority_association(terrascript.Resource):
pass
class aws_workspaces_directory(terrascript.Resource):
pass
class aws_workspaces_workspace(terrascript.Resource):
pass
class aws_batch_compute_environment(terrascript.Resource):
pass
class aws_batch_job_definition(terrascript.Resource):
pass
class aws_batch_job_queue(terrascript.Resource):
pass
class aws_pinpoint_app(terrascript.Resource):
pass
class aws_pinpoint_adm_channel(terrascript.Resource):
pass
class aws_pinpoint_apns_channel(terrascript.Resource):
pass
class aws_pinpoint_apns_sandbox_channel(terrascript.Resource):
pass
class aws_pinpoint_apns_voip_channel(terrascript.Resource):
pass
class aws_pinpoint_apns_voip_sandbox_channel(terrascript.Resource):
pass
class aws_pinpoint_baidu_channel(terrascript.Resource):
pass
class aws_pinpoint_email_channel(terrascript.Resource):
pass
class aws_pinpoint_event_stream(terrascript.Resource):
pass
class aws_pinpoint_gcm_channel(terrascript.Resource):
pass
class aws_pinpoint_sms_channel(terrascript.Resource):
pass
class aws_xray_encryption_config(terrascript.Resource):
pass
class aws_xray_group(terrascript.Resource):
pass
class aws_xray_sampling_rule(terrascript.Resource):
pass
class aws_workspaces_ip_group(terrascript.Resource):
pass
class aws_alb(terrascript.Resource):
pass
class aws_lb(terrascript.Resource):
pass
class aws_alb_listener(terrascript.Resource):
pass
class aws_lb_listener(terrascript.Resource):
pass
class aws_alb_listener_certificate(terrascript.Resource):
pass
class aws_lb_listener_certificate(terrascript.Resource):
pass
class aws_alb_listener_rule(terrascript.Resource):
pass
class aws_lb_listener_rule(terrascript.Resource):
pass
class aws_alb_target_group(terrascript.Resource):
pass
class aws_lb_target_group(terrascript.Resource):
pass
class aws_alb_target_group_attachment(terrascript.Resource):
pass
class aws_lb_target_group_attachment(terrascript.Resource):
pass
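# ---------------------------------------------------------------------
# Illustrative only: assembling a Terraform config from the generated
# classes above. The container API (Terrascript, provider, dump) is
# assumed from the 0.x-era python-terrascript this file ships with;
# the labels and AMI id are made up for the example.
from terrascript import Terrascript, provider
from terrascript.aws.r import aws_instance, aws_eip

ts = Terrascript()
ts += provider('aws', region='us-east-1')

# Each generated subclass is an empty shell; the base Resource class
# records the label and keyword arguments and emits them as Terraform
# JSON under the matching resource type.
ts += aws_instance('web', ami='ami-0abcdef1234567890',
                   instance_type='t2.micro')
ts += aws_eip('web_ip', instance='${aws_instance.web.id}')

print(ts.dump())  # JSON document suitable for `terraform init/apply`
# ---------------------------------------------------------------------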
16.746274 | 84 | 0.805965 | 5,536 | 46,069 | 6.328577 | 0.09104 | 0.156872 | 0.451006 | 0.548252 | 0.930412 | 0.926873 | 0.618838 | 0.258855 | 0.080776 | 0.033053 | 0 | 0.002457 | 0.134299 | 46,069 | 2,750 | 85 | 16.752364 | 0.876009 | 0.000434 | 0 | 0.499636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.500364 | 0.000727 | 0 | 0.500364 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 7

81f56af319908299d321117cf2d619eb092741e4 | 5,036 | py | Python | src/genie/libs/parser/iosxe/tests/ShowLispInstanceServerRARDetail/cli/equal/golden_output3_expected.py | nielsvanhooy/genieparser | 9a1955749697a6777ca614f0af4d5f3a2c254ccd | ["Apache-2.0"] | null | null | null | src/genie/libs/parser/iosxe/tests/ShowLispInstanceServerRARDetail/cli/equal/golden_output3_expected.py | nielsvanhooy/genieparser | 9a1955749697a6777ca614f0af4d5f3a2c254ccd | ["Apache-2.0"] | null | null | null | src/genie/libs/parser/iosxe/tests/ShowLispInstanceServerRARDetail/cli/equal/golden_output3_expected.py | nielsvanhooy/genieparser | 9a1955749697a6777ca614f0af4d5f3a2c254ccd | ["Apache-2.0"] | null | null | null
expected_output = {
    'lisp_id': {
        0: {
            'instance_id': {
                1031: {
                    'site': 'wired',
                    'eid': {
                        'aabb.cc00.c900/48': {
                            'host_address': ['192.168.3.2', '2001:192:168:3::2', 'FE80::A8BB:CCFF:FE00:C900'],
                            'first_registered': '05:06:20',
                            'last_registered': '05:06:20',
                            'registration_failures': {
                                'registration_failures': {
                                    'auth_failures': 0
                                }
                            },
                            'etr': {
                                'local': {
                                    'last_registered': '05:06:20',
                                    'ttl': '00:01:00',
                                    'xtr_id': 'N/A',
                                    'site_id': 'N/A',
                                    'registered_addr': ['192.168.3.2', '2001:192:168:3::2', 'FE80::A8BB:CCFF:FE00:C900']
                                }
                            }
                        },
                        'aabb.cc00.ca00/48': {
                            'host_address': ['192.168.3.3', '2001:192:168:3::3', 'FE80::A8BB:CCFF:FE00:CA00'],
                            'first_registered': '05:06:20',
                            'last_registered': '05:06:20',
                            'registration_failures': {
                                'registration_failures': {
                                    'auth_failures': 0
                                }
                            },
                            'etr': {
                                'local': {
                                    'last_registered': '05:06:20',
                                    'ttl': '00:01:00',
                                    'xtr_id': 'N/A',
                                    'site_id': 'N/A',
                                    'registered_addr': ['192.168.3.3', '2001:192:168:3::3', 'FE80::A8BB:CCFF:FE00:CA00']
                                }
                            }
                        }
                    }
                },
                1032: {
                    'site': 'wired',
                    'eid': {
                        'aabb.cc00.cb00/48': {
                            'host_address': ['192.168.4.2', '2001:192:168:4::2', 'FE80::A8BB:CCFF:FE00:CB00'],
                            'first_registered': '05:15:05',
                            'last_registered': '05:10:29',
                            'registration_failures': {
                                'registration_failures': {
                                    'auth_failures': 0
                                }
                            },
                            'etr': {
                                'local': {
                                    'last_registered': '05:10:29',
                                    'ttl': '00:01:00',
                                    'xtr_id': 'N/A',
                                    'site_id': 'N/A',
                                    'registered_addr': ['192.168.4.2', '2001:192:168:4::2', 'FE80::A8BB:CCFF:FE00:CB00']
                                }
                            }
                        },
                        'aabb.cc00.cc00/48': {
                            'host_address': ['192.168.4.3', '2001:192:168:4::3', 'FE80::A8BB:CCFF:FE00:CC00'],
                            'first_registered': '05:08:32',
                            'last_registered': '05:08:32',
                            'registration_failures': {
                                'registration_failures': {
                                    'auth_failures': 0
                                }
                            },
                            'etr': {
                                'local': {
                                    'last_registered': '05:08:32',
                                    'ttl': '00:01:00',
                                    'xtr_id': 'N/A',
                                    'site_id': 'N/A',
                                    'registered_addr': ['192.168.4.3', '2001:192:168:4::3', 'FE80::A8BB:CCFF:FE00:CC00']
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
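# ---------------------------------------------------------------------
# Hypothetical sketch of how a golden dict like expected_output is used:
# parser output is compared against it with assertEqual. The parser
# module path, the parse() keyword arguments, and the golden_output3
# fixture are assumptions based on genieparser conventions; none of
# them come from this file.
import unittest
from unittest.mock import Mock

from genie.libs.parser.iosxe.show_lisp import ShowLispInstanceServerRARDetail

# Placeholder for the captured CLI text that pairs with this dict
# (kept in a separate golden_output3_output fixture, not shown here).
golden_output3 = "<captured 'show lisp instance-id ... server RAR detail' output>"

class TestShowLispInstanceServerRARDetail(unittest.TestCase):
    def test_golden_output3(self):
        device = Mock()
        device.execute.return_value = golden_output3
        parsed = ShowLispInstanceServerRARDetail(device=device).parse(
            lisp_id=0, instance_id=1031)
        self.assertEqual(parsed, expected_output)

if __name__ == '__main__':
    unittest.main()
# ---------------------------------------------------------------------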
51.387755 | 120 | 0.231533 | 303 | 5,036 | 3.706271 | 0.171617 | 0.085485 | 0.049866 | 0.11398 | 0.899377 | 0.821015 | 0.774711 | 0.774711 | 0.774711 | 0.774711 | 0 | 0.202648 | 0.655083 | 5,036 | 98 | 121 | 51.387755 | 0.443869 | 0 | 0 | 0.489362 | 0 | 0 | 0.256899 | 0.073059 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9

c3167f4f2a4ed38fbb1e3be513412ea3561678db | 13,127 | py | Python | Python/windwardrestapi/Model/Property.py | windward-studios/Windward-REST-version-2-Clients | 8fd467e6f4ece6fcc435609ffb23448d07af3131 | ["MIT"] | null | null | null | Python/windwardrestapi/Model/Property.py | windward-studios/Windward-REST-version-2-Clients | 8fd467e6f4ece6fcc435609ffb23448d07af3131 | ["MIT"] | 1 | 2020-10-12T20:32:05.000Z | 2020-10-12T20:38:04.000Z | Python/windwardrestapi/Model/Property.py | windward-studios/Windward-REST-version-2-Clients | 8fd467e6f4ece6fcc435609ffb23448d07af3131 | ["MIT"] | null | null | null
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x08\x00\x55\x0d\x0d\x0a\x04\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\x88\x0c\x00\x00\x00\x00\x00\x10', 2)  # PyArmor runtime call; the remainder of the 13,127-byte obfuscated payload is opaque machine data
| 13,127
| 13,127
| 0.749981
| 3,277
| 13,127
| 3.00061
| 0.079646
| 0.013424
| 0.014645
| 0.013424
| 0.006102
| 0.004576
| 0.004576
| 0
| 0
| 0
| 0
| 0.310347
| 0.000229
| 13,127
| 1
| 13,127
| 13,127
| 0.438891
| 0
| 0
| 0
| 0
| 1
| 0.996953
| 0.996953
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
c36c1de598e23c27eb94909ba79870c758794b99
| 114
|
py
|
Python
|
integration/tests/assert_regex.py
|
youhavethewrong/hurl
|
91cc14882a5f1ef7fa86be09a9f5581cef680559
|
[
"Apache-2.0"
] | 1,013
|
2020-08-27T12:38:48.000Z
|
2022-03-31T23:12:23.000Z
|
integration/tests/assert_regex.py
|
youhavethewrong/hurl
|
91cc14882a5f1ef7fa86be09a9f5581cef680559
|
[
"Apache-2.0"
] | 217
|
2020-08-31T11:18:10.000Z
|
2022-03-30T17:50:30.000Z
|
integration/tests/assert_regex.py
|
youhavethewrong/hurl
|
91cc14882a5f1ef7fa86be09a9f5581cef680559
|
[
"Apache-2.0"
] | 54
|
2020-09-02T09:41:06.000Z
|
2022-03-19T15:33:05.000Z
|
# coding=utf-8
from tests import app
@app.route("/assert-regex")
def assert_regex():
return 'Hello World!'
| 12.666667
| 27
| 0.684211
| 17
| 114
| 4.529412
| 0.823529
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010526
| 0.166667
| 114
| 8
| 28
| 14.25
| 0.8
| 0.105263
| 0
| 0
| 0
| 0
| 0.252525
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
6f10f2e1211795dfd296c68da82887ac6acefe63
| 104
|
py
|
Python
|
cygraphblas_ss/lib/constants/__init__.py
|
eriknw/cygraphblas
|
81ae37591ec38aa698d5f37716464a6c366076f9
|
[
"Apache-2.0"
] | 3
|
2020-09-03T21:47:25.000Z
|
2021-08-06T20:24:19.000Z
|
cygraphblas_ss/lib/constants/__init__.py
|
eriknw/cygraphblas
|
81ae37591ec38aa698d5f37716464a6c366076f9
|
[
"Apache-2.0"
] | null | null | null |
cygraphblas_ss/lib/constants/__init__.py
|
eriknw/cygraphblas
|
81ae37591ec38aa698d5f37716464a6c366076f9
|
[
"Apache-2.0"
] | 2
|
2020-09-03T21:47:52.000Z
|
2021-08-06T20:24:20.000Z
|
from . import desc_field, desc_value, format_value, info, mode, option_field, print_level, thread_model
| 52
| 103
| 0.817308
| 16
| 104
| 4.9375
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105769
| 104
| 1
| 104
| 104
| 0.849462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
48d97cbdd7dd29afffce5a6cbb85bd414e3f28e4
| 135
|
py
|
Python
|
graphsage/__init__.py
|
a-bean-sprout/GraphSAGE_commit
|
aee05d4e5e15ff305f586e333a80f3d64f140a45
|
[
"MIT"
] | 7
|
2020-09-16T04:25:16.000Z
|
2021-06-18T02:20:12.000Z
|
graphsage/__init__.py
|
a-bean-sprout/GraphSAGE_commit
|
aee05d4e5e15ff305f586e333a80f3d64f140a45
|
[
"MIT"
] | null | null | null |
graphsage/__init__.py
|
a-bean-sprout/GraphSAGE_commit
|
aee05d4e5e15ff305f586e333a80f3d64f140a45
|
[
"MIT"
] | 3
|
2021-06-21T09:24:27.000Z
|
2021-11-01T01:31:01.000Z
|
# -*- coding: UTF-8 -*-
from __future__ import print_function  # requires print to be called with parentheses even in Python 2, i.e. aligned with Python 3
from __future__ import division
| 33.75
| 78
| 0.785185
| 16
| 135
| 6.0625
| 0.8125
| 0.206186
| 0.329897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025641
| 0.133333
| 135
| 3
| 79
| 45
| 0.803419
| 0.437037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
5b07d1330970bcd3a29a69643a49bf8c5eb9f3ec
| 153
|
py
|
Python
|
src/ssp/spark/streaming/nlp/__init__.py
|
gyan42/spark-streaming-playground
|
147ef9cbc31b7aed242663dee36143ebf0e8043f
|
[
"Apache-2.0"
] | 10
|
2020-03-12T11:51:46.000Z
|
2022-03-24T04:56:05.000Z
|
src/ssp/spark/streaming/nlp/__init__.py
|
gyan42/spark-streaming-playground
|
147ef9cbc31b7aed242663dee36143ebf0e8043f
|
[
"Apache-2.0"
] | 12
|
2020-04-23T07:28:14.000Z
|
2022-03-12T00:20:24.000Z
|
src/ssp/spark/streaming/nlp/__init__.py
|
gyan42/spark-streaming-playground
|
147ef9cbc31b7aed242663dee36143ebf0e8043f
|
[
"Apache-2.0"
] | 1
|
2020-04-20T14:48:38.000Z
|
2020-04-20T14:48:38.000Z
|
from ssp.spark.streaming.nlp.spark_dl_text_classification import SreamingTextClassifier
from ssp.spark.streaming.nlp.ner_extraction import NerExtraction
| 51
| 87
| 0.895425
| 20
| 153
| 6.65
| 0.65
| 0.105263
| 0.180451
| 0.315789
| 0.360902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052288
| 153
| 2
| 88
| 76.5
| 0.917241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d287c0d5eb41ae796a8b1ef8da90978f9a667f89
| 32,983
|
py
|
Python
|
geokey/contributions/tests/comments/test_views.py
|
universityofsussex/geokey
|
25e161dbc81841c57c148053dbe99facc81e84b8
|
[
"Apache-2.0"
] | null | null | null |
geokey/contributions/tests/comments/test_views.py
|
universityofsussex/geokey
|
25e161dbc81841c57c148053dbe99facc81e84b8
|
[
"Apache-2.0"
] | null | null | null |
geokey/contributions/tests/comments/test_views.py
|
universityofsussex/geokey
|
25e161dbc81841c57c148053dbe99facc81e84b8
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for views of contributions (comments)."""
import json
from django.test import TestCase
from django.contrib.auth.models import AnonymousUser
from django.core.urlresolvers import reverse
from django.core.exceptions import PermissionDenied
from nose.tools import raises
from rest_framework.test import APITestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from rest_framework import status
from rest_framework.renderers import JSONRenderer
from geokey.projects.tests.model_factories import UserFactory, ProjectFactory
from geokey.projects.models import Project
from geokey.contributions.models import Comment, Observation
from geokey.users.models import User
from geokey.users.tests.model_factories import UserGroupFactory
from ..model_factories import ObservationFactory, CommentFactory
from geokey.contributions.views.comments import (
CommentsAPIView,
SingleCommentAPIView,
CommentAbstractAPIView
)
class CommentAbstractAPIViewTest(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.admin = UserFactory.create()
self.creator = UserFactory.create()
self.moderator = UserFactory.create()
self.commenter = UserFactory.create()
self.project = ProjectFactory(
add_admins=[self.admin],
add_contributors=[self.creator, self.commenter]
)
self.moderators = UserGroupFactory(add_users=[self.moderator], **{
'project': self.project,
'can_moderate': True
})
self.contribution = ObservationFactory.create(**{
'project': self.project,
'creator': self.creator,
'status': 'active'
})
self.comment = CommentFactory.create(**{
'commentto': self.contribution,
'creator': self.commenter
})
def render(self, response):
response.accepted_renderer = JSONRenderer()
response.accepted_media_type = 'application/json'
response.renderer_context = {'blah': 'blubb'}
return response.render()
def test_create_comment_with_admin(self):
url = reverse('api:project_comments', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id
})
request = self.factory.post(url, {'text': 'Comment'})
request.user = self.admin
request.data = {'text': 'Comment'}
view = CommentAbstractAPIView()
response = self.render(
view.create_and_respond(request, self.contribution)
)
self.assertEqual(json.loads(response.content).get('text'), 'Comment')
def test_create_reviewcomment_with_admin(self):
url = reverse('api:project_comments', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id
})
request = self.factory.post(
url, {'text': 'Comment', 'review_status': 'open'}
)
request.user = self.admin
request.data = {'text': 'Comment', 'review_status': 'open'}
view = CommentAbstractAPIView()
response = self.render(
view.create_and_respond(request, self.contribution)
)
self.assertEqual(json.loads(response.content).get('text'), 'Comment')
ref = Observation.objects.get(pk=self.contribution.id)
self.assertEqual(ref.status, 'review')
def test_create_reviewcomment_to_empty_obs_with_admin(self):
self.contribution.properties = {}
self.contribution.save()
url = reverse('api:project_comments', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id
})
request = self.factory.post(
url, {'text': 'Comment', 'review_status': 'open'}
)
request.user = self.admin
request.data = {'text': 'Comment', 'review_status': 'open'}
view = CommentAbstractAPIView()
response = self.render(
view.create_and_respond(request, self.contribution)
)
self.assertEqual(json.loads(response.content).get('text'), 'Comment')
ref = Observation.objects.get(pk=self.contribution.id)
self.assertEqual(ref.status, 'review')
def test_update_comment_with_admin(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.admin
request.data = {'text': 'Updated'}
view = CommentAbstractAPIView()
response = self.render(
view.update_and_respond(request, self.contribution, self.comment)
)
self.assertEqual(json.loads(response.content).get('text'), 'Updated')
self.assertEqual(
Comment.objects.get(pk=self.comment.id).text,
'Updated'
)
def test_update_comment_with_commenter(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.commenter
request.data = {'text': 'Updated'}
view = CommentAbstractAPIView()
response = self.render(
view.update_and_respond(request, self.contribution, self.comment)
)
self.assertEqual(json.loads(response.content).get('text'), 'Updated')
self.assertEqual(
Comment.objects.get(pk=self.comment.id).text,
'Updated'
)
def test_update_comment_with_moderator(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.moderator
request.data = {'text': 'Updated'}
view = CommentAbstractAPIView()
response = self.render(
view.update_and_respond(request, self.contribution, self.comment)
)
self.assertEqual(json.loads(response.content).get('text'), 'Updated')
self.assertEqual(
Comment.objects.get(pk=self.comment.id).text,
'Updated'
)
@raises(PermissionDenied)
def test_update_comment_with_creator(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.creator
request.data = {'text': 'Updated'}
view = CommentAbstractAPIView()
response = self.render(
view.update_and_respond(request, self.contribution, self.comment)
)
self.assertEqual(
json.loads(response.content).get('text'),
self.comment.text
)
self.assertEqual(
Comment.objects.get(pk=self.comment.id).text,
self.comment.text
)
def test_update_invalid_comment(self):
self.project.isprivate = False
self.project.save()
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(
url, {'text': 'Updated', 'review_status': 'blah'}
)
force_authenticate(request, user=self.commenter)
view = SingleCommentAPIView.as_view()
response = view(
request,
project_id=self.project.id,
contribution_id=self.contribution.id,
comment_id=self.comment.id
).render()
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class CommentAbstractAPIViewResolveTest(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.admin = UserFactory.create()
self.creator = UserFactory.create()
self.moderator = UserFactory.create()
self.commenter = UserFactory.create()
self.project = ProjectFactory(
add_admins=[self.admin],
add_contributors=[self.creator, self.commenter]
)
self.moderators = UserGroupFactory(add_users=[self.moderator], **{
'project': self.project,
'can_moderate': True
})
self.contribution = ObservationFactory.create(**{
'project': self.project,
'creator': self.creator,
'status': 'review'
})
self.comment = CommentFactory.create(**{
'commentto': self.contribution,
'creator': self.commenter,
'review_status': 'open'
})
def render(self, response):
response.accepted_renderer = JSONRenderer()
response.accepted_media_type = 'application/json'
response.renderer_context = {'blah': 'blubb'}
return response.render()
def test_resolve_comment_with_admin(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.admin
request.data = {'review_status': 'resolved'}
view = CommentAbstractAPIView()
response = self.render(
view.update_and_respond(request, self.contribution, self.comment)
)
self.assertEqual(
json.loads(response.content).get('review_status'),
'resolved'
)
reference = Observation.objects.get(pk=self.contribution.id)
self.assertEqual(reference.status, 'active')
self.assertIsNotNone(reference.properties)
def test_resolve_comment_with_invalid_review_status(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.admin
request.data = {'review_status': 'closed'}
view = CommentAbstractAPIView()
response = self.render(
view.update_and_respond(request, self.contribution, self.comment)
)
ref = Comment.objects.get(pk=self.comment.id)
self.assertEqual(response.status_code, 400)
self.assertEqual(ref.review_status, 'open')
def test_resolve_one_of_two_comment_with_admin(self):
CommentFactory.create(**{
'commentto': self.contribution,
'creator': self.creator,
'review_status': 'open'
})
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.admin
request.data = {'review_status': 'resolved'}
view = CommentAbstractAPIView()
response = self.render(
view.update_and_respond(request, self.contribution, self.comment)
)
self.assertEqual(
json.loads(response.content).get('review_status'),
'resolved'
)
self.assertEqual(
Observation.objects.get(pk=self.contribution.id).status,
'review'
)
def test_resolve_comment_with_moderator(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.moderator
request.data = {'review_status': 'resolved'}
view = CommentAbstractAPIView()
response = self.render(
view.update_and_respond(request, self.contribution, self.comment)
)
self.assertEqual(
json.loads(response.content).get('review_status'),
'resolved'
)
self.assertEqual(
Observation.objects.get(pk=self.contribution.id).status,
'active'
)
@raises(PermissionDenied)
def test_resolve_comment_with_creator(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.creator
request.data = {'review_status': 'resolved'}
view = CommentAbstractAPIView()
view.update_and_respond(request, self.contribution, self.comment)
@raises(PermissionDenied)
def test_resolve_comment_with_commenter(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = self.commenter
request.data = {'review_status': 'resolved'}
view = CommentAbstractAPIView()
view.update_and_respond(request, self.contribution, self.comment)
@raises(PermissionDenied)
def test_resolve_comment_with_anonymous(self):
url = reverse('api:project_single_comment', kwargs={
'project_id': self.project.id,
'contribution_id': self.contribution.id,
'comment_id': self.comment.id
})
request = self.factory.patch(url, {'text': 'Updated'})
request.user = AnonymousUser()
request.data = {'review_status': 'resolved'}
view = CommentAbstractAPIView()
view.update_and_respond(request, self.contribution, self.comment)
class SingleCommentAPIViewTest(TestCase):
def setUp(self):
self.admin = UserFactory.create()
self.creator = UserFactory.create()
self.project = ProjectFactory(
add_admins=[self.admin],
add_contributors=[self.creator]
)
self.contribution = ObservationFactory.create(**{
'project': self.project,
'creator': self.creator
})
def test_get_contribution_with_admin(self):
view = SingleCommentAPIView()
contribution = view.get_contribution(
self.admin, self.project.id, self.contribution.id)
self.assertEqual(contribution, self.contribution)
def test_get_contribution_with_creator(self):
view = SingleCommentAPIView()
view.get_contribution(
self.creator,
self.project.id,
self.contribution.id
)
@raises(Project.DoesNotExist)
def test_get_contribution_with_some_dude(self):
some_dude = UserFactory.create()
view = SingleCommentAPIView()
view.get_contribution(
some_dude,
self.project.id,
self.contribution.id
)
class GetProjectComments(APITestCase):
def setUp(self):
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.non_member = UserFactory.create()
self.project = ProjectFactory(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.contribution = ObservationFactory.create(**{
'project': self.project,
'creator': self.contributor
})
comment = CommentFactory.create(**{
'commentto': self.contribution
})
response = CommentFactory.create(**{
'commentto': self.contribution,
'respondsto': comment
})
CommentFactory.create(**{
'commentto': self.contribution,
'respondsto': response
})
CommentFactory.create(**{
'commentto': self.contribution,
'respondsto': comment
})
CommentFactory.create(**{
'commentto': self.contribution
})
def get_response(self, user):
factory = APIRequestFactory()
request = factory.get(
'/api/projects/%s/contributions/%s/comments/' %
(self.project.id, self.contribution.id)
)
force_authenticate(request, user=user)
view = CommentsAPIView.as_view()
return view(
request,
project_id=self.project.id,
contribution_id=self.contribution.id
).render()
def test_get_comments_with_admin(self):
response = self.get_response(self.admin)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_comments_with_contributor(self):
response = self.get_response(self.contributor)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_comments_with_non_member(self):
response = self.get_response(self.non_member)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class AddCommentToPrivateProjectTest(APITestCase):
def setUp(self):
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.non_member = UserFactory.create()
self.project = ProjectFactory(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.contribution = ObservationFactory.create(**{
'project': self.project,
'creator': self.contributor
})
def get_response(self, user):
factory = APIRequestFactory()
request = factory.post(
'/api/projects/%s/contributions/%s/comments/' %
(self.project.id, self.contribution.id),
{'text': 'A comment to the contribution.'}
)
force_authenticate(request, user=user)
view = CommentsAPIView.as_view()
return view(
request,
project_id=self.project.id,
contribution_id=self.contribution.id
).render()
def test_add_comment_to_contribution_with_admin(self):
response = self.get_response(self.admin)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_add_comment_to_contribution_with_contributor(self):
response = self.get_response(self.contributor)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_add_review_comment_to_contribution_with_contributor(self):
factory = APIRequestFactory()
request = factory.post(
'/api/projects/%s/contributions/%s/comments/' %
(self.project.id, self.contribution.id),
{
'text': 'A review comment to the contribution.',
'review_status': 'open'
}
)
force_authenticate(request, user=self.contributor)
view = CommentsAPIView.as_view()
response = view(
request,
project_id=self.project.id,
contribution_id=self.contribution.id
).render()
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
Observation.objects.get(pk=self.contribution.id).status,
'review'
)
def test_add_closed_review_comment_to_contribution_with_contributor(self):
factory = APIRequestFactory()
request = factory.post(
'/api/projects/%s/contributions/%s/comments/' %
(self.project.id, self.contribution.id),
{
'text': 'A review comment to the contribution.',
'review_status': 'resolved'
}
)
force_authenticate(request, user=self.contributor)
view = CommentsAPIView.as_view()
response = view(
request,
project_id=self.project.id,
contribution_id=self.contribution.id
).render()
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
Observation.objects.get(pk=self.contribution.id).status,
'active'
)
def test_add_comment_to_contribution_with_non_member(self):
response = self.get_response(self.non_member)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_add_comment_to_draft(self):
self.contribution.status = 'draft'
self.contribution.save()
response = self.get_response(self.contribution.creator)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class AddCommentToPublicProjectTest(APITestCase):
def setUp(self):
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.non_member = UserFactory.create()
self.project = ProjectFactory(
add_admins=[self.admin],
add_contributors=[self.contributor],
**{'isprivate': False}
)
self.contribution = ObservationFactory.create(**{
'project': self.project,
'creator': self.contributor
})
def get_response(self, user):
if user.is_anonymous and not User.objects.filter(
display_name='AnonymousUser').exists():
UserFactory.create(display_name='AnonymousUser')
factory = APIRequestFactory()
request = factory.post(
'/api/projects/%s/maps/all-contributions/%s/comments/' %
(self.project.id, self.contribution.id),
{'text': 'A comment to the contribution.'}
)
force_authenticate(request, user=user)
view = CommentsAPIView.as_view()
return view(
request,
project_id=self.project.id,
contribution_id=self.contribution.id
).render()
def test_add_comment_to_contribution_with_admin(self):
response = self.get_response(self.admin)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_add_comment_to_contribution_with_contributor(self):
response = self.get_response(self.contributor)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_add_comment_to_contribution_with_non_member(self):
response = self.get_response(self.non_member)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_add_comment_to_contribution_with_anonymous(self):
response = self.get_response(AnonymousUser())
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
class AddCommentToWrongProjectContribution(APITestCase):
def test(self):
admin = UserFactory.create()
project = ProjectFactory(add_admins=[admin])
contribution = ObservationFactory.create()
factory = APIRequestFactory()
request = factory.post(
'/api/projects/%s/contributions/%s/comments/' %
(project.id, contribution.id),
{'text': 'A comment to the contribution.'}
)
force_authenticate(request, user=admin)
view = CommentsAPIView.as_view()
response = view(
request,
project_id=project.id,
contribution_id=contribution.id
).render()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class AddResponseToProjectCommentTest(APITestCase):
def test(self):
admin = UserFactory.create()
project = ProjectFactory(add_admins=[admin])
contribution = ObservationFactory.create(**{
'project': project
})
comment = CommentFactory.create(**{
'commentto': contribution
})
factory = APIRequestFactory()
request = factory.post(
'/api/projects/%s/contributions/%s/comments/' %
(project.id, contribution.id),
{
'text': 'Response to a comment',
'respondsto': comment.id
}
)
force_authenticate(request, user=admin)
view = CommentsAPIView.as_view()
response = view(
request,
project_id=project.id,
contribution_id=contribution.id
).render()
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(
json.loads(response.content).get('respondsto'),
comment.id
)
class AddResponseToWrongProjectCommentTest(APITestCase):
def test(self):
admin = UserFactory.create()
project = ProjectFactory(add_admins=[admin])
contribution = ObservationFactory.create(**{
'project': project
})
comment = CommentFactory.create()
factory = APIRequestFactory()
request = factory.post(
'/api/projects/%s/contributions/%s/comments/' %
(project.id, contribution.id),
{
'text': 'Response to a comment',
'respondsto': comment.id
}
)
force_authenticate(request, user=admin)
view = CommentsAPIView.as_view()
response = view(
request,
project_id=project.id,
contribution_id=contribution.id
).render()
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(
json.loads(response.content).get('error'),
'The comment you try to respond to is not a comment to the '
'contribution.'
)
class DeleteProjectCommentTest(APITestCase):
def setUp(self):
self.contributor = UserFactory.create()
self.admin = UserFactory.create()
self.non_member = UserFactory.create()
self.project = ProjectFactory(
add_admins=[self.admin],
add_contributors=[self.contributor],
**{'isprivate': False}
)
self.contribution = ObservationFactory.create(**{
'project': self.project,
'creator': self.contributor
})
self.comment = CommentFactory.create(**{
'commentto': self.contribution
})
self.comment_to_remove = CommentFactory.create(**{
'commentto': self.contribution,
'creator': self.contributor
})
def get_response(self, user, comment_to_remove=None):
if not comment_to_remove:
comment_to_remove = self.comment_to_remove
factory = APIRequestFactory()
request = factory.delete(
'/api/projects/%s/contributions/%s/comments/%s/' %
(self.project.id, self.contribution.id, comment_to_remove.id),
{'text': 'A comment to the contribution.'}
)
force_authenticate(request, user=user)
view = SingleCommentAPIView.as_view()
return view(
request,
project_id=self.project.id,
contribution_id=self.contribution.id,
comment_id=comment_to_remove.id
).render()
def test_delete_comment_with_anonymous(self):
response = self.get_response(AnonymousUser())
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
contribution = Observation.objects.get(pk=self.contribution.id)
self.assertIn(self.comment_to_remove, contribution.comments.all())
def test_delete_comment_with_admin(self):
response = self.get_response(self.admin)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
contribution = Observation.objects.get(pk=self.contribution.id)
self.assertIn(self.comment, contribution.comments.all())
self.assertNotIn(self.comment_to_remove, contribution.comments.all())
def test_delete_anonymous_comment_with_comment_creator(self):
comment_to_remove = CommentFactory.create(**{
'commentto': self.contribution,
'creator': User.objects.get(display_name='AnonymousUser')
})
response = self.get_response(AnonymousUser(), comment_to_remove)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
contribution = Observation.objects.get(pk=self.contribution.id)
self.assertIn(comment_to_remove, contribution.comments.all())
def test_delete_comment_with_comment_creator(self):
response = self.get_response(self.contributor)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
contribution = Observation.objects.get(pk=self.contribution.id)
self.assertIn(self.comment, contribution.comments.all())
self.assertNotIn(self.comment_to_remove, contribution.comments.all())
def test_delete_review_comment_with_comment_creator(self):
self.comment_to_remove.review_status = 'open'
self.comment_to_remove.save()
self.contribution.status = 'review'
self.contribution.save()
response = self.get_response(self.contributor)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
contribution = Observation.objects.get(pk=self.contribution.id)
self.assertEqual(contribution.status, 'active')
self.assertIn(self.comment, contribution.comments.all())
self.assertNotIn(self.comment_to_remove, contribution.comments.all())
def test_delete_comment_but_not_change_status_from_pending(self):
self.contribution.status = 'pending'
self.contribution.save()
response = self.get_response(self.contributor)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
contribution = Observation.objects.get(pk=self.contribution.id)
self.assertEqual(contribution.status, 'pending')
def test_delete_comment_and_change_status_from_review(self):
self.contribution.status = 'review'
self.contribution.save()
response = self.get_response(self.contributor)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
contribution = Observation.objects.get(pk=self.contribution.id)
self.assertEqual(contribution.status, 'active')
def test_delete_one_review_comment_with_comment_creator(self):
self.comment.review_status = 'open'
self.comment.save()
self.comment_to_remove.review_status = 'open'
self.comment_to_remove.save()
self.contribution.status = 'review'
self.contribution.save()
response = self.get_response(self.contributor)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
contribution = Observation.objects.get(pk=self.contribution.id)
self.assertEqual(contribution.status, 'review')
self.assertIn(self.comment, contribution.comments.all())
self.assertNotIn(self.comment_to_remove, contribution.comments.all())
def test_resolve_nested_comment_with_admin(self):
self.comment.respondsto = self.comment_to_remove
self.comment.review_status = 'open'
self.comment.save()
self.comment_to_remove.review_status = None
self.comment_to_remove.save()
self.contribution.status = 'review'
self.contribution.save()
response = self.get_response(self.admin)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(
Observation.objects.get(pk=self.contribution.id).status,
'active'
)
class DeleteWrongProjectComment(APITestCase):
def test(self):
admin = UserFactory.create()
project = ProjectFactory(add_admins=[admin])
contribution = ObservationFactory.create(**{
'project': project
})
comment = CommentFactory.create()
factory = APIRequestFactory()
request = factory.delete(
'/api/projects/%s/contributions/%s/comments/%s/' %
(project.id, contribution.id, comment.id),
{'text': 'A comment to the contribution.'}
)
force_authenticate(request, user=admin)
view = SingleCommentAPIView.as_view()
response = view(
request,
project_id=project.id,
contribution_id=contribution.id,
comment_id=comment.id
).render()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| 36.485619
| 78
| 0.631507
| 3,293
| 32,983
| 6.144549
| 0.054965
| 0.075121
| 0.041811
| 0.030641
| 0.881289
| 0.85045
| 0.839923
| 0.827617
| 0.814915
| 0.804685
| 0
| 0.003441
| 0.259831
| 32,983
| 903
| 79
| 36.526024
| 0.82538
| 0.001334
| 0
| 0.75547
| 0
| 0
| 0.091337
| 0.022986
| 0
| 0
| 0
| 0
| 0.088803
| 1
| 0.073359
| false
| 0
| 0.021879
| 0
| 0.117117
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d28fe5c263138535e92843c3a26c6a9626474917
| 128,365
|
py
|
Python
|
task/tests.py
|
wichmannpas/todoscheduler
|
e78f90cf96c4080d9c9a9b52b6fe23ca680202a0
|
[
"Apache-2.0"
] | 1
|
2018-10-30T09:43:47.000Z
|
2018-10-30T09:43:47.000Z
|
task/tests.py
|
wichmannpas/todoscheduler
|
e78f90cf96c4080d9c9a9b52b6fe23ca680202a0
|
[
"Apache-2.0"
] | 144
|
2017-11-11T22:04:19.000Z
|
2021-05-20T19:24:18.000Z
|
task/tests.py
|
wichmannpas/todoscheduler
|
e78f90cf96c4080d9c9a9b52b6fe23ca680202a0
|
[
"Apache-2.0"
] | 1
|
2018-01-28T14:33:13.000Z
|
2018-01-28T14:33:13.000Z
|
from datetime import date, timedelta
from decimal import Decimal
from io import StringIO
from urllib.parse import urlencode
from django.contrib.auth import get_user_model
from django.core.exceptions import ObjectDoesNotExist
from django.core.management import call_command
from django.db.models import Q
from django.http import HttpRequest
from django.test import TestCase
from freezegun import freeze_time
from rest_framework import status
from rest_framework.request import Request
from base.tests import AuthenticatedApiTest
from label.models import Label
from .models import Task, TaskChunk, TaskChunkSeries
from .serializers import TaskChunkSeriesSerializer
class ManagementTest(TestCase):
def setUp(self):
self.user1 = get_user_model().objects.create(
username='johndoe',
workhours_weekday=Decimal(10),
workhours_weekend=Decimal(5),
default_schedule_duration=Decimal(1),
default_schedule_full_duration_max=Decimal(3),
)
self.user2 = get_user_model().objects.create(
username='foobar',
default_schedule_duration=Decimal(2),
default_schedule_full_duration_max=Decimal(5),
)
@freeze_time('2010-05-03')
def test_schedule_task_chunk_series(self):
task1 = Task.objects.create(
user=self.user1,
name='Testtask',
duration=Decimal(2))
series1 = TaskChunkSeries.objects.create(
task=task1,
start=date(2010, 5, 3),
duration=Decimal('0.5'),
rule='interval',
interval_days=182) # 3 chunks will be scheduled within the next year
task2 = Task.objects.create(
user=self.user1,
name='Testtask 2',
duration=Decimal(4))
series2 = TaskChunkSeries.objects.create(
task=task2,
start=date(2011, 4, 3),
duration=Decimal('2.5'),
rule='interval',
interval_days=25) # 2 chunks will be scheduled within the next year
task3 = Task.objects.create(
user=self.user2,
name='Testtask 3',
duration=Decimal(4))
series3 = TaskChunkSeries.objects.create(
task=task3,
start=date(2010, 7, 3),
end=date(2010, 8, 3),
duration=Decimal('2.5'),
rule='interval',
interval_days=1) # 32 chunks will be scheduled within the next year
self.assertEqual(
TaskChunk.objects.count(),
0)
out = StringIO()
call_command('scheduletaskchunkseries', stdout=out)
self.assertEqual(
TaskChunk.objects.filter(series=series1).count(),
3)
self.assertEqual(
TaskChunk.objects.filter(series=series2).count(),
2)
self.assertEqual(
TaskChunk.objects.filter(series=series3).count(),
32)
self.assertEqual(
TaskChunk.objects.count(),
37)
self.assertIn('scheduled 37 chunks for 3 series', out.getvalue())
class TaskViewSetTest(AuthenticatedApiTest):
def test_create_task(self):
"""
Test the creation of a new task.
"""
resp = self.client.post('/task/task/', {
'name': 'Testtask',
'duration': '2.5',
'priority': 7,
'start': '2018-05-23',
'deadline': '2018-05-29',
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
self.assertEqual(
Task.objects.count(),
1)
task = Task.objects.first()
self.assertEqual(
task.user,
self.user)
self.assertEqual(
task.name,
'Testtask')
self.assertEqual(
task.duration,
Decimal('2.5'))
self.assertEqual(
task.priority,
7)
self.assertEqual(
task.start,
date(2018, 5, 23))
self.assertEqual(
task.deadline,
date(2018, 5, 29))
def test_create_task_invalid_deadline(self):
"""
Test the creation of a new task with a deadline that is before the start.
"""
resp = self.client.post('/task/task/', {
'name': 'Testtask',
'start': '2018-05-23',
'deadline': '2018-05-21',
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{
'start',
'deadline',
})
self.assertEqual(
Task.objects.count(),
0)
def test_create_task_invalid_duration(self):
"""
Test the creation of a new task with an invalid duration.
"""
resp = self.client.post('/task/task/', {
'name': 'Testtask',
'duration': '-2',
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertEqual(
Task.objects.count(),
0)
def test_create_task_invalid_priority(self):
"""
Test the creation of a new task with an invalid priority.
"""
resp = self.client.post('/task/task/', {
'name': 'Testtask',
'priority': -6,
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{
'priority',
})
self.assertEqual(
Task.objects.count(),
0)
resp = self.client.post('/task/task/', {
'name': 'Testtask',
'priority': 14,
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{
'priority',
})
self.assertEqual(
Task.objects.count(),
0)
self.assertEqual(
Task.objects.count(),
0)
resp = self.client.post('/task/task/', {
'name': 'Testtask',
'priority': 'not a number',
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{
'priority',
})
self.assertEqual(
Task.objects.count(),
0)
def test_create_task_with_labels(self):
"""
Test the creation of a new task with labels.
"""
label1 = Label.objects.create(
user=self.user,
title='Test Label',
color='333333')
label2 = Label.objects.create(
user=self.user,
title='Second Label',
color='003333')
resp = self.client.post('/task/task/', {
'name': 'Testtask',
'duration': '2.5',
'priority': 7,
'start': '2018-05-23',
'deadline': '2018-05-29',
'labels': [label1.pk, label2.pk],
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
self.assertEqual(
Task.objects.count(),
1)
task = Task.objects.first()
self.assertEqual(
task.user,
self.user)
self.assertEqual(
task.name,
'Testtask')
self.assertEqual(
task.duration,
Decimal('2.5'))
self.assertEqual(
task.priority,
7)
self.assertEqual(
task.start,
date(2018, 5, 23))
self.assertEqual(
task.deadline,
date(2018, 5, 29))
self.assertEqual(
set(label.pk for label in task.labels.all()),
{label1.pk, label2.pk})
resp = self.client.post('/task/task/', {
'name': 'Testtask',
'duration': '2.5',
'priority': 7,
'start': '2018-05-23',
'deadline': '2018-05-29',
'labels': [label2.pk],
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
self.assertEqual(
Task.objects.count(),
2)
task = Task.objects.filter(~Q(pk=task.pk)).first()
self.assertEqual(
task.user,
self.user)
self.assertEqual(
task.name,
'Testtask')
self.assertEqual(
task.duration,
Decimal('2.5'))
self.assertEqual(
task.priority,
7)
self.assertEqual(
task.start,
date(2018, 5, 23))
self.assertEqual(
task.deadline,
date(2018, 5, 29))
self.assertEqual(
set(label.pk for label in task.labels.all()),
{label2.pk})
def test_update_task_duration_less_than_scheduled(self):
"""
Test setting the duration of a task that is less than the
scheduled duration.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
TaskChunk.objects.create(
task=task,
day=date(2000, 1, 2),
day_order=1,
duration=Decimal(1))
resp = self.client.patch('/task/task/{}/'.format(task.pk), {
'duration': '0.5',
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
task.refresh_from_db()
self.assertEqual(
task.duration,
Decimal(2))
def test_update_task_duration_to_scheduled(self):
"""
Test setting the duration of a task to exactly the scheduled duration.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
TaskChunk.objects.create(
task=task,
day=date(2000, 1, 2),
day_order=1,
duration=Decimal('0.5'))
resp = self.client.patch('/task/task/{}/'.format(task.pk), {
'duration': '0.5',
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
task.duration,
Decimal('0.5'))
def test_update_task_invalid_start(self):
"""
Test updating a task to an invalid start.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2),
start=date(2018, 5, 23),
deadline=date(2018, 5, 29))
resp = self.client.patch('/task/task/{}/'.format(task.pk), {
'start': '2018-06-10',
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'start'})
task.refresh_from_db()
self.assertEqual(
task.start,
date(2018, 5, 23))
def test_partially_update_invalid_task_dates(self):
"""
Test that it is allowed to update a task that already has an invalid
start/deadline pair if neither start nor deadline is updated in the
request.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2),
start=date(2018, 5, 30),
deadline=date(2018, 3, 29))
resp = self.client.patch('/task/task/{}/'.format(task.pk), {
'name': 'renamed',
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
task.name,
'renamed')
def test_update_task_invalid_deadline(self):
"""
Test updating a task to an invalid deadline.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2),
start=date(2018, 5, 23),
deadline=date(2018, 5, 29))
resp = self.client.patch('/task/task/{}/'.format(task.pk), {
'deadline': '2018-05-10',
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'deadline'})
task.refresh_from_db()
self.assertEqual(
task.deadline,
date(2018, 5, 29))
def test_update_task_invalid_priority(self):
"""
Test updating a task to an invalid priority.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
resp = self.client.patch('/task/task/{}/'.format(task.pk), {
'priority': 42,
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'priority'})
task.refresh_from_db()
self.assertEqual(
task.priority,
5, # default priority
)
def test_get_task(self):
"""
Test the retrieval of an existing task.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
resp = self.client.get('/task/task/{}/'.format(task.id))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
resp.data['name'],
'Testtask')
self.assertEqual(
resp.data['duration'],
'2.00')
def test_no_getting_of_foreign_task(self):
"""
Test the retrieval of an existing task of a different user.
This is expected to return 404 not found.
"""
foreign_user = get_user_model().objects.create(
username='foreign')
task = Task.objects.create(
user=foreign_user,
name='Testtask',
duration=Decimal(2))
resp = self.client.get('/task/task/{}/'.format(task.id))
self.assertEqual(
resp.status_code,
status.HTTP_404_NOT_FOUND)
def test_delete_task(self):
"""
Test the deletion of an existing task.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
resp = self.client.delete('/task/task/{}/'.format(task.id))
self.assertEqual(
resp.status_code,
status.HTTP_204_NO_CONTENT)
self.assertRaises(
ObjectDoesNotExist,
task.refresh_from_db)
def test_update_task(self):
"""
Test the update of an existing task using both
PATCH and PUT.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
resp = self.client.patch('/task/task/{}/'.format(task.id), {
'name': 'Renamed testtask',
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
task.name,
'Renamed testtask')
resp = self.client.put('/task/task/{}/'.format(task.id), {
'name': 'Renamed testtask',
'duration': '42',
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
task.name,
'Renamed testtask')
self.assertEqual(
task.duration,
Decimal(42))
def test_partially_update_task_with_labels(self):
"""
Test the update of the labels of a task.
"""
label1 = Label.objects.create(
user=self.user,
title='Test Label',
color='333333')
label2 = Label.objects.create(
user=self.user,
title='Second Label',
color='003333')
task = Task.objects.create(
user=self.user,
name='Testtask')
resp = self.client.patch('/task/task/{}/'.format(task.id), {
'labels': [label1.pk],
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
set(label.pk for label in task.labels.all()),
{label1.pk})
resp = self.client.patch('/task/task/{}/'.format(task.id), {
'labels': [],
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
set(label.pk for label in task.labels.all()),
set())
resp = self.client.patch('/task/task/{}/'.format(task.id), {
'labels': [label2.pk, label1.pk],
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
set(label.pk for label in task.labels.all()),
{label1.pk, label2.pk})
resp = self.client.patch('/task/task/{}/'.format(task.id), {
'labels': [label2.pk],
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
set(label.pk for label in task.labels.all()),
{label2.pk})
resp = self.client.patch('/task/task/{}/'.format(task.id), {
'labels': [label1.pk],
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
set(label.pk for label in task.labels.all()),
{label1.pk})
def test_update_task_with_labels(self):
"""
Test that PUTting a task without specifying labels does not alter
the labels.
"""
label1 = Label.objects.create(
user=self.user,
title='Test Label',
color='333333')
label2 = Label.objects.create(
user=self.user,
title='Second Label',
color='003333')
task = Task.objects.create(
user=self.user,
name='Testtask')
task.labels.add(label1)
task.labels.add(label2)
resp = self.client.put('/task/task/{}/'.format(task.id), {
'name': 'Testtask',
'duration': 5,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
set(label.pk for label in task.labels.all()),
{label1.pk, label2.pk})
resp = self.client.put('/task/task/{}/'.format(task.id), {
'name': 'Testtask',
'duration': 5,
'labels': [label2.pk],
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
set(label.pk for label in task.labels.all()),
{label2.pk})
resp = self.client.put('/task/task/{}/'.format(task.id), {
'name': 'Testtask',
'duration': 5,
'labels': [],
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task.refresh_from_db()
self.assertEqual(
set(label.pk for label in task.labels.all()),
set())
def test_list_all_tasks(self):
"""
Test the listing of all own tasks.
"""
Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
Task.objects.create(
user=self.user,
name='Second Testtask',
duration=Decimal(3))
resp = self.client.get('/task/task/')
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
2)
@freeze_time('2001-02-03')
def test_list_all_tasks_ordering(self):
"""
Test that the ordering of the task listing is
correct.
"""
Task.objects.create(
user=self.user,
name='A Testtask',
duration=Decimal(2))
Task.objects.create(
user=self.user,
name='B Testtask',
duration=Decimal(3))
Task.objects.create(
user=self.user,
name='0 Testtask',
duration=Decimal(3),
start=date(2001, 2, 10))
Task.objects.create(
user=self.user,
name='1 Testtask',
duration=Decimal(3),
start=date(2001, 2, 9))
resp = self.client.get('/task/task/')
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertListEqual(
[
item['name']
for item in resp.data
],
[
'A Testtask',
'B Testtask',
'1 Testtask',
'0 Testtask',
])
def test_list_incomplete_tasks(self):
"""
Test the filtering for incomplete tasks.
"""
task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
TaskChunk.objects.create(
task=task,
day=date(2010, 5, 14),
duration=Decimal(2))
Task.objects.create(
user=self.user,
name='Second Testtask',
duration=Decimal(3))
foreign_user = get_user_model().objects.create(
username='foreign')
Task.objects.create(
user=foreign_user,
name='Foreign Testtask',
duration=Decimal(3))
resp = self.client.get('/task/task/')
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
2)
resp = self.client.get('/task/task/?incomplete')
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
1)
def test_no_listing_of_foreign_tasks(self):
"""
Test that tasks of other users are not listed.
"""
other_user = get_user_model().objects.create(
username='other',
workhours_weekday=Decimal(10),
workhours_weekend=Decimal(5),
default_schedule_duration=Decimal(1),
default_schedule_full_duration_max=Decimal(3),
)
Task.objects.create(
user=other_user,
name='foreign task',
duration=2)
Task.objects.create(
user=self.user,
name='own task',
duration=2)
resp = self.client.get('/task/task/')
self.assertEqual(
len(resp.data),
1)
self.assertEqual(
resp.data[0]['name'],
'own task')
resp = self.client.get('/task/task/?incomplete')
self.assertEqual(
len(resp.data),
1)
self.assertEqual(
resp.data[0]['name'],
'own task')
def test_merge_task(self):
task1 = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(3))
task2 = Task.objects.create(
user=self.user,
name='To be merged Testtask',
duration=Decimal(2))
TaskChunk.objects.create(
task=task1,
duration=Decimal(1),
day=date(2010, 12, 24)),
TaskChunk.objects.create(
task=task1,
duration=Decimal('0.5'),
day=date(2010, 12, 24)),
TaskChunk.objects.create(
task=task1,
duration=Decimal('0.5'),
day=date(2010, 12, 24)),
TaskChunk.objects.create(
task=task2,
duration=Decimal('1.5'),
day=date(2010, 12, 24)),
TaskChunk.objects.create(
task=task2,
duration=Decimal('0.5'),
day=date(2010, 12, 24)),
resp = self.client.post('/task/task/{}/merge/{}/'.format(task1.pk, task2.pk))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
5)
for chunk in resp.data:
self.assertEqual(
chunk['task']['id'],
task1.pk)
def test_merge_foreign_task(self):
task1 = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(3))
task2 = Task.objects.create(
user=get_user_model().objects.create(username='foreign'),
name='Foreign Testtask',
duration=Decimal(2))
TaskChunk.objects.create(
task=task1,
duration=Decimal(1),
day=date(2010, 12, 24)),
TaskChunk.objects.create(
task=task1,
duration=Decimal('0.5'),
day=date(2010, 12, 24)),
TaskChunk.objects.create(
task=task1,
duration=Decimal('0.5'),
day=date(2010, 12, 24)),
TaskChunk.objects.create(
task=task2,
duration=Decimal('1.5'),
day=date(2010, 12, 24)),
TaskChunk.objects.create(
task=task2,
duration=Decimal('0.5'),
day=date(2010, 12, 24)),
resp = self.client.post('/task/task/{}/merge/{}/'.format(task1.pk, task2.pk))
self.assertEqual(
resp.status_code,
status.HTTP_404_NOT_FOUND)
class TaskChunkViewSetTest(AuthenticatedApiTest):
def setUp(self):
super().setUp()
self.day = date(2001, 2, 3)
self.day2 = date(2001, 2, 4)
self.task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
def test_split_task_chunk(self):
"""Test splitting a task chunk."""
chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(3))
resp = self.client.post('/task/chunk/{}/split/'.format(chunk.pk))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
2)
resp_chunk = next(filter(lambda item: item['id'] == chunk.id, resp.data))
self.assertEqual(
Decimal(resp_chunk['duration']),
Decimal(1),
'the original chunk should have a duration of 1')
resp_new_chunk = next(filter(lambda item: item['id'] != chunk.id, resp.data))
self.assertEqual(
Decimal(resp_new_chunk['duration']),
Decimal(2),
'the new chunk should have the remaining duration')
self.assertEqual(
TaskChunk.objects.count(),
2)
chunk.refresh_from_db()
self.assertEqual(
chunk.duration,
Decimal(1))
self.assertEqual(
chunk.day_order,
1)
new_chunk = TaskChunk.objects.get(~Q(pk=chunk.pk))
self.assertEqual(
new_chunk.duration,
Decimal(2))
self.assertEqual(
new_chunk.day_order,
2)
def test_split_task_chunk_custom_duration(self):
"""Test splitting a task chunk."""
chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(1))
resp = self.client.post('/task/chunk/{}/split/?duration=0.3'.format(chunk.pk))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
2)
self.assertEqual(
TaskChunk.objects.count(),
2)
chunk.refresh_from_db()
self.assertEqual(
chunk.duration,
Decimal('0.3'))
self.assertEqual(
chunk.day_order,
1)
new_chunk = TaskChunk.objects.get(~Q(pk=chunk.pk))
self.assertEqual(
new_chunk.duration,
Decimal('0.7'))
self.assertEqual(
new_chunk.day_order,
2)
def test_split_task_chunk_invalid(self):
"""Test splitting a task chunk."""
chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(1))
# no duration left for split
resp = self.client.post('/task/chunk/{}/split/'.format(chunk.pk))
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'duration'})
resp = self.client.post('/task/chunk/{}/split/?duration=2'.format(chunk.pk))
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'duration'})
resp = self.client.post('/task/chunk/{}/split/?duration=invalid'.format(chunk.pk))
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'duration'})
chunk.finished = True
chunk.save()
resp = self.client.post('/task/chunk/{}/split/'.format(chunk.pk))
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertEqual(
resp.data['detail'],
'finished chunks can not be split')
def test_finish_task_chunk(self):
"""Test finishing a task chunk."""
task_chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(1))
self.assertEqual(
task_chunk.finished,
False)
resp = self.client.patch('/task/chunk/{}/'.format(task_chunk.pk), {
'finished': True,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
Decimal(resp.data['task']['duration']),
Decimal(2))
self.assertEqual(
Decimal(resp.data['task']['scheduled_duration']),
Decimal(1))
self.assertEqual(
Decimal(resp.data['task']['finished_duration']),
Decimal(1))
task_chunk.refresh_from_db()
self.assertEqual(
task_chunk.finished,
True)
def test_unfinish_task_chunk(self):
"""Test unfinishing a task chunk."""
task_chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(1),
finished=True)
self.assertEqual(
task_chunk.finished,
True)
resp = self.client.patch('/task/chunk/{}/'.format(task_chunk.pk), {
'finished': False,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
Decimal(resp.data['task']['duration']),
Decimal(2))
self.assertEqual(
Decimal(resp.data['task']['scheduled_duration']),
Decimal(1))
self.assertEqual(
Decimal(resp.data['task']['finished_duration']),
Decimal(0))
task_chunk.refresh_from_db()
self.assertEqual(
task_chunk.finished,
False)
def test_delete_task_chunk(self):
"""Test the deletion of a task chunk without postponing."""
task_chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(1))
resp = self.client.delete('/task/chunk/{}/?postpone=0'.format(task_chunk.pk))
self.assertEqual(
resp.status_code,
status.HTTP_204_NO_CONTENT)
self.assertRaises(
ObjectDoesNotExist,
task_chunk.refresh_from_db)
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(1))
def test_delete_task_chunk_with_task(self):
"""
Test the deletion of a task chunk without postponing.
The task should be deleted as well because the task chunk
duration matches the task duration.
"""
task_chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(2))
resp = self.client.delete('/task/chunk/{}/?postpone=0'.format(task_chunk.pk))
self.assertEqual(
resp.status_code,
status.HTTP_204_NO_CONTENT)
self.assertRaises(
ObjectDoesNotExist,
task_chunk.refresh_from_db)
self.assertRaises(
ObjectDoesNotExist,
self.task.refresh_from_db)
def test_postpone_task_chunk(self):
"""Test the deletion of a task chunk with postponing."""
task_chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(1))
resp = self.client.delete('/task/chunk/{}/?postpone=1'.format(task_chunk.pk))
self.assertEqual(
resp.status_code,
status.HTTP_204_NO_CONTENT)
self.assertRaises(
ObjectDoesNotExist,
task_chunk.refresh_from_db)
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(2))
def test_change_duration(self):
"""
Test changing the duration of the task chunk.
This should change the task duration as well.
"""
task_chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(1),
finished=True)
self.assertEqual(
task_chunk.finished,
True)
resp = self.client.patch('/task/chunk/{}/'.format(task_chunk.pk), {
'duration': 4,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
Decimal(resp.data['task']['duration']),
Decimal(5))
self.assertEqual(
Decimal(resp.data['task']['scheduled_duration']),
Decimal(4))
self.assertEqual(
Decimal(resp.data['task']['finished_duration']),
Decimal(4))
task_chunk.refresh_from_db()
self.assertEqual(
task_chunk.duration,
Decimal(4))
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(5)) # 2 + (4 - 1)
resp = self.client.patch('/task/chunk/{}/'.format(task_chunk.pk), {
'duration': '0.5',
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task_chunk.refresh_from_db()
self.assertEqual(
task_chunk.duration,
Decimal('0.5'))
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal('1.5')) # 5 + (0.5 - 4)
def test_change_duration_invalid(self):
"""
Test changing the duration of the task chunk to an invalid value.
"""
task_chunk = TaskChunk.objects.create(
task=self.task,
day=self.day,
day_order=1,
duration=Decimal(1),
finished=True)
self.assertEqual(
task_chunk.finished,
True)
resp = self.client.patch('/task/chunk/{}/'.format(task_chunk.pk), {
'duration': -4,
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
task_chunk.refresh_from_db()
self.assertEqual(
task_chunk.duration,
Decimal(1))
def test_move_task_chunk(self):
"""Test moving a task chunk within a single day."""
task_chunk1 = TaskChunk.objects.create(
task=self.task,
day=self.day,
duration=Decimal(1),
day_order=1
)
task_chunk2 = TaskChunk.objects.create(
task=self.task,
day=self.day,
duration=Decimal(1),
day_order=2
)
resp = self.client.patch('/task/chunk/{}/'.format(task_chunk1.pk), {
'day_order': 2,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task_chunk1.refresh_from_db()
self.assertEqual(
task_chunk1.day_order,
2)
task_chunk2.refresh_from_db()
self.assertEqual(
task_chunk2.day_order,
3)
# since that day order is not yet taken, no other day orders should change
resp = self.client.put('/task/chunk/{}/'.format(task_chunk1.pk), {
'task_id': self.task.id,
'day': '2001-02-03',
'duration': 1,
'day_order': 1,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task_chunk1.refresh_from_db()
self.assertEqual(
task_chunk1.day_order,
1)
task_chunk2.refresh_from_db()
self.assertEqual(
task_chunk2.day_order,
3)
def test_task_chunk_change_day(self):
"""
Test moving a chunk to another day without specifying a day order.
"""
task_chunk1 = TaskChunk.objects.create(
task=self.task,
day=self.day,
duration=Decimal(1),
day_order=1
)
task_chunk2 = TaskChunk.objects.create(
task=self.task,
day=self.day,
duration=Decimal(1),
day_order=2
)
task_chunk3 = TaskChunk.objects.create(
task=self.task,
day=self.day,
duration=Decimal(1),
day_order=3
)
task_chunk4 = TaskChunk.objects.create(
task=self.task,
day=self.day,
duration=Decimal(1),
day_order=4
)
task_chunk5 = TaskChunk.objects.create(
task=self.task,
day=self.day2,
duration=Decimal(1),
day_order=1
)
resp = self.client.patch('/task/chunk/{}/'.format(task_chunk5.pk), {
'day': self.day,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task_chunk5.refresh_from_db()
self.assertEqual(
task_chunk5.day,
self.day)
self.assertEqual(
task_chunk5.day_order,
5)
task_chunk1.refresh_from_db()
self.assertEqual(
task_chunk1.day_order,
1)
task_chunk2.refresh_from_db()
self.assertEqual(
task_chunk2.day_order,
2)
task_chunk3.refresh_from_db()
self.assertEqual(
task_chunk3.day_order,
3)
task_chunk4.refresh_from_db()
self.assertEqual(
task_chunk4.day_order,
4)
def test_task_chunk_change_day_explicit_order(self):
"""
Test changing the day of a task chunk. All conflicting chunks
of that day should be moved down.
"""
task_chunk1 = TaskChunk.objects.create(
task=self.task,
day=self.day,
duration=Decimal(1),
day_order=1
)
task_chunk2 = TaskChunk.objects.create(
task=self.task,
day=self.day2,
duration=Decimal(1),
day_order=1
)
task_chunk3 = TaskChunk.objects.create(
task=self.task,
day=self.day2,
duration=Decimal(1),
day_order=2
)
resp = self.client.put('/task/chunk/{}/'.format(task_chunk1.pk), {
'task_id': self.task.id,
'day': '2001-02-04', # self.day2
'duration': 1,
'day_order': 1,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task_chunk1.refresh_from_db()
self.assertEqual(
task_chunk1.day_order,
1)
self.assertEqual(
task_chunk1.day,
date(2001, 2, 4))
task_chunk2.refresh_from_db()
self.assertEqual(
task_chunk2.day_order,
2)
task_chunk3.refresh_from_db()
self.assertEqual(
task_chunk3.day_order,
3)
# if moved to an empty day, it gets a new order automatically assigned
resp = self.client.patch('/task/chunk/{}/'.format(task_chunk1.pk), {
'day': '2001-02-01',
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
task_chunk1.refresh_from_db()
self.assertEqual(
task_chunk1.day_order,
1)
self.assertEqual(
task_chunk1.day,
date(2001, 2, 1))
def test_explicit_creation(self):
"""
Test the explicit creation of a new task chunk.
"""
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': '2001-02-03',
'day_order': 1,
'duration': 2,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk = TaskChunk.objects.get(
pk=resp.data['id'])
self.assertEqual(
task_chunk.task,
self.task)
self.assertEqual(
task_chunk.day,
self.day)
self.assertEqual(
task_chunk.day_order,
1)
self.assertEqual(
task_chunk.duration,
Decimal(2))
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(2))
def test_explicit_creation_task_duration_increase(self):
"""
Test the explicit creation of a new task chunk with a duration longer
than the task duration. The task duration should be increased.
"""
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': '2001-02-03',
'day_order': 1,
'duration': 5,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk = TaskChunk.objects.get(
pk=resp.data['id'])
self.assertEqual(
task_chunk.task,
self.task)
self.assertEqual(
task_chunk.day,
self.day)
self.assertEqual(
task_chunk.day_order,
1)
self.assertEqual(
task_chunk.duration,
Decimal(5))
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(5))
def test_explicit_creation_day_order(self):
"""
Test that the day order is set correctly when explicitly
creating multiple task chunks without specifying a day order.
"""
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': '2001-02-03',
'duration': '0.5',
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk1 = TaskChunk.objects.get(
pk=resp.data['id'])
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': '2001-02-04',
'duration': '0.5',
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk2 = TaskChunk.objects.get(
pk=resp.data['id'])
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': '2001-02-03',
'duration': '0.1',
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk3 = TaskChunk.objects.get(
pk=resp.data['id'])
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': '2001-02-03',
'duration': '0.1',
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk4 = TaskChunk.objects.get(
pk=resp.data['id'])
self.assertGreater(
task_chunk4.day_order,
task_chunk3.day_order)
self.assertGreater(
task_chunk3.day_order,
task_chunk1.day_order)
# both task chunks were the first of their day
self.assertEqual(
task_chunk1.day_order,
task_chunk2.day_order)
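# The assertions above imply that a chunk created without an explicit
# day_order is appended to its day. A sketch of that assignment; the
# field lookups are assumptions, not the actual implementation:
#
#     from django.db.models import Max
#     def next_day_order(user, day):
#         current_max = TaskChunk.objects.filter(
#             task__user=user,
#             day=day,
#         ).aggregate(Max('day_order'))['day_order__max']
#         return (current_max or 0) + 1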
def test_explicit_creation_too_high_duration(self):
"""
Explicitly create a new task chunk.
Try to set the duration higher than the incomplete duration of
the task.
The task duration should be increased in that case.
"""
TaskChunk.objects.create(
task=self.task,
day=self.day,
duration=Decimal(1),
finished=True)
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': '2001-02-03',
'duration': 5,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk = TaskChunk.objects.get(
pk=resp.data['id'])
self.assertEqual(
task_chunk.task,
self.task)
self.assertEqual(
task_chunk.day,
self.day)
self.assertEqual(
task_chunk.duration,
Decimal(5))
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(6)) # 1 + (5 - 1)
@freeze_time('2001-02-03')
def test_schedule_for_today(self):
"""Test scheduling for current day."""
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': 'today',
'duration': 2,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk = TaskChunk.objects.get(
pk=resp.data['id'])
self.assertEqual(
task_chunk.task,
self.task)
self.assertEqual(
task_chunk.day,
self.day)
self.assertEqual(
task_chunk.duration,
Decimal(2))
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(2))
@freeze_time('2001-02-03')
def test_schedule_for_tomorrow(self):
"""Test scheduling for the next day."""
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': 'tomorrow',
'duration': 2,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk = TaskChunk.objects.get(
pk=resp.data['id'])
self.assertEqual(
task_chunk.task,
self.task)
self.assertEqual(
task_chunk.day,
self.day + timedelta(days=1))
self.assertEqual(
task_chunk.duration,
Decimal(2))
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(2))
@freeze_time('2001-02-03')
def test_schedule_next_free_capacity(self):
"""Test scheduling for the next free capacity."""
task2 = Task.objects.create(
user=self.user,
name='Other Testtask',
duration=Decimal(30))
TaskChunk.objects.create(
task=task2,
day=self.day, # Saturday
duration=Decimal(5))
TaskChunk.objects.create(
task=task2,
day=self.day + timedelta(days=1), # Sunday
duration=Decimal(5))
TaskChunk.objects.create(
task=task2,
day=self.day + timedelta(days=2), # Monday
duration=Decimal(7))
self.task.duration = 10
self.task.save()
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': 'next_free_capacity',
'duration': 9,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk = TaskChunk.objects.get(
pk=resp.data['id'])
self.assertEqual(
task_chunk.task,
self.task)
self.assertEqual(
task_chunk.day,
self.day + timedelta(days=3)) # Tuesday
self.assertEqual(
task_chunk.duration,
Decimal(9))
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(10))
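# A sketch of the 'next_free_capacity' lookup consistent with this test
# and the failure tests below: walk forward from today and pick the
# first day whose remaining capacity fits the requested duration. The
# bounded search window is an assumption; the suite only shows that 100
# fully booked days make the lookup fail:
#
#     from django.db.models import Sum
#     def next_free_capacity_day(user, duration, search_days=60):
#         for offset in range(search_days):
#             day = date.today() + timedelta(days=offset)
#             booked = TaskChunk.objects.filter(
#                 task__user=user,
#                 day=day,
#             ).aggregate(Sum('duration'))['duration__sum'] or Decimal(0)
#             if user.capacity_of_day(day) - booked >= duration:
#                 return day
#         return None  # the view maps this to a 400 error on 'day'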
@freeze_time('2001-02-03')
def test_schedule_next_free_capacity_exact(self):
"""
Test scheduling for the next free capacity when the chunk fits
exactly into a day.
"""
task2 = Task.objects.create(
user=self.user,
name='Other Testtask',
duration=Decimal(30))
TaskChunk.objects.create(
task=task2,
day=self.day, # Saturday
duration=Decimal(5))
TaskChunk.objects.create(
task=task2,
day=self.day + timedelta(days=1), # Sunday
duration=Decimal(5))
TaskChunk.objects.create(
task=task2,
day=self.day + timedelta(days=2), # Monday
duration=Decimal(7))
self.task.duration = self.user.workhours_weekday
self.task.save()
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': 'next_free_capacity',
'duration': self.user.workhours_weekday,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
task_chunk = TaskChunk.objects.get(
pk=resp.data['id'])
self.assertEqual(
task_chunk.task,
self.task)
self.assertEqual(
task_chunk.day,
self.day + timedelta(days=3)) # Tuesday
self.assertEqual(
task_chunk.duration,
self.user.workhours_weekday)
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
Decimal(10))
@freeze_time('2001-02-03')
def test_schedule_next_free_capacity_unavailable(self):
"""Test scheduling for the next free capacity."""
task2 = Task.objects.create(
user=self.user,
name='Other Testtask',
duration=Decimal(30))
for offset in range(100):
day = self.day + timedelta(days=offset)
TaskChunk.objects.create(
task=task2,
day=day,
duration=self.user.capacity_of_day(day))
self.task.duration = 10
self.task.save()
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': 'next_free_capacity',
'duration': 5,
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'day'})
@freeze_time('2001-02-03')
def test_schedule_next_free_capacity_too_long(self):
"""Test scheduling for the next free capacity."""
self.task.duration = 25
self.task.save()
resp = self.client.post('/task/chunk/', {
'task_id': self.task.id,
'day': 'next_free_capacity',
'duration': 25,
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'day'})
def test_schedule_invalid(self):
"""Test scheduling for the next free capacity."""
task2 = Task.objects.create(
user=self.user,
name='Other Testtask',
duration=Decimal(30))
TaskChunk.objects.create(
task=task2,
day=self.day, # Saturday
duration=Decimal(5))
TaskChunk.objects.create(
task=task2,
day=self.day + timedelta(days=1), # Sunday
duration=Decimal(5))
TaskChunk.objects.create(
task=task2,
day=self.day + timedelta(days=2), # Monday
duration=Decimal(7))
self.task.duration = 10
self.task.save()
resp = self.client.post('/task/chunk/', {
'task_id': task2.pk,
'day': 'next_free_capacity',
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'day'})
resp = self.client.post('/task/chunk/', {
'day': 'next_free_capacity',
'duration': 9,
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'task_id', 'day'})
resp = self.client.post('/task/chunk/', {
'task_id': -100,
'day': 'next_free_capacity',
'duration': 9,
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'task_id', 'day'})
resp = self.client.post('/task/chunk/', {
'task_id': self.task.pk,
'day': 'next_free_capacity',
'duration': 'not a decimal',
})
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{'day', 'duration'})
def test_task_chunk_nonstrict_date_filter(self):
"""
Test that unfinished chunks from days prior to min_date are
included without strict date filtering.
"""
TaskChunk.objects.create(
task=self.task,
duration=4,
day=date(2018, 1, 15))
TaskChunk.objects.create(
task=self.task,
duration=8,
day=date(2018, 1, 12),
finished=True)
TaskChunk.objects.create(
task=self.task,
duration=2,
day=date(2018, 1, 17),
finished=True)
resp = self.client.get('/task/chunk/')
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
3)
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-01-16',
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
2)
self.assertSetEqual(
{ex['day'] for ex in resp.data},
{
'2018-01-15',
'2018-01-17',
})
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-02-14',
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
1)
self.assertSetEqual(
{ex['day'] for ex in resp.data},
{
'2018-01-15',
})
def test_task_chunk_strict_date_filter(self):
"""
Test that unfinished chunks from days prior to min_date are
not included with strict date filtering.
"""
TaskChunk.objects.create(
task=self.task,
duration=4,
day=date(2018, 1, 15))
TaskChunk.objects.create(
task=self.task,
duration=8,
day=date(2018, 1, 12),
finished=True)
TaskChunk.objects.create(
task=self.task,
duration=2,
day=date(2018, 1, 17),
finished=True)
resp = self.client.get('/task/chunk/?' + urlencode({
'strict_date': True,
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
3)
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-01-16',
'strict_date': True,
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
1)
self.assertSetEqual(
{ex['day'] for ex in resp.data},
{
'2018-01-17',
})
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-02-14',
'strict_date': True,
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
0)
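# The two filter tests above pin down the min_date semantics; a sketch,
# with lookup names assumed:
#
#     if min_date is not None:
#         if strict_date:
#             queryset = queryset.filter(day__gte=min_date)
#         else:
#             # unfinished chunks are carried over past min_date
#             queryset = queryset.filter(
#                 Q(day__gte=min_date) | Q(finished=False))
#     if max_date is not None:
#         queryset = queryset.filter(day__lte=max_date)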
def test_task_chunk_min_filter(self):
TaskChunk.objects.create(
task=self.task,
duration=4,
day=date(2018, 1, 15),
finished=True)
TaskChunk.objects.create(
task=self.task,
duration=8,
day=date(2018, 1, 12),
finished=True)
TaskChunk.objects.create(
task=self.task,
duration=2,
day=date(2018, 1, 17),
finished=True)
resp = self.client.get('/task/chunk/')
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
3)
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-01-14',
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
2)
self.assertSetEqual(
{ex['day'] for ex in resp.data},
{
'2018-01-15',
'2018-01-17',
})
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-02-14',
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
0)
def test_task_chunk_max_filter(self):
TaskChunk.objects.create(
task=self.task,
duration=4,
day=date(2018, 1, 15))
TaskChunk.objects.create(
task=self.task,
duration=8,
day=date(2018, 1, 12))
TaskChunk.objects.create(
task=self.task,
duration=2,
day=date(2018, 1, 17))
resp = self.client.get('/task/chunk/')
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
3)
resp = self.client.get('/task/chunk/?' + urlencode({
'max_date': '2018-01-14',
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
1)
self.assertSetEqual(
{ex['day'] for ex in resp.data},
{
'2018-01-12',
})
resp = self.client.get('/task/chunk/?' + urlencode({
'max_date': '2016-02-14',
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
0)
def test_task_chunk_min_max_filter(self):
TaskChunk.objects.create(
task=self.task,
duration=4,
day=date(2018, 1, 15),
finished=True)
TaskChunk.objects.create(
task=self.task,
duration=8,
day=date(2018, 1, 12),
finished=True)
TaskChunk.objects.create(
task=self.task,
duration=2,
day=date(2018, 1, 17),
finished=True)
resp = self.client.get('/task/chunk/')
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
3)
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-01-14',
'max_date': '2018-01-17',
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
2)
self.assertSetEqual(
{ex['day'] for ex in resp.data},
{
'2018-01-15',
'2018-01-17',
})
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-01-13',
'max_date': '2018-01-14',
}))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
0)
def test_task_chunk_task_filter(self):
task2 = Task.objects.create(
user=self.user,
name='Another Testtask',
duration=Decimal(4))
task3 = Task.objects.create(
user=self.user,
name='Yet Another Testtask',
duration=Decimal(5))
TaskChunk.objects.create(
task=self.task,
duration=4,
day=date(2018, 1, 15))
TaskChunk.objects.create(
task=self.task,
duration=8,
day=date(2018, 1, 12))
TaskChunk.objects.create(
task=self.task,
duration=2,
day=date(2018, 1, 17))
TaskChunk.objects.create(
task=task2,
duration=2,
day=date(2018, 2, 17))
TaskChunk.objects.create(
task=task2,
duration=2,
day=date(2018, 1, 17))
TaskChunk.objects.create(
task=task3,
duration=2,
day=date(2019, 12, 24))
resp = self.client.get('/task/chunk/')
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
6)
resp = self.client.get('/task/chunk/?' + urlencode({
'task_ids': [task2.pk],
}, True))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
2)
self.assertSetEqual(
{ex['task']['id'] for ex in resp.data},
{
task2.pk,
})
resp = self.client.get('/task/chunk/?' + urlencode({
'task_ids': [task3.pk, task2.pk],
}, True))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
3)
self.assertSetEqual(
{ex['task']['id'] for ex in resp.data},
{
task2.pk,
task3.pk,
})
resp = self.client.get('/task/chunk/?' + urlencode({
'task_ids': [-100],
}, True))
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertEqual(
len(resp.data),
0)
def test_task_chunk_invalid_filter(self):
resp = self.client.get('/task/chunk/?' + urlencode({
'task_ids': [
'not an integer!',
],
}, True))
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{
'task_ids',
})
resp = self.client.get('/task/chunk/?' + urlencode({
'task_ids': [
'not an integer!',
],
'min_date': 'not a date...',
'max_date': 'not a date...',
}, True))
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{
'task_ids',
'min_date',
'max_date',
})
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-05-12',
'max_date': 'not a date...',
}, True))
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST)
self.assertSetEqual(
set(resp.data),
{
'max_date',
})
resp = self.client.get('/task/chunk/?' + urlencode({
'min_date': '2018-05-12',
'max_date': '2018-05-11', # before min_date
}, True))
self.assertEqual(
resp.status_code,
status.HTTP_400_BAD_REQUEST,
'the max date should not be allowed to be before the min date')
self.assertSetEqual(
set(resp.data),
{
'max_date',
},
'the max date should not be allowed to be before the min date')
class TaskTest(TestCase):
def setUp(self):
self.user1 = get_user_model().objects.create(
username='johndoe',
workhours_weekday=Decimal(10),
workhours_weekend=Decimal(5),
default_schedule_duration=Decimal(1),
default_schedule_full_duration_max=Decimal(3),
)
self.user2 = get_user_model().objects.create(
username='foobar',
default_schedule_duration=Decimal(2),
default_schedule_full_duration_max=Decimal(5),
)
self.weekdaydate1 = date(2017, 11, 6)
def test_str(self):
task = Task(user=self.user1, name='Testtask')
self.assertEqual(
str(task),
'johndoe: Testtask')
def test_state(self):
task = Task.objects.create(user=self.user1, duration=10)
self.assertFalse(task.completely_scheduled)
self.assertFalse(task.finished)
chunk1 = TaskChunk.objects.create(
task=task,
day=self.weekdaydate1,
day_order=1,
duration=4,
finished=False)
self.assertFalse(task.completely_scheduled)
self.assertFalse(task.finished)
TaskChunk.objects.create(
task=task,
day=self.weekdaydate1 + timedelta(days=1),
day_order=1,
duration=6,
finished=True)
self.assertTrue(task.completely_scheduled)
self.assertFalse(task.finished)
chunk1.finished = True
chunk1.save()
self.assertTrue(task.completely_scheduled)
self.assertTrue(task.finished)
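# The state flags exercised above can be expressed through the duration
# properties tested further below; a sketch, assuming those properties:
#
#     @property
#     def completely_scheduled(self):
#         return self.unscheduled_duration <= 0
#
#     @property
#     def finished(self):
#         return self.unfinished_duration <= 0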
def test_finished_duration(self):
task1 = Task.objects.create(user=self.user1, duration=42)
self.assertEqual(
task1.finished_duration,
0)
chunk1 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=4,
finished=False)
self.assertEqual(
task1.finished_duration,
0)
chunk1.finished = True
chunk1.save()
self.assertEqual(
task1.finished_duration,
4)
chunk2 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=2,
finished=False)
self.assertEqual(
task1.finished_duration,
4)
chunk2.finished = True
chunk2.save()
self.assertEqual(
task1.finished_duration,
6)
chunk1.finished = False
chunk1.save()
self.assertEqual(
task1.finished_duration,
2)
def test_unfinished_duration(self):
task1 = Task.objects.create(user=self.user1, duration=42)
self.assertEqual(
task1.unfinished_duration,
Decimal(42))
chunk1 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=4,
finished=False)
self.assertEqual(
task1.unfinished_duration,
Decimal(42))
chunk1.finished = True
chunk1.save()
self.assertEqual(
task1.unfinished_duration,
Decimal(38))
chunk2 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=2,
finished=False)
self.assertEqual(
task1.unfinished_duration,
Decimal(38))
chunk2.finished = True
chunk2.save()
self.assertEqual(
task1.unfinished_duration,
Decimal(36))
chunk1.finished = False
chunk1.save()
self.assertEqual(
task1.unfinished_duration,
Decimal(40))
def test_scheduled_duration(self):
task1 = Task.objects.create(user=self.user1, duration=42)
self.assertEqual(
task1.scheduled_duration,
0)
chunk1 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=4,
finished=False)
self.assertEqual(
task1.scheduled_duration,
4)
chunk1.finished = True
chunk1.save()
self.assertEqual(
task1.scheduled_duration,
4)
chunk2 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=2,
finished=False)
self.assertEqual(
task1.scheduled_duration,
6)
chunk2.finished = True
chunk2.save()
self.assertEqual(
task1.scheduled_duration,
6)
chunk1.finished = False
chunk1.save()
self.assertEqual(
task1.scheduled_duration,
6)
def test_unscheduled_duration(self):
task1 = Task.objects.create(user=self.user1, duration=42)
self.assertEqual(
task1.unscheduled_duration,
42)
chunk1 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=4,
finished=False)
self.assertEqual(
task1.unscheduled_duration,
38)
chunk1.finished = True
chunk1.save()
self.assertEqual(
task1.unscheduled_duration,
38)
chunk2 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=2,
finished=False)
self.assertEqual(
task1.unscheduled_duration,
36)
chunk2.finished = True
chunk2.save()
self.assertEqual(
task1.unscheduled_duration,
36)
chunk1.finished = False
chunk1.save()
self.assertEqual(
task1.unscheduled_duration,
36)
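# Taken together, the four duration properties tested above relate as
# follows (a sketch; the real implementation presumably uses database
# aggregates):
#
#     scheduled_duration   = sum(chunk.duration for all chunks)
#     finished_duration    = sum(chunk.duration for finished chunks)
#     unscheduled_duration = duration - scheduled_duration
#     unfinished_duration  = duration - finished_duration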
def test_incompletely_scheduled_tasks(self):
self.assertEqual(
set(self.user1.tasks.incompletely_scheduled()),
set())
self.assertEqual(
set(self.user2.tasks.incompletely_scheduled()),
set())
task1 = Task.objects.create(user=self.user1, duration=2)
self.assertEqual(
set(self.user1.tasks.incompletely_scheduled()),
{task1})
self.assertEqual(
set(self.user2.tasks.incompletely_scheduled()),
set())
chunk1 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=1,
finished=False)
self.assertEqual(
set(self.user1.tasks.incompletely_scheduled()),
{task1})
self.assertEqual(
set(self.user2.tasks.incompletely_scheduled()),
set())
chunk2 = TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=1,
finished=False)
self.assertEqual(
set(self.user1.tasks.incompletely_scheduled()),
set())
self.assertEqual(
set(self.user2.tasks.incompletely_scheduled()),
set())
chunk2.finished = True
chunk2.save()
self.assertEqual(
set(self.user1.tasks.incompletely_scheduled()),
set())
self.assertEqual(
set(self.user2.tasks.incompletely_scheduled()),
set())
chunk1.finished = True
chunk1.save()
self.assertEqual(
set(self.user1.tasks.incompletely_scheduled()),
set())
self.assertEqual(
set(self.user2.tasks.incompletely_scheduled()),
set())
def test_duration_types(self):
task1 = Task.objects.create(user=self.user1, duration=42)
self.assertIsInstance(
task1.finished_duration,
Decimal)
self.assertIsInstance(
task1.scheduled_duration,
Decimal)
self.assertIsInstance(
task1.unscheduled_duration,
Decimal)
TaskChunk.objects.create(
task=task1,
day=self.weekdaydate1,
day_order=1,
duration=4,
finished=False)
self.assertIsInstance(
task1.finished_duration,
Decimal)
self.assertIsInstance(
task1.scheduled_duration,
Decimal)
self.assertIsInstance(
task1.unscheduled_duration,
Decimal)
def test_merge_tasks(self):
task1 = Task.objects.create(
user=self.user1,
name='Testtask',
duration=Decimal(3))
task2 = Task.objects.create(
user=self.user1,
name='To be merged Testtask',
duration=Decimal(2))
chunks = [
TaskChunk.objects.create(
task=task1,
duration=Decimal(1),
day=self.weekdaydate1),
TaskChunk.objects.create(
task=task1,
duration=Decimal('0.5'),
day=self.weekdaydate1),
TaskChunk.objects.create(
task=task1,
duration=Decimal('0.5'),
day=self.weekdaydate1),
TaskChunk.objects.create(
task=task2,
duration=Decimal('1.5'),
day=self.weekdaydate1),
TaskChunk.objects.create(
task=task2,
duration=Decimal('0.5'),
day=self.weekdaydate1),
]
affected_chunks = task1.merge(task2)
self.assertSetEqual(
set(affected_chunks),
set(chunks))
for chunk in affected_chunks:
self.assertEqual(
chunk.task,
task1)
for chunk in chunks:
chunk.refresh_from_db()
self.assertEqual(
chunk.task,
task1)
# task 2 should not exist anymore
self.assertRaises(
ObjectDoesNotExist,
task2.refresh_from_db)
task1.refresh_from_db()
self.assertEqual(
task1.name,
'Testtask')
self.assertEqual(
task1.duration,
Decimal(5))
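# A sketch of merge() consistent with this test and the two below:
# chunks are reassigned, durations are added, and the merged task is
# deleted. The exact queries are assumptions:
#
#     def merge(self, other):
#         assert other.user_id == self.user_id
#         assert other.pk != self.pk
#         affected = list(TaskChunk.objects.filter(task=other))
#         TaskChunk.objects.filter(task=other).update(task=self)
#         self.duration += other.duration
#         self.save()
#         other.delete()
#         for chunk in affected:
#             chunk.task = self  # keep the returned instances current
#         return affected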
def test_merge_tasks_no_chunks(self):
task1 = Task.objects.create(
user=self.user1,
name='Testtask',
duration=Decimal(3))
task2 = Task.objects.create(
user=self.user1,
name='To be merged Testtask',
duration=Decimal(2))
affected_chunks = task1.merge(task2)
self.assertSetEqual(
set(affected_chunks),
set())
# task 2 should not exist anymore
self.assertRaises(
ObjectDoesNotExist,
task2.refresh_from_db)
task1.refresh_from_db()
self.assertEqual(
task1.name,
'Testtask')
self.assertEqual(
task1.duration,
Decimal(5))
def test_merge_tasks_invalid(self):
task1 = Task.objects.create(
user=self.user1,
name='Testtask',
duration=Decimal(3))
task2 = Task.objects.create(
user=self.user2,
name='To be merged Testtask',
duration=Decimal(2))
# can't merge tasks of different users
self.assertRaises(
AssertionError,
task1.merge,
task2)
# can't merge task with itself
self.assertRaises(
AssertionError,
task1.merge,
task1)
class TaskChunkSeriesTest(TestCase):
def setUp(self):
self.user = get_user_model().objects.create(
username='johndoe',
workhours_weekday=Decimal(10),
workhours_weekend=Decimal(5),
default_schedule_duration=Decimal(1),
default_schedule_full_duration_max=Decimal(3),
)
self.task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(1))
def test_str(self):
series = TaskChunkSeries(
task=self.task,
rule='interval')
self.assertEqual(
str(series),
'johndoe: Testtask: interval')
def test_add_months(self):
self.assertEqual(
TaskChunkSeries._add_months(
date(2010, 7, 15),
3),
date(2010, 10, 15))
self.assertEqual(
TaskChunkSeries._add_months(
date(2010, 7, 15),
12),
date(2011, 7, 15))
self.assertEqual(
TaskChunkSeries._add_months(
date(2010, 7, 15),
17),
date(2011, 12, 15))
self.assertEqual(
TaskChunkSeries._add_months(
date(2010, 1, 31),
1),
date(2010, 2, 28))
self.assertEqual(
TaskChunkSeries._add_months(
date(2012, 1, 31),
1),
date(2012, 2, 29))
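# A sketch of _add_months matching the expectations above: advance the
# month and clamp the day to the last day of the target month:
#
#     import calendar
#     @staticmethod
#     def _add_months(day, months):
#         month_index = day.month - 1 + months
#         year = day.year + month_index // 12
#         month = month_index % 12 + 1
#         last_day = calendar.monthrange(year, month)[1]
#         return date(year, month, min(day.day, last_day))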
def test_advance_to_weekday(self):
self.assertEqual(
TaskChunkSeries._advance_to_weekday(
date(2010, 7, 5),
3),
date(2010, 7, 8))
self.assertEqual(
TaskChunkSeries._advance_to_weekday(
date(2010, 7, 8),
3),
date(2010, 7, 8))
self.assertEqual(
TaskChunkSeries._advance_to_weekday(
date(2010, 7, 9),
3),
date(2010, 7, 15))
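# _advance_to_weekday as asserted above is a forward move of at most
# six days; a one-line sketch:
#
#     @staticmethod
#     def _advance_to_weekday(day, weekday):
#         return day + timedelta(days=(weekday - day.weekday()) % 7)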
def test_replace_day(self):
self.assertEqual(
TaskChunkSeries._replace_day(
date(2010, 7, 15),
3),
date(2010, 7, 3))
self.assertEqual(
TaskChunkSeries._replace_day(
date(2012, 2, 15),
31),
date(2012, 2, 29))
self.assertEqual(
TaskChunkSeries._replace_day(
date(2012, 2, 15),
30),
date(2012, 2, 29))
self.assertEqual(
TaskChunkSeries._replace_day(
date(2011, 2, 15),
30),
date(2011, 2, 28))
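# _replace_day clamps to the end of the month, mirroring _add_months;
# a sketch:
#
#     import calendar
#     @staticmethod
#     def _replace_day(day, new_day):
#         last_day = calendar.monthrange(day.year, day.month)[1]
#         return day.replace(day=min(new_day, last_day))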
def test_apply_rule_interval(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=10)
# interval without previous occurrence schedules for start
self.assertEqual(
series.apply_rule(),
series.start)
self.assertEqual(
series.apply_rule(date(2010, 3, 16)),
date(2010, 3, 26))
self.assertEqual(
series.apply_rule(date(2010, 3, 26)),
date(2010, 4, 5))
def test_apply_rule_interval_before_start(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 12, 24),
end=date(2011, 12, 24),
rule='interval',
interval_days=10)
# last before start is ignored
self.assertEqual(
series.apply_rule(date(2005, 3, 26)),
series.start)
def test_apply_rule_interval_end(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
end=date(2010, 12, 24),
rule='interval',
interval_days=10)
# exactly on end date
self.assertEqual(
series.apply_rule(date(2010, 12, 14)),
date(2010, 12, 24))
# would be after end date
self.assertEqual(
series.apply_rule(date(2010, 12, 15)),
None)
self.assertEqual(
series.apply_rule(date(2010, 12, 24)),
None)
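# The three interval tests above determine the rule completely; a
# sketch of apply_rule for rule == 'interval':
#
#     def apply_rule(self, last=None):
#         if last is None or last < self.start:
#             candidate = self.start
#         else:
#             candidate = last + timedelta(days=self.interval_days)
#         if self.end is not None and candidate > self.end:
#             return None
#         return candidate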
def test_apply_rule_monthly(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 7),
rule='monthly',
monthly_day=7,
monthly_months=1)
# without a previous occurrence, the rule schedules within the month of start
self.assertEqual(
series.apply_rule(),
date(2010, 2, 7))
self.assertEqual(
series.apply_rule(date(2010, 3, 16)),
date(2010, 4, 7))
self.assertEqual(
series.apply_rule(date(2010, 3, 5)),
date(2010, 4, 7))
def test_apply_rule_monthly_last(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 28),
rule='monthly',
monthly_day=31,
monthly_months=1)
# without a previous occurrence, the rule schedules within the month of start
self.assertEqual(
series.apply_rule(),
date(2010, 2, 28))
self.assertEqual(
series.apply_rule(date(2010, 3, 16)),
date(2010, 4, 30))
self.assertEqual(
series.apply_rule(date(2010, 3, 5)),
date(2010, 4, 30))
self.assertEqual(
series.apply_rule(date(2012, 1, 5)),
date(2012, 2, 29))
self.assertEqual(
series.apply_rule(date(2012, 9, 5)),
date(2012, 10, 31))
def test_apply_rule_monthly_before_start(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 7),
rule='monthly',
monthly_day=7,
monthly_months=1)
# last before start is ignored
self.assertEqual(
series.apply_rule(date(2009, 10, 5)),
date(2010, 2, 7))
def test_apply_rule_monthly_end(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
end=date(2010, 12, 24),
rule='monthly',
monthly_day=24,
monthly_months=1)
# exactly on end date
self.assertEqual(
series.apply_rule(date(2010, 11, 7)),
series.end)
series.end = date(2010, 12, 23)
series.save()
# would be after end date
self.assertEqual(
series.apply_rule(date(2010, 12, 1)),
None)
self.assertEqual(
series.apply_rule(date(2010, 11, 24)),
None)
def test_apply_rule_multiple_monthly(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='monthly',
monthly_day=24,
monthly_months=4)
# without a previous occurrence, the rule schedules within the month of start
self.assertEqual(
series.apply_rule(),
date(2010, 2, 24))
series.start = date(2010, 2, 5)
series.save()
self.assertEqual(
series.apply_rule(),
date(2010, 2, 24))
self.assertEqual(
series.apply_rule(date(2010, 3, 7)),
date(2010, 7, 24))
self.assertEqual(
series.apply_rule(date(2010, 11, 18)),
date(2011, 3, 24))
def test_apply_rule_multi_monthly_verylong(self):
"""
Test for correct behaviour when using a very long interval.
"""
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='monthly',
monthly_day=24,
monthly_months=120) # 10 years
self.assertEqual(
series.apply_rule(date(2010, 3, 7)),
date(2020, 3, 24))
self.assertEqual(
series.apply_rule(date(2010, 11, 18)),
date(2020, 11, 24))
def test_apply_rule_multi_monthly_before_start(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='monthly',
monthly_day=24,
monthly_months=4)
# last before start is ignored
self.assertEqual(
series.apply_rule(date(2009, 10, 5)),
date(2010, 2, 24))
def test_apply_rule_multi_monthly_end(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
end=date(2010, 12, 24),
rule='monthly',
monthly_day=24,
monthly_months=4)
# exactly on end date
self.assertEqual(
series.apply_rule(date(2010, 8, 24)),
series.end)
series.end = date(2010, 12, 23)
series.save()
# would be after end date
self.assertEqual(
series.apply_rule(date(2010, 8, 24)),
None)
def test_apply_rule_monthlyweekday_1st(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 1),
rule='monthlyweekday',
monthly_months=1,
monthlyweekday_weekday=5,
monthlyweekday_nth=1)
# without a previous occurrence, the rule schedules within the month of start
self.assertEqual(
series.apply_rule(),
date(2010, 2, 6))
self.assertEqual(
series.apply_rule(date(2010, 3, 16)),
date(2010, 4, 3))
self.assertEqual(
series.apply_rule(date(2010, 3, 5)),
date(2010, 4, 3))
self.assertEqual(
series.apply_rule(date(2010, 5, 16)),
date(2010, 6, 5))
def test_apply_rule_monthlyweekday_2nd(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 1),
rule='monthlyweekday',
monthly_months=1,
monthlyweekday_weekday=5,
monthlyweekday_nth=2)
# without a previous occurrence, the rule schedules within the month of start
self.assertEqual(
series.apply_rule(),
date(2010, 2, 13))
self.assertEqual(
series.apply_rule(date(2010, 3, 16)),
date(2010, 4, 10))
self.assertEqual(
series.apply_rule(date(2010, 3, 5)),
date(2010, 4, 10))
self.assertEqual(
series.apply_rule(date(2010, 5, 16)),
date(2010, 6, 12))
def test_apply_rule_monthlyweekday_last(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 1),
rule='monthlyweekday',
monthly_months=1,
monthlyweekday_weekday=5,
monthlyweekday_nth=6)
# without a previous occurrence, the rule schedules within the month of start
self.assertEqual(
series.apply_rule(),
date(2010, 2, 27))
self.assertEqual(
series.apply_rule(date(2010, 3, 16)),
date(2010, 4, 24))
self.assertEqual(
series.apply_rule(date(2010, 3, 5)),
date(2010, 4, 24))
self.assertEqual(
series.apply_rule(date(2010, 5, 16)),
date(2010, 6, 26))
@freeze_time('2010-02-24')
def test_apply_rule_monthlyweekday_start_before_first_potential_schedule(self):
"""
Test applying the monthlyweekday rule when the matching weekday of
the start month falls before the actual start date.
"""
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 1, 28),
rule='monthlyweekday',
monthly_months=1,
monthlyweekday_weekday=1,
monthlyweekday_nth=1)
# without a previous occurrence, the first matching day on or after start is used
self.assertEqual(
series.apply_rule(),
date(2010, 2, 2))
self.assertEqual(
series.apply_rule(series.start),
date(2010, 2, 2))
self.assertEqual(
series.apply_rule(date(2010, 2, 5)),
date(2010, 3, 2))
def test_schedule_limit_count(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=10)
self.assertEqual(
TaskChunk.objects.count(),
0)
scheduled = series.schedule(max_count=5)
self.assertEqual(
len(scheduled),
5)
self.assertEqual(
TaskChunk.objects.count(),
5)
scheduled = series.schedule(max_count=5)
self.assertEqual(
len(scheduled),
5)
self.assertEqual(
TaskChunk.objects.count(),
10)
def test_schedule_limit_advance(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=10)
self.assertEqual(
TaskChunk.objects.count(),
0)
with freeze_time('2010-02-24'):
scheduled = series.schedule(max_advance=timedelta(days=10))
self.assertEqual(
len(scheduled),
2)
self.assertEqual(
TaskChunk.objects.count(),
2)
with freeze_time('2010-03-06'):
scheduled = series.schedule(max_advance=timedelta(days=10))
self.assertEqual(
len(scheduled),
1)
self.assertEqual(
TaskChunk.objects.count(),
3)
self.assertSetEqual(
set(chunk['day'] for chunk in TaskChunk.objects.values('day')),
{
date(2010, 2, 24),
date(2010, 2, 24) + timedelta(days=10),
date(2010, 2, 24) + timedelta(days=20),
})
def test_schedule_end(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
end=date(2010, 4, 24),
rule='interval',
interval_days=7)
self.assertEqual(
TaskChunk.objects.count(),
0)
scheduled = series.schedule()
self.assertEqual(
len(scheduled),
9)
self.assertEqual(
TaskChunk.objects.count(),
9)
scheduled = series.schedule(max_advance=timedelta(days=10))
self.assertEqual(
len(scheduled),
0)
self.assertEqual(
TaskChunk.objects.count(),
9)
self.assertSetEqual(
set(chunk['day'] for chunk in TaskChunk.objects.values('day')),
{
date(2010, 2, 24) + timedelta(days=7 * n)
for n in range(9)
})
@freeze_time('2010-02-24')
def test_schedule_infinite(self):
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=7)
self.assertEqual(
TaskChunk.objects.count(),
0)
for n in range(10):
scheduled = series.schedule(max_advance=timedelta(days=3650))
self.assertEqual(
len(scheduled),
50)
self.assertEqual(
TaskChunk.objects.count(),
50 * (n + 1))
self.assertSetEqual(
set(chunk['day'] for chunk in TaskChunk.objects.values('day')),
{
date(2010, 2, 24) + timedelta(days=7 * n)
for n in range(10 * 50)
})
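# A sketch of schedule() consistent with this and the preceding tests:
# it repeatedly applies the rule and stops at max_count (apparently 50
# by default, given the assertions above), at the max_advance horizon,
# or at the series end. The last_scheduled_day bookkeeping and the
# series foreign key on TaskChunk are assumptions:
#
#     def schedule(self, max_count=50, max_advance=None):
#         scheduled = []
#         last = self.last_scheduled_day
#         while len(scheduled) < max_count:
#             day = self.apply_rule(last)
#             if day is None:
#                 break
#             if max_advance is not None and day > date.today() + max_advance:
#                 break
#             scheduled.append(TaskChunk.objects.create(
#                 task=self.task, series=self,
#                 day=day, duration=self.duration))
#             last = day
#         self.last_scheduled_day = last
#         self.save()
#         # see the next test: the task duration grows accordingly
#         self.task.duration += self.duration * len(scheduled)
#         self.task.save()
#         return scheduled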
def test_schedule_increases_task_duration(self):
"""
Test that newly scheduled task chunks increase the task
duration.
"""
initial_task_duration = self.task.duration
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=7)
scheduled = series.schedule(max_advance=timedelta(days=3650))
self.assertEqual(
len(scheduled),
50)
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
initial_task_duration + series.duration * 50)
class TaskChunkSeriesSerializerTest(TestCase):
def setUp(self):
self.user = get_user_model().objects.create(
username='johndoe',
workhours_weekday=Decimal(10),
workhours_weekend=Decimal(5),
default_schedule_duration=Decimal(1),
default_schedule_full_duration_max=Decimal(3),
)
self.task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(1))
# we need an authenticated request
self.request = Request(HttpRequest())
self.request.user = self.user
self.context = {
'request': self.request,
}
@freeze_time('2010-05-03')
def test_validation_invalid_rule(self):
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'invalid',
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{'rule'})
@freeze_time('2010-05-03')
def test_validation_interval(self):
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'interval',
# interval_days missing
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{'interval_days'})
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'interval',
'interval_days': 7,
# fields for other rules
'monthly_day': 17,
'monthly_months': 1,
'monthlyweekday_weekday': 0,
'monthlyweekday_nth': 2,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{
'monthly_day',
'monthly_months',
'monthlyweekday_weekday',
'monthlyweekday_nth'
})
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'interval',
'interval_days': 7,
}, context=self.context)
self.assertTrue(
serializer.is_valid())
@freeze_time('2010-05-03')
def test_validation_monthly(self):
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'monthly',
# monthly_day missing
# monthly_months missing
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{
'monthly_day',
'monthly_months',
})
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'monthly',
'monthly_day': 3,
'monthly_months': 1,
# fields for other rules
'interval_days': 7,
'monthlyweekday_weekday': 0,
'monthlyweekday_nth': 2,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{
'interval_days',
'monthlyweekday_weekday',
'monthlyweekday_nth',
})
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'monthly',
'monthly_day': 7, # not the same day as start date
'monthly_months': 1,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{'monthly_day'})
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'monthly',
'monthly_day': 3,
'monthly_months': 1,
}, context=self.context)
self.assertTrue(
serializer.is_valid())
@freeze_time('2008-01-03')
def test_validation_monthly_last_day_of_month(self):
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-01-31',
'rule': 'monthly',
'monthly_day': 31,
'monthly_months': 1,
}, context=self.context)
self.assertTrue(
serializer.is_valid())
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-02-28',
'rule': 'monthly',
'monthly_day': 31, # not the same as start date, but end of month
'monthly_months': 1,
}, context=self.context)
self.assertTrue(
serializer.is_valid())
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2008-02-29', # leap year
'rule': 'monthly',
'monthly_day': 31, # not the same as start date, but end of month
'monthly_months': 1,
}, context=self.context)
self.assertTrue(
serializer.is_valid())
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-04-30',
'rule': 'monthly',
'monthly_day': 31, # not the same as start date, but end of month
'monthly_months': 1,
}, context=self.context)
self.assertTrue(
serializer.is_valid())
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-01-30',
'rule': 'monthly',
'monthly_day': 31, # not the same as start date and not end of month
'monthly_months': 1,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{'monthly_day'})
@freeze_time('2010-05-03')
def test_validation_monthlyweekday(self):
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'monthlyweekday',
# monthly_months missing
# monthlyweekday_weekday missing
# monthlyweekday_nth missing
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{
'monthly_months',
'monthlyweekday_weekday',
'monthlyweekday_nth',
})
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'monthlyweekday',
'monthly_months': 1,
'monthlyweekday_weekday': 0,
'monthlyweekday_nth': 2,
# fields for other rules
'interval_days': 7,
'monthly_day': 7,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{
'interval_days',
'monthly_day',
})
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-05-03',
'rule': 'monthlyweekday',
'monthly_months': 1,
'monthlyweekday_weekday': 0,
'monthlyweekday_nth': 2,
}, context=self.context)
self.assertTrue(
serializer.is_valid())
def test_update_interval(self):
instance = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=10)
serializer = TaskChunkSeriesSerializer(instance=instance, data={
'task_id': self.task.pk,
'duration': 1,
'start': '2010-02-24',
'rule': 'interval',
'interval_days': 7,
}, context=self.context)
self.assertTrue(
serializer.is_valid())
serializer.save()
instance.refresh_from_db()
self.assertEqual(
instance.task,
self.task)
self.assertEqual(
instance.start,
date(2010, 2, 24))
self.assertEqual(
instance.rule,
'interval')
self.assertEqual(
instance.interval_days,
7)
def test_validation_update_change_rule(self):
instance = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=10)
serializer = TaskChunkSeriesSerializer(instance=instance, data={
'task_id': self.task.pk,
'start': '2010-02-24',
'rule': 'monthly',
'monthly_day': 15,
'monthly_months': 2,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{'rule'})
self.assertRaises(
AssertionError,
serializer.save)
def test_validation_update_change_start(self):
instance = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=10)
serializer = TaskChunkSeriesSerializer(instance=instance, data={
'task_id': self.task.pk,
'start': '2010-02-14',
'rule': 'interval',
'interval_days': 10,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{'start'})
self.assertRaises(
AssertionError,
serializer.save)
def test_validation_update_change_task(self):
instance = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=10)
task2 = Task.objects.create(
user=self.user,
name='Second Testtask',
duration=Decimal(1))
serializer = TaskChunkSeriesSerializer(instance=instance, data={
'task_id': task2.pk,
'start': '2010-02-24',
'rule': 'interval',
'interval_days': 10,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{'task_id'})
self.assertRaises(
AssertionError,
serializer.save)
def test_validation_update_change_end(self):
instance = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=10)
serializer = TaskChunkSeriesSerializer(instance=instance, data={
'task_id': self.task.pk,
'duration': 1,
'start': '2010-02-24',
'end': '2010-05-01',
'rule': 'interval',
'interval_days': 10,
}, context=self.context)
self.assertTrue(
serializer.is_valid())
serializer.save()
instance.refresh_from_db()
self.assertEqual(
instance.start,
date(2010, 2, 24))
self.assertEqual(
instance.end,
date(2010, 5, 1))
@freeze_time('2010-01-03')
def test_validation_create_start_past(self):
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-01-01', # in past
'rule': 'interval',
'interval_days': 10,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{
'start',
})
@freeze_time('2010-01-03')
def test_validation_create_start_after_end(self):
serializer = TaskChunkSeriesSerializer(data={
'task_id': self.task.pk,
'start': '2010-02-24',
'end': '2010-01-01',
'rule': 'interval',
'interval_days': 10,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{
'start',
'end',
})
def test_validation_update_start_after_end(self):
instance = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=10)
serializer = TaskChunkSeriesSerializer(instance=instance, data={
'task_id': self.task.pk,
'start': '2010-02-24',
'end': '2010-01-01',
'rule': 'interval',
'interval_days': 10,
}, context=self.context)
self.assertFalse(
serializer.is_valid())
self.assertSetEqual(
set(serializer.errors.keys()),
{
'start',
'end',
})
class TaskChunkSeriesViewSetTest(AuthenticatedApiTest):
def setUp(self):
super().setUp()
self.task = Task.objects.create(
user=self.user,
name='Testtask',
duration=Decimal(2))
@freeze_time('2010-05-03')
def test_defined_ids(self):
"""
Test that the scheduled chunks returned from the API contain id values.
This test *will fail* on database backends that are not supported,
such as SQLite.
"""
self.assertEqual(
TaskChunkSeries.objects.count(),
0)
self.assertEqual(
TaskChunk.objects.count(),
0)
resp = self.client.post('/task/chunk/series/', {
'task_id': self.task.pk,
'duration': '2',
'start': '2010-05-23',
'end': '2010-06-23',
'rule': 'interval',
'interval_days': 1,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
self.assertSetEqual(
set(resp.data.keys()),
{'series', 'scheduled', 'task'})
self.assertEqual(
len(resp.data['scheduled']),
32)
for scheduled in resp.data['scheduled']:
self.assertIsNotNone(scheduled['id'])
self.assertIsInstance(scheduled['id'], int)
@freeze_time('2010-05-03')
def test_create(self):
"""
Test the creation of a series, making sure that initial
task chunks are scheduled and returned.
"""
self.assertEqual(
TaskChunkSeries.objects.count(),
0)
self.assertEqual(
TaskChunk.objects.count(),
0)
resp = self.client.post('/task/chunk/series/', {
'task_id': self.task.pk,
'duration': '2',
'start': '2010-05-23',
'end': '2010-06-23',
'rule': 'interval',
'interval_days': 1,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
self.assertSetEqual(
set(resp.data.keys()),
{'series', 'scheduled', 'task'})
self.assertEqual(
resp.data['series']['task_id'],
self.task.pk)
self.assertEqual(
Decimal(resp.data['series']['duration']),
Decimal(2))
self.assertEqual(
resp.data['series']['start'],
'2010-05-23')
self.assertEqual(
resp.data['series']['end'],
'2010-06-23')
self.assertEqual(
resp.data['series']['rule'],
'interval')
self.assertEqual(
resp.data['series']['interval_days'],
1)
self.assertEqual(
len(resp.data['scheduled']),
32)
self.assertEqual(
TaskChunkSeries.objects.count(),
1)
series = TaskChunkSeries.objects.first()
self.assertTrue(
series.completely_scheduled)
self.assertEqual(
TaskChunk.objects.count(),
32)
@freeze_time('2010-05-03')
def test_create_scheduled_task_duration(self):
"""
Test that creating a series updates the task duration and that
the scheduled chunks carry the updated task data.
"""
self.assertEqual(
TaskChunkSeries.objects.count(),
0)
self.assertEqual(
TaskChunk.objects.count(),
0)
initial_task_duration = self.task.duration
initial_task_scheduled_duration = self.task.scheduled_duration
resp = self.client.post('/task/chunk/series/', {
'task_id': self.task.pk,
'duration': '2',
'start': '2010-05-23',
'end': '2010-06-23',
'rule': 'interval',
'interval_days': 1,
})
self.assertEqual(
resp.status_code,
status.HTTP_201_CREATED)
self.assertSetEqual(
set(resp.data.keys()),
{'series', 'scheduled', 'task'})
self.assertEqual(
len(resp.data['scheduled']),
32)
self.task.refresh_from_db()
self.assertNotEqual(
self.task.duration,
initial_task_duration)
self.assertNotEqual(
self.task.scheduled_duration,
initial_task_scheduled_duration)
for scheduled in resp.data['scheduled']:
self.assertEqual(
Decimal(scheduled['task']['duration']),
self.task.duration)
self.assertEqual(
Decimal(scheduled['task']['scheduled_duration']),
self.task.scheduled_duration)
@freeze_time('2010-05-03')
def test_update_cleaning(self):
"""
Test the cleaning of task chunks when modifying the end date.
"""
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 5, 3),
end=date(2010, 5, 24),
rule='interval',
interval_days=7)
series.schedule()
self.assertEqual(
TaskChunkSeries.objects.count(),
1)
self.assertEqual(
TaskChunk.objects.count(),
4)
resp = self.client.put('/task/chunk/series/{}/'.format(series.pk), {
'task_id': self.task.pk,
'duration': '1',
'start': '2010-05-03',
'end': '2010-05-10',
'rule': 'interval',
'interval_days': 7,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertSetEqual(
set(resp.data.keys()),
{'series', 'cleaned', 'scheduled', 'task'})
self.assertEqual(
len(resp.data['cleaned']),
2)
self.assertEqual(
len(resp.data['scheduled']),
0)
self.assertEqual(
TaskChunk.objects.count(),
2)
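        # note: self.task has not been refreshed from the database yet, so its
        # duration read here still holds the pre-update value; the same
        # capture-then-refresh pattern is used in the duration tests below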
initial_task_duration = self.task.duration
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
initial_task_duration - 2)
@freeze_time('2010-05-03')
def test_update_duration(self):
"""
Test updating the duration of a task chunk series.
"""
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 5, 3),
end=date(2010, 5, 24),
rule='interval',
interval_days=7)
series.schedule()
self.assertEqual(
TaskChunkSeries.objects.count(),
1)
self.assertEqual(
TaskChunk.objects.count(),
4)
resp = self.client.put('/task/chunk/series/{}/'.format(series.pk), {
'task_id': self.task.pk,
'duration': 5,
'start': '2010-05-03',
'end': '2010-05-24',
'rule': 'interval',
'interval_days': 7,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertSetEqual(
set(resp.data.keys()),
{'series', 'cleaned', 'scheduled', 'task'})
self.assertEqual(
len(resp.data['cleaned']),
0)
self.assertEqual(
len(resp.data['scheduled']),
0)
self.assertEqual(
TaskChunk.objects.count(),
4)
# each chunk is increased to the new duration of 5
initial_task_duration = self.task.duration
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
initial_task_duration + 4 * (5 - 1))
self.assertEqual(
Decimal(resp.data['task']['duration']),
initial_task_duration + 4 * (5 - 1))
@freeze_time('2010-05-03')
def test_update_duration_modified(self):
"""
Test updating the duration of a task chunk series when the duration
of a task chunk was already modified.
"""
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 5, 3),
end=date(2010, 5, 24),
rule='interval',
interval_days=7)
series.schedule()
self.assertEqual(
TaskChunkSeries.objects.count(),
1)
self.assertEqual(
TaskChunk.objects.count(),
4)
chunk = TaskChunk.objects.last()
chunk.duration = 3
chunk.save()
resp = self.client.put('/task/chunk/series/{}/'.format(series.pk), {
'task_id': self.task.pk,
'duration': 5,
'start': '2010-05-03',
'end': '2010-05-24',
'rule': 'interval',
'interval_days': 7,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertSetEqual(
set(resp.data.keys()),
{'series', 'cleaned', 'scheduled', 'task'})
self.assertEqual(
len(resp.data['cleaned']),
0)
self.assertEqual(
len(resp.data['scheduled']),
0)
self.assertEqual(
TaskChunk.objects.count(),
4)
# each chunk except for the modified one is increased to the new duration of 5
initial_task_duration = self.task.duration
self.task.refresh_from_db()
self.assertEqual(
self.task.duration,
initial_task_duration + 3 * (5 - 1))
self.assertEqual(
Decimal(resp.data['task']['duration']),
initial_task_duration + 3 * (5 - 1))
@freeze_time('2010-05-03')
def test_update_scheduling(self):
"""
Test the scheduling of task chunks when modifying the end date.
"""
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 5, 3),
end=date(2010, 5, 24),
rule='interval',
interval_days=7)
series.schedule()
self.assertEqual(
TaskChunkSeries.objects.count(),
1)
self.assertEqual(
TaskChunk.objects.count(),
4)
resp = self.client.put('/task/chunk/series/{}/'.format(series.pk), {
'task_id': self.task.pk,
'duration': '2',
'start': '2010-05-03',
'end': '2010-06-07',
'rule': 'interval',
'interval_days': 7,
})
self.assertEqual(
resp.status_code,
status.HTTP_200_OK)
self.assertSetEqual(
set(resp.data.keys()),
{'series', 'cleaned', 'scheduled', 'task'})
self.assertEqual(
len(resp.data['cleaned']),
0)
self.assertEqual(
len(resp.data['scheduled']),
2)
self.assertEqual(
TaskChunk.objects.count(),
6)
def test_partial_update(self):
"""
Test that it is not allowed to partially update a task chunk series.
"""
series = TaskChunkSeries.objects.create(
task=self.task,
start=date(2010, 2, 24),
rule='interval',
interval_days=7)
resp = self.client.patch('/task/chunk/series/{}/'.format(series.pk), {
'duration': '2.5',
})
self.assertEqual(
resp.status_code,
status.HTTP_405_METHOD_NOT_ALLOWED)
def test_no_getting_of_foreign(self):
foreign_user = get_user_model().objects.create(
username='foreign')
foreign_task = Task.objects.create(
user=foreign_user,
name='Testtask',
duration=Decimal(2))
series = TaskChunkSeries.objects.create(
task=foreign_task,
start=date(2010, 2, 24),
rule='interval',
interval_days=7)
resp = self.client.get('/task/chunk/series/{}/'.format(series.id))
self.assertEqual(
resp.status_code,
status.HTTP_404_NOT_FOUND)
class TaskChunkTest(TestCase):
def setUp(self):
self.user1 = get_user_model().objects.create(
username='johndoe',
workhours_weekday=Decimal(10),
workhours_weekend=Decimal(5),
default_schedule_duration=Decimal(1),
default_schedule_full_duration_max=Decimal(3),
)
self.user2 = get_user_model().objects.create(
username='foobar',
default_schedule_duration=Decimal(2),
default_schedule_full_duration_max=Decimal(5),
)
self.weekdaydate1 = date(2017, 11, 6)
def test_str(self):
task = Task.objects.create(
name='Testtask',
user=self.user1)
chunk = TaskChunk(task=task, day=date(2018, 12, 24))
self.assertEqual(
str(chunk),
'johndoe: Testtask: 2018-12-24')
def test_split_chunk(self):
"""
Test splitting a task chunk.
"""
task = Task.objects.create(
name='Testtask',
user=self.user1,
duration=5)
chunk = TaskChunk.objects.create(
task=task,
day=date(2018, 12, 24),
duration=3,
day_order=1)
self.assertEqual(
TaskChunk.objects.count(),
1)
affected_chunks = chunk.split()
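        # split() shrinks the original chunk to the user's default schedule
        # duration (1 hour for user1) and moves the remainder into a new chunk
        # ordered directly after it, as the assertions below verify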
self.assertEqual(
len(affected_chunks),
2)
self.assertEqual(
TaskChunk.objects.count(),
2)
chunk.refresh_from_db()
split_chunk = TaskChunk.objects.get(~Q(pk=chunk.pk))
self.assertEqual(
chunk.day_order,
1)
self.assertEqual(
chunk.duration,
Decimal(1))
self.assertEqual(
split_chunk.day_order,
2)
self.assertEqual(
split_chunk.duration,
Decimal(2))
def test_split_chunk_with_existing(self):
"""
Test splitting a task chunk when other chunks exist on the same day,
making sure their day orders are shifted accordingly.
"""
task = Task.objects.create(
name='Testtask',
user=self.user1,
duration=5)
task2 = Task.objects.create(
name='Other Testtask',
user=self.user1,
duration=5)
task3 = Task.objects.create(
name='Yet Other Testtask',
user=self.user1,
duration=5)
chunk0 = TaskChunk.objects.create(
task=task2,
day=date(2018, 12, 24),
duration=3,
day_order=1)
chunk = TaskChunk.objects.create(
task=task,
day=date(2018, 12, 24),
duration=3,
day_order=2)
chunk2 = TaskChunk.objects.create(
task=task3,
day=date(2018, 12, 24),
duration=3,
day_order=3)
chunk3 = TaskChunk.objects.create(
task=task3,
day=date(2018, 12, 24),
duration=3,
day_order=4)
self.assertEqual(
TaskChunk.objects.count(),
4)
affected_chunks = chunk.split()
self.assertEqual(
len(affected_chunks),
4) # first chunk is not affected
self.assertEqual(
TaskChunk.objects.count(),
5)
chunk0.refresh_from_db()
chunk.refresh_from_db()
chunk2.refresh_from_db()
chunk3.refresh_from_db()
split_chunk = TaskChunk.objects.get(
~Q(pk__in={
chunk0.pk, chunk.pk, chunk2.pk, chunk3.pk
}))
self.assertEqual(
chunk0.day_order,
1)
self.assertEqual(
chunk2.day_order,
4)
self.assertEqual(
chunk3.day_order,
5)
self.assertEqual(
chunk.day_order,
2)
self.assertEqual(
chunk.duration,
Decimal(1))
self.assertEqual(
split_chunk.day_order,
3)
self.assertEqual(
split_chunk.duration,
Decimal(2))
@freeze_time('2017-11-16')
def test_missed_task_chunks(self):
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user1)),
[])
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user2)),
[])
task1 = Task.objects.create(
user=self.user1,
duration=Decimal(42))
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user1)),
[])
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user2)),
[])
TaskChunk.objects.create(
task=task1,
duration=1,
day=date(2018, 1, 1),
day_order=1,
)
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user1)),
[])
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user2)),
[])
chunk2 = TaskChunk.objects.create(
task=task1,
duration=1,
day=date(2017, 1, 1),
day_order=1,
)
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user1)),
[chunk2])
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user2)),
[])
chunk3 = TaskChunk.objects.create(
task=task1,
duration=1,
day=date(2015, 5, 1),
day_order=1,
)
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user1)),
[chunk3, chunk2])
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user2)),
[])
chunk3.finished = True
chunk3.save(update_fields=('finished',))
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user1)),
[chunk2])
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user2)),
[])
chunk2.finished = True
chunk2.save(update_fields=('finished',))
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user1)),
[])
self.assertListEqual(
list(TaskChunk.missed_chunks(self.user2)),
[])
[per-file quality-signal statistics omitted]

d2937ae766e79197adaf5e25f1b2b6de127304d4
roza/migrations/0005_product.py
sahin88/roza-tex @ a05fdcaac7ba0e609e579d60a20ff9b13d824c64
2,897 bytes, Python (py), license: Unlicense
# Generated by Django 3.1.7 on 2021-03-24 16:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('roza', '0004_auto_20210324_1306'),
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('product_name', models.CharField(blank=True, max_length=255, null=True)),
('product_image', models.ImageField(blank=True, null=True, upload_to='product/images')),
('weight_unit', models.CharField(blank=True, max_length=50, null=True)),
('weight_value', models.CharField(blank=True, max_length=50, null=True)),
('weft_yarn_unit', models.CharField(blank=True, max_length=50, null=True)),
('weft_yarn_value', models.CharField(blank=True, max_length=50, null=True)),
('warp_yarn_unit', models.CharField(blank=True, max_length=50, null=True)),
('warp_yarn_value', models.CharField(blank=True, max_length=50, null=True)),
('patern_unit', models.CharField(blank=True, max_length=50, null=True)),
('patern_value', models.CharField(blank=True, max_length=50, null=True)),
('md_tensile_strength_unit', models.CharField(blank=True, max_length=50, null=True)),
('md_tensile_strength_value', models.CharField(blank=True, max_length=50, null=True)),
('cd_tensile_strength_unit', models.CharField(blank=True, max_length=50, null=True)),
('cd_tensile_strength_value', models.CharField(blank=True, max_length=50, null=True)),
('elongation_at_break_unit', models.CharField(blank=True, max_length=50, null=True)),
('elongation_at_break_value', models.CharField(blank=True, max_length=50, null=True)),
('coathing_unit', models.CharField(blank=True, max_length=50, null=True)),
('coathing_value', models.CharField(blank=True, max_length=50, null=True)),
('core_size_unit', models.CharField(blank=True, max_length=50, null=True)),
('core_size_value', models.CharField(blank=True, max_length=50, null=True)),
('role_width_unit', models.CharField(blank=True, max_length=50, null=True)),
('role_width_value', models.CharField(blank=True, max_length=50, null=True)),
('yarns_type', models.CharField(blank=True, max_length=50, null=True)),
('constructions', models.CharField(blank=True, max_length=50, null=True)),
('patterns', models.CharField(blank=True, max_length=50, null=True)),
('bondings', models.CharField(blank=True, max_length=50, null=True)),
],
),
]
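
# Usage note (not part of the generated migration): this migration would
# typically be applied with `python manage.py migrate roza 0005_product`.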
[per-file quality-signal statistics and activity counters/timestamps omitted]

d2a64c74b56467515e411e07bf21332666eb2a70
DQM/SiPixelPhase1Summary/python/SiPixelPhase1Summary_cfi.py
PKUfudawei/cmssw @ 8fbb5ce74398269c8a32956d7c7943766770c093
9,423 bytes, Python (py), license: Apache-2.0
import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDHarvester import DQMEDHarvester
#
# This object is used to make changes for different running scenarios
#
SiPixelPhase1SummaryOnline = DQMEDHarvester("SiPixelPhase1Summary",
TopFolderName = cms.string('PixelPhase1/Phase1_MechanicalView/'),
RunOnEndLumi = cms.bool(True),
RunOnEndJob = cms.bool(True),
# schedule this module to run *after* the QTests.
inputGeneration = cms.untracked.string('DQMGenerationQTest'),
outputGeneration = cms.untracked.string('DQMGenerationSummary'),
SummaryMaps = cms.VPSet(
cms.PSet(
MapName = cms.string("Digi"),
MapHist = cms.string("mean_num_digis")
),
cms.PSet(
MapName = cms.string("ADC"),
MapHist = cms.string("mean_adc")
),
cms.PSet(
MapName = cms.string("NClustsTotal"),
MapHist = cms.string("mean_num_clusters")
),
cms.PSet(
MapName = cms.string("ClustWidthOnTrk"),
MapHist = cms.string("mean_size")
),
cms.PSet(
MapName = cms.string("Charge"),
MapHist = cms.string("mean_charge")
)
),
# Number of dead ROCs required to generate an error. Order must be layers 1-4, ring1, ring2.
DeadROCErrorThreshold = cms.vdouble(0.2,0.2,0.2,0.2,0.2,0.2),
DeadROCWarningThreshold = cms.vdouble(0.1,0.1,0.1,0.1,0.1,0.1)
)
SiPixelPhase1SummaryOffline = DQMEDHarvester("SiPixelPhase1Summary",
TopFolderName = cms.string('PixelPhase1/Phase1_MechanicalView/'),
RunOnEndLumi = cms.bool(False),
RunOnEndJob = cms.bool(True),
# schedule this module to run *after* the QTests.
inputGeneration = cms.untracked.string('DQMGenerationQTest'),
outputGeneration = cms.untracked.string('DQMGenerationSummary'),
SummaryMaps = cms.VPSet(
cms.PSet(
MapName = cms.string("Digi"),
MapHist = cms.string("mean_num_digis")
),
cms.PSet(
MapName = cms.string("ADC"),
MapHist = cms.string("mean_adc")
),
cms.PSet(
MapName = cms.string("NClustsTotal"),
MapHist = cms.string("mean_num_clusters")
),
cms.PSet(
MapName = cms.string("ClustWidthOnTrk"),
MapHist = cms.string("mean_size")
),
cms.PSet(
MapName = cms.string("Charge"),
MapHist = cms.string("mean_charge")
)
),
DeadROCErrorThreshold = cms.vdouble(0.2,0.2,0.2,0.2,0.2,0.2),
DeadROCWarningThreshold = cms.vdouble(0.1,0.1,0.1,0.1,0.1,0.1)
)
SiPixelPhase1SummaryCosmics = DQMEDHarvester("SiPixelPhase1Summary",
TopFolderName = cms.string('PixelPhase1/Phase1_MechanicalView/'),
RunOnEndLumi = cms.bool(False),
RunOnEndJob = cms.bool(True),
# schedule this module to run *after* the QTests.
inputGeneration = cms.untracked.string('DQMGenerationQTest'),
outputGeneration = cms.untracked.string('DQMGenerationSummary'),
SummaryMaps = cms.VPSet(
cms.PSet(
MapName = cms.string("Digi"),
MapHist = cms.string("mean_num_digis")
),
cms.PSet(
MapName = cms.string("ClustWidthOnTrk"),
MapHist = cms.string("mean_size")
),
cms.PSet(
MapName = cms.string("Charge"),
MapHist = cms.string("mean_charge")
)
),
DeadROCErrorThreshold = cms.vdouble(0.2,0.2,0.2,0.2,0.2,0.2),
DeadROCWarningThreshold = cms.vdouble(0.1,0.1,0.1,0.1,0.1,0.1)
)
from DQMServices.Core.DQMQualityTester import DQMQualityTester
ADCQTester = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_adc_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(True),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
ADCQTester_offline = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_adc_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(False),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
NumClustersQTester = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_num_clusters_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(True),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
NumClustersQTester_offline = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_num_clusters_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(False),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
NumDigisQTester = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_num_digis_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(True),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
NumDigisQTester_offline = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_num_digis_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(False),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
NumDigisQTester_cosmics = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_num_digis_qualitytest_config_cosmics.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(False),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
SizeQTester = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_size_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(True),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
SizeQTester_offline = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_size_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(False),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
SizeQTester_cosmics = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_size_qualitytest_config_cosmics.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(False),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
ChargeQTester = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_charge_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(True),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
ChargeQTester_offline = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_charge_qualitytest_config.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(False),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
ChargeQTester_cosmics = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelPhase1Config/test/qTests/mean_charge_qualitytest_config_cosmics.xml'),
QualityTestPrescaler = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(False),
qtestOnEndJob = cms.untracked.bool(True),
reportThreshold = cms.untracked.string("more")
)
RunQTests_online = cms.Sequence(ADCQTester * NumClustersQTester * NumDigisQTester * SizeQTester * ChargeQTester)
RunQTests_offline = cms.Sequence(ADCQTester_offline * NumClustersQTester_offline * NumDigisQTester_offline * SizeQTester_offline * ChargeQTester_offline)
RunQTests_cosmics = cms.Sequence(NumDigisQTester_cosmics * SizeQTester_cosmics * ChargeQTester_cosmics)
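
# Illustrative wiring sketch (not part of the original file): the summary
# harvesters are meant to run *after* the quality tests (see the comments
# above), so a consuming configuration would typically sequence them as
# below. The import path is inferred from this file's location in the
# repository and is an assumption.
#
#   import FWCore.ParameterSet.Config as cms
#   from DQM.SiPixelPhase1Summary.SiPixelPhase1Summary_cfi import (
#       RunQTests_offline, SiPixelPhase1SummaryOffline)
#
#   siPixelPhase1OfflineHarvesting = cms.Sequence(
#       RunQTests_offline * SiPixelPhase1SummaryOffline)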
[per-file quality-signal statistics omitted]

d2e0d879a18f07d00b454f00d9009cccde960886
orc8r/protos/tenants_pb2_grpc.py
aweimeow/enodebd @ e1cd20693153e6b85e5d1bf9d21af2501c358601
9,108 bytes, Python (py), licenses: Apache-2.0, BSD-3-Clause
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from orc8r.protos import common_pb2 as orc8r_dot_protos_dot_common__pb2
from orc8r.protos import tenants_pb2 as orc8r_dot_protos_dot_tenants__pb2
class TenantsServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetAllTenants = channel.unary_unary(
'/magma.orc8r.TenantsService/GetAllTenants',
request_serializer=orc8r_dot_protos_dot_common__pb2.Void.SerializeToString,
response_deserializer=orc8r_dot_protos_dot_tenants__pb2.TenantList.FromString,
)
self.GetTenant = channel.unary_unary(
'/magma.orc8r.TenantsService/GetTenant',
request_serializer=orc8r_dot_protos_dot_tenants__pb2.GetTenantRequest.SerializeToString,
response_deserializer=orc8r_dot_protos_dot_tenants__pb2.Tenant.FromString,
)
self.CreateTenant = channel.unary_unary(
'/magma.orc8r.TenantsService/CreateTenant',
request_serializer=orc8r_dot_protos_dot_tenants__pb2.IDAndTenant.SerializeToString,
response_deserializer=orc8r_dot_protos_dot_common__pb2.Void.FromString,
)
self.SetTenant = channel.unary_unary(
'/magma.orc8r.TenantsService/SetTenant',
request_serializer=orc8r_dot_protos_dot_tenants__pb2.IDAndTenant.SerializeToString,
response_deserializer=orc8r_dot_protos_dot_common__pb2.Void.FromString,
)
self.DeleteTenant = channel.unary_unary(
'/magma.orc8r.TenantsService/DeleteTenant',
request_serializer=orc8r_dot_protos_dot_tenants__pb2.GetTenantRequest.SerializeToString,
response_deserializer=orc8r_dot_protos_dot_common__pb2.Void.FromString,
)
class TenantsServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def GetAllTenants(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetTenant(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateTenant(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetTenant(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteTenant(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_TenantsServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetAllTenants': grpc.unary_unary_rpc_method_handler(
servicer.GetAllTenants,
request_deserializer=orc8r_dot_protos_dot_common__pb2.Void.FromString,
response_serializer=orc8r_dot_protos_dot_tenants__pb2.TenantList.SerializeToString,
),
'GetTenant': grpc.unary_unary_rpc_method_handler(
servicer.GetTenant,
request_deserializer=orc8r_dot_protos_dot_tenants__pb2.GetTenantRequest.FromString,
response_serializer=orc8r_dot_protos_dot_tenants__pb2.Tenant.SerializeToString,
),
'CreateTenant': grpc.unary_unary_rpc_method_handler(
servicer.CreateTenant,
request_deserializer=orc8r_dot_protos_dot_tenants__pb2.IDAndTenant.FromString,
response_serializer=orc8r_dot_protos_dot_common__pb2.Void.SerializeToString,
),
'SetTenant': grpc.unary_unary_rpc_method_handler(
servicer.SetTenant,
request_deserializer=orc8r_dot_protos_dot_tenants__pb2.IDAndTenant.FromString,
response_serializer=orc8r_dot_protos_dot_common__pb2.Void.SerializeToString,
),
'DeleteTenant': grpc.unary_unary_rpc_method_handler(
servicer.DeleteTenant,
request_deserializer=orc8r_dot_protos_dot_tenants__pb2.GetTenantRequest.FromString,
response_serializer=orc8r_dot_protos_dot_common__pb2.Void.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'magma.orc8r.TenantsService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
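
# Illustrative only (not generated): a minimal client sketch using the stub
# defined above. The channel address is hypothetical.
def _example_list_tenants():
    channel = grpc.insecure_channel('localhost:50051')
    stub = TenantsServiceStub(channel)
    # GetAllTenants takes a Void request and returns a TenantList.
    return stub.GetAllTenants(orc8r_dot_protos_dot_common__pb2.Void())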
# This class is part of an EXPERIMENTAL API.
class TenantsService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def GetAllTenants(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/magma.orc8r.TenantsService/GetAllTenants',
orc8r_dot_protos_dot_common__pb2.Void.SerializeToString,
orc8r_dot_protos_dot_tenants__pb2.TenantList.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetTenant(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/magma.orc8r.TenantsService/GetTenant',
orc8r_dot_protos_dot_tenants__pb2.GetTenantRequest.SerializeToString,
orc8r_dot_protos_dot_tenants__pb2.Tenant.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateTenant(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/magma.orc8r.TenantsService/CreateTenant',
orc8r_dot_protos_dot_tenants__pb2.IDAndTenant.SerializeToString,
orc8r_dot_protos_dot_common__pb2.Void.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetTenant(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/magma.orc8r.TenantsService/SetTenant',
orc8r_dot_protos_dot_tenants__pb2.IDAndTenant.SerializeToString,
orc8r_dot_protos_dot_common__pb2.Void.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteTenant(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/magma.orc8r.TenantsService/DeleteTenant',
orc8r_dot_protos_dot_tenants__pb2.GetTenantRequest.SerializeToString,
orc8r_dot_protos_dot_common__pb2.Void.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
[per-file quality-signal statistics and activity counters/timestamps omitted]

d2fcce5ff3cf37303c247044f31bb9e5d7cadba0
src/v5.1/resources/swagger_client/api/employment_events_api.py
xmarcosx/edfi-notebook @ 0564ebdf1d0f45a9d25056e7e61369f0a837534d
38,159 bytes, Python (py), license: Apache-2.0
# coding: utf-8
"""
Ed-Fi Operational Data Store API
The Ed-Fi ODS / API enables applications to read and write education data stored in an Ed-Fi ODS through a secure REST interface. *** > *Note: Consumers of ODS / API information should sanitize all data for display and storage. The ODS / API provides reasonable safeguards against cross-site scripting attacks and other malicious content, but the platform does not and cannot guarantee that the data it contains is free of all potentially harmful content.* *** # noqa: E501
OpenAPI spec version: 3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class EmploymentEventsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
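    # Illustrative only (not generated): typical synchronous usage, assuming
    # an ApiClient configured for an Ed-Fi ODS instance:
    #     api = EmploymentEventsApi(ApiClient())
    #     events = api.get_employment_events(limit=25)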
def delete_employment_event_by_id(self, id, **kwargs): # noqa: E501
"""Deletes an existing resource using the resource identifier. # noqa: E501
The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_employment_event_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_employment_event_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_employment_event_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_employment_event_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""Deletes an existing resource using the resource identifier. # noqa: E501
The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_employment_event_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'if_match'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_employment_event_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `delete_employment_event_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
if 'if_match' in params:
header_params['If-Match'] = params['if_match'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/tpdm/employmentEvents/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def deletes_employment_events(self, **kwargs): # noqa: E501
"""Retrieves deleted resources based on change version. # noqa: E501
The DELETES operation is used to retrieve deleted resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deletes_employment_events(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:return: list[TpdmEmploymentEvent]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.deletes_employment_events_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.deletes_employment_events_with_http_info(**kwargs) # noqa: E501
return data
def deletes_employment_events_with_http_info(self, **kwargs): # noqa: E501
"""Retrieves deleted resources based on change version. # noqa: E501
The DELETES operation is used to retrieve deleted resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deletes_employment_events_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:return: list[TpdmEmploymentEvent]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method deletes_employment_events" % key
)
params[key] = val
del params['kwargs']
if self.api_client.client_side_validation and ('limit' in params and params['limit'] > 500): # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `deletes_employment_events`, must be a value less than or equal to `500`") # noqa: E501
if self.api_client.client_side_validation and ('limit' in params and params['limit'] < 0): # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `deletes_employment_events`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'min_change_version' in params:
query_params.append(('minChangeVersion', params['min_change_version'])) # noqa: E501
if 'max_change_version' in params:
query_params.append(('maxChangeVersion', params['max_change_version'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/tpdm/employmentEvents/deletes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[TpdmEmploymentEvent]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_employment_events(self, **kwargs): # noqa: E501
"""Retrieves specific resources using the resource's property values (using the \"Get\" pattern). # noqa: E501
This GET operation provides access to resources using the \"Get\" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_employment_events(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
:param str employment_event_type_descriptor: The type of the employment event (e.g., transfer, new hire, title change).
:param int education_organization_id: The identifier assigned to an education organization.
:param str requisition_number: The number or identifier assigned to an open staff position, typically a requisition number assigned by Human Resources.
:param str internal_external_hire_descriptor: Indicates whether the hire was an internal or external person.
:param bool early_hire: Indicator of whether this was an early hire.
:param date hire_date: The month, day, and year on which an individual was hired for a position.
:param str id:
:param bool mutual_consent: Indicator of whether this was a mutual consent hire.
:param bool restricted_choice: Indicator of whether this was a restricted choice hire.
:return: list[TpdmEmploymentEvent]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_employment_events_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_employment_events_with_http_info(**kwargs) # noqa: E501
return data
def get_employment_events_with_http_info(self, **kwargs): # noqa: E501
"""Retrieves specific resources using the resource's property values (using the \"Get\" pattern). # noqa: E501
This GET operation provides access to resources using the \"Get\" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_employment_events_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
:param str employment_event_type_descriptor: The type of the employment event (e.g., transfer, new hire, title change).
:param int education_organization_id: The identifier assigned to an education organization.
:param str requisition_number: The number or identifier assigned to an open staff position, typically a requisition number assigned by Human Resources.
:param str internal_external_hire_descriptor: Indicates whether the hire was an internal or external person.
:param bool early_hire: Indicator of whether this was an early hire.
:param date hire_date: The month, day, and year on which an individual was hired for a position.
:param str id:
:param bool mutual_consent: Indicator of whether this was a mutual consent hire.
:param bool restricted_choice: Indicator of whether this was a restricted choice hire.
:return: list[TpdmEmploymentEvent]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version', 'total_count', 'employment_event_type_descriptor', 'education_organization_id', 'requisition_number', 'internal_external_hire_descriptor', 'early_hire', 'hire_date', 'id', 'mutual_consent', 'restricted_choice'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_employment_events" % key
)
params[key] = val
del params['kwargs']
if self.api_client.client_side_validation and ('limit' in params and params['limit'] > 500): # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `get_employment_events`, must be a value less than or equal to `500`") # noqa: E501
if self.api_client.client_side_validation and ('limit' in params and params['limit'] < 0): # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `get_employment_events`, must be a value greater than or equal to `0`") # noqa: E501
if self.api_client.client_side_validation and ('employment_event_type_descriptor' in params and
len(params['employment_event_type_descriptor']) > 306):
raise ValueError("Invalid value for parameter `employment_event_type_descriptor` when calling `get_employment_events`, length must be less than or equal to `306`") # noqa: E501
if self.api_client.client_side_validation and ('requisition_number' in params and
len(params['requisition_number']) > 20):
raise ValueError("Invalid value for parameter `requisition_number` when calling `get_employment_events`, length must be less than or equal to `20`") # noqa: E501
if self.api_client.client_side_validation and ('internal_external_hire_descriptor' in params and
len(params['internal_external_hire_descriptor']) > 306):
raise ValueError("Invalid value for parameter `internal_external_hire_descriptor` when calling `get_employment_events`, length must be less than or equal to `306`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'min_change_version' in params:
query_params.append(('minChangeVersion', params['min_change_version'])) # noqa: E501
if 'max_change_version' in params:
query_params.append(('maxChangeVersion', params['max_change_version'])) # noqa: E501
if 'total_count' in params:
query_params.append(('totalCount', params['total_count'])) # noqa: E501
if 'employment_event_type_descriptor' in params:
query_params.append(('employmentEventTypeDescriptor', params['employment_event_type_descriptor'])) # noqa: E501
if 'education_organization_id' in params:
query_params.append(('educationOrganizationId', params['education_organization_id'])) # noqa: E501
if 'requisition_number' in params:
query_params.append(('requisitionNumber', params['requisition_number'])) # noqa: E501
if 'internal_external_hire_descriptor' in params:
query_params.append(('internalExternalHireDescriptor', params['internal_external_hire_descriptor'])) # noqa: E501
if 'early_hire' in params:
query_params.append(('earlyHire', params['early_hire'])) # noqa: E501
if 'hire_date' in params:
query_params.append(('hireDate', params['hire_date'])) # noqa: E501
if 'id' in params:
query_params.append(('id', params['id'])) # noqa: E501
if 'mutual_consent' in params:
query_params.append(('mutualConsent', params['mutual_consent'])) # noqa: E501
if 'restricted_choice' in params:
query_params.append(('restrictedChoice', params['restricted_choice'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/tpdm/employmentEvents', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[TpdmEmploymentEvent]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_employment_events_by_id(self, id, **kwargs): # noqa: E501
"""Retrieves a specific resource using the resource's identifier (using the \"Get By Id\" pattern). # noqa: E501
This GET operation retrieves a resource by the specified resource identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_employment_events_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
:return: TpdmEmploymentEvent
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_employment_events_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_employment_events_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_employment_events_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieves a specific resource using the resource's identifier (using the \"Get By Id\" pattern). # noqa: E501
This GET operation retrieves a resource by the specified resource identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_employment_events_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
:return: TpdmEmploymentEvent
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'if_none_match'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_employment_events_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `get_employment_events_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
if 'if_none_match' in params:
header_params['If-None-Match'] = params['if_none_match'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/tpdm/employmentEvents/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TpdmEmploymentEvent', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_employment_event(self, employment_event, **kwargs): # noqa: E501
"""Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501
The POST operation can be used to create or update resources. In database terms, this is often referred to as an \"upsert\" operation (insert + update). Clients should NOT include the resource \"id\" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by \"id\"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_employment_event(employment_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TpdmEmploymentEvent employment_event: The JSON representation of the \"employmentEvent\" resource to be created or updated. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_employment_event_with_http_info(employment_event, **kwargs) # noqa: E501
else:
(data) = self.post_employment_event_with_http_info(employment_event, **kwargs) # noqa: E501
return data
def post_employment_event_with_http_info(self, employment_event, **kwargs): # noqa: E501
"""Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501
The POST operation can be used to create or update resources. In database terms, this is often referred to as an \"upsert\" operation (insert + update). Clients should NOT include the resource \"id\" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by \"id\"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_employment_event_with_http_info(employment_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TpdmEmploymentEvent employment_event: The JSON representation of the \"employmentEvent\" resource to be created or updated. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['employment_event'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_employment_event" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'employment_event' is set
if self.api_client.client_side_validation and ('employment_event' not in params or
params['employment_event'] is None): # noqa: E501
raise ValueError("Missing the required parameter `employment_event` when calling `post_employment_event`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'employment_event' in params:
body_params = params['employment_event']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/tpdm/employmentEvents', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def put_employment_event(self, id, employment_event, **kwargs): # noqa: E501
"""Updates or creates a resource based on the resource identifier. # noqa: E501
The PUT operation is used to update or create a resource by identifier. If the resource doesn't exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_employment_event(id, employment_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param TpdmEmploymentEvent employment_event: The JSON representation of the "employmentEvent" resource to be created or updated. (required)
:param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.put_employment_event_with_http_info(id, employment_event, **kwargs) # noqa: E501
else:
(data) = self.put_employment_event_with_http_info(id, employment_event, **kwargs) # noqa: E501
return data
def put_employment_event_with_http_info(self, id, employment_event, **kwargs): # noqa: E501
"""Updates or creates a resource based on the resource identifier. # noqa: E501
The PUT operation is used to update or create a resource by identifier. If the resource doesn't exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource "id" is provided in the JSON body, it will be ignored as well. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_employment_event_with_http_info(id, employment_event, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param TpdmEmploymentEvent employment_event: The JSON representation of the "employmentEvent" resource to be created or updated. (required)
:param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'employment_event', 'if_match'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method put_employment_event" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `put_employment_event`") # noqa: E501
# verify the required parameter 'employment_event' is set
if self.api_client.client_side_validation and ('employment_event' not in params or
params['employment_event'] is None): # noqa: E501
raise ValueError("Missing the required parameter `employment_event` when calling `put_employment_event`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
if 'if_match' in params:
header_params['If-Match'] = params['if_match'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'employment_event' in params:
body_params = params['employment_event']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/tpdm/employmentEvents/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
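A minimal usage sketch for the generated client above (hedged: this excerpt starts mid-class, so the enclosing class name `EmploymentEventsApi` and the GET method name `get_employment_events_by_id` are assumptions based on swagger-codegen naming conventions; only the POST/PUT method names are confirmed by the code itself):

# Hypothetical setup; constructing the ApiClient is not shown in this excerpt.
api = EmploymentEventsApi(api_client)
# POST performs an upsert keyed on natural key values; omit "id" from the body.
api.post_employment_event(employment_event)
# async_req=True returns a thread; block on .get() for the result.
thread = api.get_employment_events_by_id(resource_id, async_req=True)  # method name assumed
event = thread.get()
# PUT updates or creates by identifier; if_match sends an If-Match ETag guard.
api.put_employment_event(resource_id, employment_event, if_match=etag)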
| 52.851801
| 493
| 0.655547
| 4,681
| 38,159
| 5.142064
| 0.075625
| 0.043207
| 0.017283
| 0.017948
| 0.932447
| 0.917034
| 0.900208
| 0.894848
| 0.884254
| 0.882842
| 0
| 0.015498
| 0.26442
| 38,159
| 721
| 494
| 52.925104
| 0.842032
| 0.424094
| 0
| 0.749354
| 0
| 0.018088
| 0.24621
| 0.085056
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033592
| false
| 0
| 0.010336
| 0
| 0.093023
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| d2fd717fb08e8cbb6b556f96cded634aa2905c8a
| 163
| py
| Python
| src/reactivestate/api/observe.py
| k-groenbroek/reactivestate
| c228085dc2ca014c4aae4216e655f9cfa1d59fc5
| ["MIT"] | null | null | null
| src/reactivestate/api/observe.py
| k-groenbroek/reactivestate
| c228085dc2ca014c4aae4216e655f9cfa1d59fc5
| ["MIT"] | null | null | null
| src/reactivestate/api/observe.py
| k-groenbroek/reactivestate
| c228085dc2ca014c4aae4216e655f9cfa1d59fc5
| ["MIT"] | null | null | null
from typing import Callable
from reactivestate.core.observer import Observer
def observe(fn: Callable[[], None]) -> Callable[[], None]:
return Observer(fn)
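A brief usage sketch for `observe` (hedged: the Observer class is not shown in this record, so the re-run-on-change semantics below are assumed from the library's reactive-state naming):

from reactivestate.api.observe import observe

@observe
def render() -> None:
    # Presumably re-invoked by the Observer when tracked state changes.
    print("state changed")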
| 20.375
| 58
| 0.730061
| 20
| 163
| 5.95
| 0.6
| 0.201681
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147239
| 163
| 7
| 59
| 23.285714
| 0.856115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 7
| 8249ccdbba63ad38bef7ebef9de8806039c12e78
| 4,528
| py
| Python
| wellcom_app/migrations/0019_auto_20160926_1437.py
| nathanrogers18/WellCom
| 1e58d02d821e6dcedf809f39c3baffbf638c46db
| ["MIT"] | null | null | null
| wellcom_app/migrations/0019_auto_20160926_1437.py
| nathanrogers18/WellCom
| 1e58d02d821e6dcedf809f39c3baffbf638c46db
| ["MIT"] | 10
| 2016-11-13T23:16:53.000Z
| 2016-11-13T23:35:48.000Z
| wellcom_app/migrations/0019_auto_20160926_1437.py
| nathanrogers18/WellCom
| 1e58d02d821e6dcedf809f39c3baffbf638c46db
| ["MIT"] | null | null | null
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-26 18:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wellcom_app', '0018_auto_20160925_1004'),
]
operations = [
migrations.AlterField(
model_name='watertest',
name='aluminum',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='ammonia_nitrogen',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='calcium',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='calcium_hardness',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='chloride',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='conductivity_uscm',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='copper',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='fluoride',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='magnesium',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='magnesium_hardness',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='manganese',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='mpn_index_tc_per_deciliter',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='nitrate_nitrogen',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='nitrite_nitrogen',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='pH',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='sulphate',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='suspended_solids',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='total_alkalinity',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='total_dissolved_solids',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='total_hardness',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='total_iron',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='total_solids',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
migrations.AlterField(
model_name='watertest',
name='turbidity_ntu',
field=models.DecimalField(decimal_places=2, max_digits=15),
),
]
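Because every operation above alters a `watertest` column to the same `DecimalField(decimal_places=2, max_digits=15)`, the list can equivalently be generated in a loop; a minimal sketch using only names taken from the migration itself:

from django.db import migrations, models

DECIMAL_FIELD_NAMES = [
    'aluminum', 'ammonia_nitrogen', 'calcium', 'calcium_hardness', 'chloride',
    'conductivity_uscm', 'copper', 'fluoride', 'magnesium', 'magnesium_hardness',
    'manganese', 'mpn_index_tc_per_deciliter', 'nitrate_nitrogen',
    'nitrite_nitrogen', 'pH', 'sulphate', 'suspended_solids', 'total_alkalinity',
    'total_dissolved_solids', 'total_hardness', 'total_iron', 'total_solids',
    'turbidity_ntu',
]

# Builds the same AlterField operations as the hand-written list above.
operations = [
    migrations.AlterField(
        model_name='watertest',
        name=field_name,
        field=models.DecimalField(decimal_places=2, max_digits=15),
    )
    for field_name in DECIMAL_FIELD_NAMES
]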
| 34.564885
| 71
| 0.582155
| 424
| 4,528
| 5.990566
| 0.17217
| 0.181102
| 0.226378
| 0.262598
| 0.863386
| 0.863386
| 0.84685
| 0.84685
| 0.84685
| 0.827953
| 0
| 0.032577
| 0.308525
| 4,528
| 130
| 72
| 34.830769
| 0.778665
| 0.015018
| 0
| 0.747967
| 1
| 0
| 0.119812
| 0.01593
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.01626
| 0
| 0.04065
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 82cdb5f22479d8d48736d0eeed7b1596a3e5193d
| 2,694
| py
| Python
| tests/test_flatten.py
| nxdevel/nx_itertools
| 744da75c616a8a7991b963a549152fe9c434abd9
| ["MIT"] | null | null | null
| tests/test_flatten.py
| nxdevel/nx_itertools
| 744da75c616a8a7991b963a549152fe9c434abd9
| ["MIT"] | null | null | null
| tests/test_flatten.py
| nxdevel/nx_itertools
| 744da75c616a8a7991b963a549152fe9c434abd9
| ["MIT"] | null | null | null
"""Test for nx_itertools.recipes.flatten"""
from nx_itertools.recipes import flatten
def test_normal():
"""Test flatten."""
# flatten two iterables
data1, data2 = iter('ABC'), iter('DEF')
lst = (x for x in (data1, data2))
res = flatten(lst)
assert next(res) == 'A'
assert list(res) == ['B', 'C', 'D', 'E', 'F']
assert list(lst) == []
assert list(data1) == []
assert list(data2) == []
# flatten one iterable
data = iter('ABC')
lst = (x for x in (data,))
res = flatten(lst)
assert next(res) == 'A'
assert list(res) == ['B', 'C']
assert list(lst) == []
assert list(data) == []
# flatten two iterables, one empty
data1, data2 = iter('ABC'), ()
lst = (x for x in (data1, data2))
res = flatten(lst)
assert next(res) == 'A'
assert list(res) == ['B', 'C']
assert list(lst) == []
assert list(data1) == []
# flatten two iterables, one empty
data1, data2 = (), iter('DEF')
lst = (x for x in (data1, data2))
res = flatten(lst)
assert next(res) == 'D'
assert list(res) == ['E', 'F']
assert list(lst) == []
assert list(data2) == []
# flatten two iterables, both empty
data1, data2 = (), ()
lst = (x for x in (data1, data2))
res = flatten(lst)
assert list(res) == []
assert list(lst) == []
# flatten one iterable, empty
data = ()
lst = (x for x in (data,))
res = flatten(lst)
assert list(res) == []
assert list(lst) == []
def test_normal_peek():
"""Test flatten with peeking at iterable."""
# flatten two iterables
data1, data2 = iter('ABC'), iter('DEF')
lst = (x for x in (data1, data2))
res = flatten(lst)
assert next(res) == 'A'
assert next(data1) == 'B'
assert list(res) == ['C', 'D', 'E', 'F']
assert list(lst) == []
assert list(data1) == []
assert list(data2) == []
# flatten one iterable
data = iter('ABC')
lst = (x for x in (data,))
res = flatten(lst)
assert next(res) == 'A'
assert next(data) == 'B'
assert list(res) == ['C']
assert list(lst) == []
assert list(data) == []
# flatten two iterables, one empty
data1, data2 = iter('ABC'), ()
lst = (x for x in (data1, data2))
res = flatten(lst)
assert next(res) == 'A'
assert next(data1) == 'B'
assert list(res) == ['C']
assert list(lst) == []
assert list(data1) == []
# flatten two iterables, one empty
data1, data2 = (), iter('DEF')
lst = (x for x in (data1, data2))
res = flatten(lst)
assert next(res) == 'D'
assert next(data2) == 'E'
assert list(res) == ['F']
assert list(lst) == []
assert list(data2) == []
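The nx_itertools implementation of `flatten` is not included in this record; the classic itertools recipe below is a sketch that satisfies every assertion above, including the lazy "peek" behavior (the underlying iterators are only consumed on demand):

from itertools import chain

def flatten(list_of_lists):
    # Flatten exactly one level of nesting, lazily.
    return chain.from_iterable(list_of_lists)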
| 26.673267
| 49
| 0.535264
| 357
| 2,694
| 4.02521
| 0.106443
| 0.208768
| 0.048713
| 0.055672
| 0.824635
| 0.824635
| 0.824635
| 0.783577
| 0.783577
| 0.752958
| 0
| 0.02001
| 0.27654
| 2,694
| 100
| 50
| 26.94
| 0.717291
| 0.137713
| 0
| 0.826667
| 0
| 0
| 0.025675
| 0
| 0
| 0
| 0
| 0
| 0.56
| 1
| 0.026667
| false
| 0
| 0.013333
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 82da096d5d14043696b5c8b7a9e78e517bf5dca1
| 41,438
| py
| Python
| coremltools/test/test_keras2.py
| Mistobaan/coremltools
| e5ce33c6396077c0b267180a92573355a8ab53eb
| ["BSD-3-Clause"] | 1
| 2019-02-01T20:13:20.000Z
| 2019-02-01T20:13:20.000Z
| coremltools/test/test_keras2.py
| Mistobaan/coremltools
| e5ce33c6396077c0b267180a92573355a8ab53eb
| ["BSD-3-Clause"] | null | null | null
| coremltools/test/test_keras2.py
| Mistobaan/coremltools
| e5ce33c6396077c0b267180a92573355a8ab53eb
| ["BSD-3-Clause"] | 1
| 2018-10-30T14:07:21.000Z
| 2018-10-30T14:07:21.000Z
import unittest
from coremltools._deps import HAS_KERAS2_TF
from coremltools.proto import Model_pb2
from coremltools.proto import FeatureTypes_pb2
from coremltools.proto import NeuralNetwork_pb2
import pytest
if HAS_KERAS2_TF:
import tensorflow as tf
from keras.models import Sequential, Model
from coremltools.converters import keras
@unittest.skipIf(not HAS_KERAS2_TF, 'Missing keras. Skipping tests.')
@pytest.mark.keras2
class KerasSingleLayerTest(unittest.TestCase):
"""
Unit test class for testing the Keras (v2) converter.
"""
@classmethod
def setUpClass(self):
"""
Set up the unit test by loading common utilities.
"""
def test_dense(self):
"""
Test the conversion of Dense layer.
"""
from keras.layers import Dense
# Create a simple Keras model
model = Sequential()
model.add(Dense(32, input_dim=16))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.innerProduct)
def test_activations(self):
"""
Test the conversion for a Dense + Activation('something')
"""
from keras.layers import Dense, Activation
# Create a simple Keras model
keras_activation_options = ['elu', 'tanh', 'softplus', 'softsign', 'relu', 'sigmoid', 'hard_sigmoid', 'linear']
coreml_activation_options = ['ELU', 'tanh', 'softplus', 'softsign', 'ReLU', 'sigmoid', 'sigmoidHard', 'linear']
for i, k_act in enumerate(keras_activation_options):
c_act = coreml_activation_options[i]
model = Sequential()
model.add(Dense(32, input_dim=16))
model.add(Activation(k_act))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
self.assertIsNotNone(layers[0].innerProduct)
self.assertIsNotNone(layers[1].activation)
self.assertTrue(layers[1].activation.HasField(c_act))
def test_activation_softmax(self):
"""
Test the conversion for a Dense + Activation('softmax')
"""
from keras.layers import Dense, Activation
# Create a simple Keras model
model = Sequential()
model.add(Dense(32, input_dim=16))
model.add(Activation('softmax'))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.innerProduct)
layer_1 = layers[1]
self.assertIsNotNone(layer_1.softmax)
def test_dropout(self):
"""
Test the conversion for a Dense + Dropout
"""
from keras.layers import Dense, Dropout
# Create a simple Keras model
model = Sequential()
model.add(Dense(32, input_shape=(16,)))
model.add(Dropout(0.5))
model.add(Dense(32, input_shape=(16,)))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.innerProduct)
self.assertEquals(len(layers), 2)
def test_convolution(self, with_dilations=False):
"""
Test the conversion of 2D convolutional layer.
"""
from keras.layers import Conv2D
dilation_rate = [1, 1]
if with_dilations:
dilation_rate = [2, 2]
# Create a simple Keras model
model = Sequential()
model.add(Conv2D(input_shape=(64, 64, 3),
filters=32, kernel_size=(5,5), activation=None,
padding='valid', strides=(1, 1), use_bias=True,
dilation_rate=dilation_rate))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.convolution)
self.assertEqual(layer_0.convolution.dilationFactor, dilation_rate)
def test_convolution_dilated(self):
"""
Test the conversion of 2D convolutional layer with dilated kernels
"""
self.test_convolution(with_dilations=True)
def test_separable_convolution(self, with_dilations=False, activation=None):
"""
Test the conversion of 2D depthwise separable convolutional layer.
"""
from keras.layers import SeparableConv2D
dilation_rate = [1, 1]
if with_dilations:
dilation_rate = [2, 2]
# Create a simple Keras model
model = Sequential()
model.add(SeparableConv2D(input_shape=(64, 64, 3),
filters=32, kernel_size=(5,5),
activation=activation,
padding='valid', strides=(1, 1), use_bias=True,
dilation_rate=dilation_rate))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_depthwise, layer_pointwise = layers[0], layers[1]
self.assertIsNotNone(layer_depthwise.convolution)
self.assertIsNotNone(layer_pointwise.convolution)
self.assertEqual(layer_depthwise.convolution.dilationFactor, dilation_rate)
if activation is not None:
self.assertIsNotNone(layers[2].activation)
self.assertTrue(layers[2].activation.HasField('ELU'))
def test_separable_convolution_dilated(self):
"""
Test the conversion of 2D depthwise separable convolutional layer with dilated kernels.
"""
self.test_separable_convolution(with_dilations=True)
def test_separable_convolution_with_nonlinearity(self):
"""
Test the conversion of 2D depthwise separable convolutional layer with nonlinearity.
"""
self.test_separable_convolution(activation='elu')
def test_upsample(self):
"""
Test the conversion of 2D convolutional layer + upsample
"""
from keras.layers import Conv2D, UpSampling2D
# Create a simple Keras model
model = Sequential()
model.add(Conv2D(input_shape=(64, 64, 3), filters=32,
kernel_size=(5,5)))
model.add(UpSampling2D(size = (2, 2)))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.convolution)
layer_1 = layers[1]
self.assertIsNotNone(layer_1.upsample)
def test_pooling(self):
"""
Test the conversion of pooling layer.
"""
from keras.layers import Conv2D, MaxPooling2D
# Create a simple Keras model
model = Sequential()
model.add(Conv2D(input_shape=(64, 64, 3),
filters=32, kernel_size=(5,5), strides=(1,1), activation=None,
padding='valid', use_bias=True))
model.add(MaxPooling2D(pool_size=(2,2)))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
self.assertIsNotNone(layers[1].pooling)
def test_permute(self):
"""
Test the conversion of pooling layer.
"""
from keras.layers.core import Permute
# Create a simple Keras model
model = Sequential()
model.add(Permute((3, 2, 1), input_shape=(10, 64,3)))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.permute)
def test_lstm(self):
"""
Test the conversion of an LSTM layer.
"""
from keras.layers import LSTM
# Create a simple Keras model
model = Sequential()
model.add(LSTM(32, input_shape=(10,24)))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
print(spec)
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names)+2)
self.assertEquals(32, spec.description.input[1].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.input[2].type.multiArrayType.shape[0])
self.assertEquals(len(spec.description.output), len(output_names) + 2)
self.assertEquals(output_names[0], spec.description.output[0].name)
self.assertEquals(32, spec.description.output[0].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.output[1].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.output[2].type.multiArrayType.shape[0])
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.uniDirectionalLSTM)
self.assertEquals(len(layer_0.input), 3)
self.assertEquals(len(layer_0.output), 3)
def test_simple_rnn(self):
"""
Test the conversion of a simple RNN layer.
"""
from keras.layers import SimpleRNN
# Create a simple Keras model
model = Sequential()
model.add(SimpleRNN(32, input_shape=(10,32)))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names) + 1)
self.assertEquals(input_names[0], spec.description.input[0].name)
self.assertEquals(32, spec.description.input[1].type.multiArrayType.shape[0])
self.assertEquals(len(spec.description.output), len(output_names) + 1)
self.assertEquals(output_names[0], spec.description.output[0].name)
self.assertEquals(32, spec.description.output[0].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.output[1].type.multiArrayType.shape[0])
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.simpleRecurrent)
self.assertEquals(len(layer_0.input), 2)
self.assertEquals(len(layer_0.output), 2)
def test_gru(self):
"""
Test the conversion of a GRU layer.
"""
from keras.layers import GRU
# Create a simple Keras model
model = Sequential()
model.add(GRU(32, input_shape=(32,10)))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names) + 1)
self.assertEquals(input_names[0], spec.description.input[0].name)
self.assertEquals(32, spec.description.input[1].type.multiArrayType.shape[0])
self.assertEquals(len(spec.description.output), len(output_names) + 1)
self.assertEquals(output_names[0], spec.description.output[0].name)
self.assertEquals(32, spec.description.output[0].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.output[1].type.multiArrayType.shape[0])
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.gru)
self.assertEquals(len(layer_0.input), 2)
self.assertEquals(len(layer_0.output), 2)
def test_bidir(self):
"""
Test the conversion of a bidirectional layer
"""
from keras.layers import LSTM
from keras.layers.wrappers import Bidirectional
# Create a simple Keras model
model = Sequential()
model.add(Bidirectional(LSTM(32, input_shape=(10, 32)),
input_shape=(10, 32)))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names) + 4)
self.assertEquals(input_names[0], spec.description.input[0].name)
self.assertEquals(32, spec.description.input[1].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.input[2].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.input[3].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.input[4].type.multiArrayType.shape[0])
self.assertEquals(len(spec.description.output), len(output_names) + 4)
self.assertEquals(output_names[0], spec.description.output[0].name)
self.assertEquals(64, spec.description.output[0].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.output[1].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.output[2].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.output[3].type.multiArrayType.shape[0])
self.assertEquals(32, spec.description.output[4].type.multiArrayType.shape[0])
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.biDirectionalLSTM)
self.assertEquals(len(layer_0.input), 5)
self.assertEquals(len(layer_0.output), 5)
def test_embedding(self):
from keras.layers import Embedding
model = Sequential()
num_inputs = 10
num_outputs = 3
model.add(Embedding(num_inputs, num_outputs, input_length=5))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
layer_0 = layers[0]
self.assertIsNotNone(layer_0.embedding)
self.assertEquals(layer_0.embedding.inputDim, num_inputs)
self.assertEquals(layer_0.embedding.outputChannels, num_outputs)
self.assertEquals(len(layer_0.embedding.weights.floatValue), num_inputs*num_outputs)
def test_sentiment_analysis(self):
"""
Test the conversion for a Embedding + LSTM + Dense layer
"""
from keras.layers import Dense, Embedding, LSTM
# Create a simple Keras model
max_features = 50
embedded_dim = 32
sequence_length = 10
model = Sequential()
# Embedding layer example:
# Embedding(1000, 64, input_length=10) input_dim=index(0~999), 64-dimensional vector, sequence length = 10
# If we have Dense/Flatten layer upstream, input_length, a.k.a sequence_length is required
model.add(Embedding(max_features, embedded_dim, input_length=sequence_length))
# output_dim = 32
model.add(LSTM(32))
model.add(Dense(1, activation='sigmoid'))
# Input/output
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
# We're giving state input and output so expect description to differ.
self.assertEquals(len(spec.description.input), len(input_names) + 2)
self.assertEquals(len(spec.description.output), len(output_names) + 2)
# Test the layer parameters.
layers = spec.neuralNetwork.layers
self.assertIsNotNone(layers[0].embedding)
self.assertIsNotNone(layers[1].uniDirectionalLSTM)
self.assertIsNotNone(layers[2].innerProduct)
def test_conv1d_lstm(self):
from keras.layers import Conv1D, LSTM, Dense
model = Sequential()
# input_shape = (time_step, dimensions)
model.add(Conv1D(32,3,padding='same',input_shape=(10,8)))
# conv1d output shape = (None, 10, 32)
model.add(LSTM(24))
model.add(Dense(1, activation='sigmoid'))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names) + 2)
self.assertEquals(len(spec.description.output), len(output_names) + 2)
# Test the layer parameters.
layers = spec.neuralNetwork.layers
self.assertIsNotNone(layers[0].convolution)
self.assertIsNotNone(layers[1].uniDirectionalLSTM)  # a Keras LSTM converts to uniDirectionalLSTM (see test_lstm), not simpleRecurrent
self.assertIsNotNone(layers[2].innerProduct)
def test_batchnorm(self):
"""
Test the conversion for a Convolution2D + Batchnorm layer
"""
from keras.layers import Conv2D
from keras.layers.normalization import BatchNormalization
# Create a simple Keras model
model = Sequential()
model.add(Conv2D(input_shape=(64, 64, 3),
filters=32, kernel_size=(5,5), strides=(1,1), activation=None,
padding='valid', use_bias=True))
# epsilon in CoreML is currently fixed at 1e-5
model.add(BatchNormalization(epsilon=1e-5))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
# Test the layer parameters.
layers = spec.neuralNetwork.layers
self.assertIsNotNone(layers[0].convolution)
self.assertIsNotNone(layers[1].batchnorm)
def test_repeat_vector(self):
from keras.layers import RepeatVector
model = Sequential()
model.add(RepeatVector(3, input_shape=(5,)))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(output_names))
self.assertEqual(sorted(output_names),
sorted(map(lambda x: x.name, spec.description.output)))
layers = spec.neuralNetwork.layers
self.assertIsNotNone(layers[0].sequenceRepeat)
@pytest.mark.xfail(raises = ValueError)
def test_unsupported_variational_deconv(self):
from keras.layers import Input, Lambda, Conv2D, Flatten, Dense
x = Input(shape=(8,8,3))
conv_1 = Conv2D(4, (2, 2), padding='same', activation='relu')(x)
flat = Flatten()(conv_1)
hidden = Dense(10, activation='relu')(flat)
z_mean = Dense(10)(hidden)
z_log_var = Dense(10)(hidden)
def sampling(args):
z_mean, z_log_var = args
return z_mean + z_log_var
z = Lambda(sampling, output_shape=(10,))([z_mean, z_log_var])
model = Model([x], [z])
spec = keras.convert(model, ['input'], ['output']).get_spec()
def test_image_processing(self):
"""
Test the image-processing parameters.
"""
from keras.layers import Conv2D
# Create a simple Keras model
model = Sequential()
model.add(Conv2D(input_shape=(64, 64, 3),
filters=32, kernel_size=(5,5),
activation=None, padding='valid',
strides=(1, 1), use_bias=True))
input_names = ['input']
output_names = ['output']
spec = keras.convert(model, input_names, output_names, image_input_names =
['input'], red_bias = 110.0, blue_bias = 117.0, green_bias = 120.0,
is_bgr = True, image_scale = 1.0).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
self.assertEquals(spec.description.input[0].type.WhichOneof('Type'),
'imageType')
self.assertEquals(spec.description.input[0].type.imageType.colorSpace,
FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('BGR'))
# Test the layer parameters.
preprocessing = spec.neuralNetwork.preprocessing[0]
self.assertTrue(preprocessing.HasField('scaler'))
pr_0 = preprocessing.scaler
print('pr_0.channelScale = ', pr_0.channelScale)
print('pr_0.redBias = ', pr_0.redBias)
print('pr_0.blueBias = ', pr_0.blueBias)
print('pr_0.greenBias = ', pr_0.greenBias)
self.assertIsNotNone(pr_0.redBias)
self.assertIsNotNone(pr_0.greenBias)
self.assertIsNotNone(pr_0.blueBias)
self.assertIsNotNone(pr_0.channelScale)
self.assertEqual(pr_0.channelScale, 1.0)
self.assertEqual(pr_0.redBias, 110.0)
self.assertEqual(pr_0.blueBias, 117.0)
self.assertEqual(pr_0.greenBias, 120.0)
# Configuration 2: isbgr = False
spec = keras.convert(model, input_names, output_names, image_input_names =
['input'], red_bias = 110.0, blue_bias = 117.0, green_bias = 120.0,
is_bgr = False, image_scale = 1.0).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
self.assertEquals(spec.description.input[0].type.WhichOneof('Type'),
'imageType')
self.assertEquals(spec.description.input[0].type.imageType.colorSpace,
FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('RGB'))
# Test the layer parameters.
preprocessing = spec.neuralNetwork.preprocessing[0]
self.assertTrue(preprocessing.HasField('scaler'))
pr_0 = preprocessing.scaler
self.assertIsNotNone(pr_0.redBias)
self.assertIsNotNone(pr_0.greenBias)
self.assertIsNotNone(pr_0.blueBias)
self.assertIsNotNone(pr_0.channelScale)
self.assertEqual(pr_0.channelScale, 1.0)
self.assertEqual(pr_0.redBias, 110.0)
self.assertEqual(pr_0.blueBias, 117.0)
self.assertEqual(pr_0.greenBias, 120.0)
# Configuration 3: Defaults
spec = keras.convert(model, input_names, output_names, image_input_names =
['input'], is_bgr = False, image_scale = 1.0).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
self.assertEquals(spec.description.input[0].type.WhichOneof('Type'),
'imageType')
self.assertEquals(spec.description.input[0].type.imageType.colorSpace,
FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('RGB'))
# Test the layer parameters.
preprocessing = spec.neuralNetwork.preprocessing[0]
self.assertTrue(preprocessing.HasField('scaler'))
pr_0 = preprocessing.scaler
self.assertIsNotNone(pr_0.redBias)
self.assertIsNotNone(pr_0.greenBias)
self.assertIsNotNone(pr_0.blueBias)
self.assertIsNotNone(pr_0.channelScale)
self.assertEqual(pr_0.channelScale, 1.0)
self.assertEqual(pr_0.redBias, 0.0)
self.assertEqual(pr_0.blueBias, 0.0)
self.assertEqual(pr_0.greenBias, 0.0)
def test_classifier_string_classes(self):
from keras.layers import Dense
from keras.layers import Activation
# Create a simple Keras model
model = Sequential()
model.add(Dense(32, input_shape=(16,)))
model.add(Activation('softmax'))
classes = ['c%s' % i for i in range(32)]
input_names = ['input']
output_names = ['prob_output']
expected_output_names = ['prob_output', 'classLabel']
spec = keras.convert(model, input_names, output_names, class_labels = classes).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetworkClassifier'))
self.assertFalse(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(expected_output_names))
self.assertEquals(expected_output_names,
list(map(lambda x: x.name, spec.description.output)))
# Check the types
self.assertEquals(spec.description.output[0].type.WhichOneof('Type'), 'dictionaryType')
self.assertEquals(spec.description.output[0].type.dictionaryType.WhichOneof('KeyType'), 'stringKeyType')
self.assertEquals(spec.description.output[1].type.WhichOneof('Type'), 'stringType')
self.assertEqual(spec.description.predictedFeatureName, 'classLabel')
self.assertEqual(spec.description.predictedProbabilitiesName, 'prob_output')
# Test the class parameters
self.assertEqual(spec.WhichOneof('Type'), 'neuralNetworkClassifier', "Expected a NN classifier model")
self.assertEqual(spec.neuralNetworkClassifier.WhichOneof('ClassLabels'), 'stringClassLabels')
class_from_proto = list(spec.neuralNetworkClassifier.stringClassLabels.vector)
self.assertEqual(sorted(classes), sorted(class_from_proto))
def test_classifier_file(self):
from keras.layers import Dense
from keras.layers import Activation
import os
import tempfile
# Create a simple Keras model
model = Sequential()
model.add(Dense(32, input_shape=(16,)))
model.add(Activation('softmax'))
classes = ['c%s' % i for i in range(32)]
classes_file = tempfile.mktemp()
with open(classes_file, 'w') as f:
f.write('\n'.join(classes))
input_names = ['input']
output_names = ['prob_output']
expected_output_names = ['prob_output', 'classLabel']
spec = keras.convert(model, input_names, output_names, class_labels = classes_file).get_spec()  # pass the file written above, as the test name implies
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetworkClassifier'))
self.assertFalse(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(expected_output_names))
self.assertEquals(expected_output_names,
list(map(lambda x: x.name, spec.description.output)))
# Check the types
self.assertEquals(spec.description.output[0].type.WhichOneof('Type'), 'dictionaryType')
self.assertEquals(spec.description.output[0].type.dictionaryType.WhichOneof('KeyType'), 'stringKeyType')
self.assertEquals(spec.description.output[1].type.WhichOneof('Type'), 'stringType')
self.assertEqual(spec.description.predictedFeatureName, 'classLabel')
self.assertEqual(spec.description.predictedProbabilitiesName, 'prob_output')
# cleanup
os.remove(classes_file)
def test_classifier_integer_classes(self):
from keras.layers import Dense
from keras.layers import Activation
# Create a simple Keras model
model = Sequential()
model.add(Dense(32, input_shape=(16,)))
model.add(Activation('softmax'))
classes = list(range(32))
input_names = ['input']
output_names = ['prob_output']
expected_output_names = ['prob_output', 'classLabel']
spec = keras.convert(model, input_names, output_names, class_labels = classes).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetworkClassifier'))
self.assertFalse(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(expected_output_names))
self.assertEquals(expected_output_names,
list(map(lambda x: x.name, spec.description.output)))
# Check the types
self.assertEquals(spec.description.output[0].type.WhichOneof('Type'), 'dictionaryType')
self.assertEquals(spec.description.output[0].type.dictionaryType.WhichOneof('KeyType'), 'int64KeyType')
self.assertEquals(spec.description.output[1].type.WhichOneof('Type'), 'int64Type')
self.assertEqual(spec.description.predictedFeatureName, 'classLabel')
self.assertEqual(spec.description.predictedProbabilitiesName, 'prob_output')
# Test the class parameters
self.assertEqual(spec.WhichOneof('Type'), 'neuralNetworkClassifier', "Expected a NN classifier model")
self.assertEqual(spec.neuralNetworkClassifier.WhichOneof('ClassLabels'), 'int64ClassLabels')
class_from_proto = list(spec.neuralNetworkClassifier.int64ClassLabels.vector)
self.assertEqual(sorted(classes), sorted(class_from_proto))
def test_classifier_custom_class_name(self):
from keras.layers import Dense
from keras.layers import Activation
# Create a simple Keras model
model = Sequential()
model.add(Dense(32, input_shape=(16,)))
model.add(Activation('softmax'))
classes = ['c%s' % i for i in range(32)]
input_names = ['input']
output_names = ['prob_output']
expected_output_names = ['prob_output', 'my_foo_bar_class_output']
spec = keras.convert(model, input_names, output_names, class_labels = classes,
predicted_feature_name = 'my_foo_bar_class_output').get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetworkClassifier'))
self.assertFalse(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(input_names))
self.assertEqual(sorted(input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(expected_output_names))
self.assertEquals(expected_output_names,
list(map(lambda x: x.name, spec.description.output)))
# Check the types
self.assertEquals(spec.description.output[0].type.WhichOneof('Type'), 'dictionaryType')
self.assertEquals(spec.description.output[0].type.dictionaryType.WhichOneof('KeyType'), 'stringKeyType')
self.assertEquals(spec.description.output[1].type.WhichOneof('Type'), 'stringType')
self.assertEqual(spec.description.predictedFeatureName, 'my_foo_bar_class_output')
self.assertEqual(spec.description.predictedProbabilitiesName, 'prob_output')
# Test the class parameters
self.assertEqual(spec.WhichOneof('Type'), 'neuralNetworkClassifier', "Expected a NN classifier model")
self.assertEqual(spec.neuralNetworkClassifier.WhichOneof('ClassLabels'), 'stringClassLabels')
class_from_proto = list(spec.neuralNetworkClassifier.stringClassLabels.vector)
self.assertEqual(sorted(classes), sorted(class_from_proto))
def test_default_interface_names(self):
from keras.layers import Dense
from keras.layers import Activation
# Create a simple Keras model
model = Sequential()
model.add(Dense(32, input_shape=(16,)))
model.add(Activation('softmax'))
expected_input_names = ['input1']
expected_output_names = ['output1']
spec = keras.convert(model).get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
self.assertTrue(spec.HasField('neuralNetwork'))
# Test the inputs and outputs
self.assertEquals(len(spec.description.input), len(expected_input_names))
self.assertEqual(sorted(expected_input_names),
sorted(map(lambda x: x.name, spec.description.input)))
self.assertEquals(len(spec.description.output), len(expected_output_names))
self.assertEquals(sorted(expected_output_names),
sorted(map(lambda x: x.name, spec.description.output)))
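One caveat worth noting about the assertion style above: reading an unset protobuf oneof submessage (e.g. `layers[1].simpleRecurrent`) returns a default instance rather than None, so `assertIsNotNone` can pass even when the wrong layer type was produced. A stricter check, sketched here under the assumption that the NeuralNetworkLayer oneof is named 'layer' in the CoreML proto (verify against the actual schema):

layer = spec.neuralNetwork.layers[1]
assert layer.WhichOneof('layer') == 'uniDirectionalLSTM'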
| 41.314058
| 119
| 0.651431
| 4,683
| 41,438
| 5.647662
| 0.065129
| 0.087908
| 0.052405
| 0.039133
| 0.861237
| 0.835375
| 0.814995
| 0.800514
| 0.779908
| 0.766712
| 0
| 0.018502
| 0.238284
| 41,438
| 1,002
| 120
| 41.355289
| 0.819415
| 0.097447
| 0
| 0.727134
| 0
| 0
| 0.048918
| 0.006247
| 0
| 0
| 0
| 0
| 0.443598
| 1
| 0.045732
| false
| 0
| 0.065549
| 0
| 0.114329
| 0.007622
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 7dcc82bb8a62743c254686dd3dffe450185bbfb2
| 26,850
| py
| Python
| napalm_yang/models/openconfig/network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/__init__.py
| ckishimo/napalm-yang
| 8f2bd907bd3afcde3c2f8e985192de74748baf6c
| ["Apache-2.0"] | 64
| 2016-10-20T15:47:18.000Z
| 2021-11-11T11:57:32.000Z
| napalm_yang/models/openconfig/network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/__init__.py
| ckishimo/napalm-yang
| 8f2bd907bd3afcde3c2f8e985192de74748baf6c
| ["Apache-2.0"] | 126
| 2016-10-05T10:36:14.000Z
| 2019-05-15T08:43:23.000Z
| napalm_yang/models/openconfig/network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/__init__.py
| ckishimo/napalm-yang
| 8f2bd907bd3afcde3c2f8e985192de74748baf6c
| ["Apache-2.0"] | 63
| 2016-11-07T15:23:08.000Z
| 2021-09-22T14:41:16.000Z
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
class path_selection_group(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/policy-forwarding/path-selection-groups/path-selection-group. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: A path selection group is a set of forwarding resources,
which are grouped as eligible paths for a particular
policy-based forwarding rule. A policy rule may select a
path-selection-group as the egress for a particular type of
traffic (e.g., DSCP value). The system then utilises its
standard forwarding lookup mechanism to select from the
paths that are specified within the group - for IP packets,
the destination IP address is used such that the packet is
routed to the entity within the path-selection-group that
corresponds to the next-hop for the destination IP address
of the packet; for L2 packets, the selection is based on the
destination MAC address.
"""
__slots__ = ("_path_helper", "_extmethods", "__group_id", "__config", "__state")
_yang_name = "path-selection-group"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__group_id = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="group-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"policy-forwarding",
"path-selection-groups",
"path-selection-group",
]
def _get_group_id(self):
"""
Getter method for group_id, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/group_id (leafref)
YANG Description: Reference to a unique identifier for the path selection
group
"""
return self.__group_id
def _set_group_id(self, v, load=False):
"""
Setter method for group_id, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/group_id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_group_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_group_id() directly.
YANG Description: Reference to a unique identifier for the path selection
group
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="group-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """group_id must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
}
)
self.__group_id = t
if hasattr(self, "_set"):
self._set()
def _unset_group_id(self):
self.__group_id = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="group-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/config (container)
YANG Description: Configuration parameters relating to the path selection
group.
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the path selection
group.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/state (container)
YANG Description: Operational state parameters relating to the path
selection group.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: Operational state parameters relating to the path
selection group.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
group_id = __builtin__.property(_get_group_id, _set_group_id)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict(
[("group_id", group_id), ("config", config), ("state", state)]
)
from . import config
from . import state
class path_selection_group(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/policy-forwarding/path-selection-groups/path-selection-group. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: A path selection group is a set of forwarding resources,
which are grouped as eligible paths for a particular
policy-based forwarding rule. A policy rule may select a
path-selection-group as the egress for a particular type of
traffic (e.g., DSCP value). The system then utilises its
standard forwarding lookup mechanism to select from the
paths that are specified within the group - for IP packets,
the destination IP address is used such that the packet is
routed to the entity within the path-selection-group that
corresponds to the next-hop for the destination IP address
of the packet; for L2 packets, the selection is based on the
destination MAC address.
"""
__slots__ = ("_path_helper", "_extmethods", "__group_id", "__config", "__state")
_yang_name = "path-selection-group"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__group_id = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="group-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"policy-forwarding",
"path-selection-groups",
"path-selection-group",
]
def _get_group_id(self):
"""
Getter method for group_id, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/group_id (leafref)
YANG Description: Reference to a unique identifier for the path selection
group
"""
return self.__group_id
def _set_group_id(self, v, load=False):
"""
Setter method for group_id, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/group_id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_group_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_group_id() directly.
YANG Description: Reference to a unique identifier for the path selection
group
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="group-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """group_id must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
}
)
self.__group_id = t
if hasattr(self, "_set"):
self._set()
def _unset_group_id(self):
self.__group_id = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="group-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/config (container)
YANG Description: Configuration parameters relating to the path selection
group.
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the path selection
group.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/state (container)
YANG Description: Operational state parameters relating to the path
selection group.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/policy_forwarding/path_selection_groups/path_selection_group/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: Operational state parameters relating to the path
selection group.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
group_id = __builtin__.property(_get_group_id, _set_group_id)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = OrderedDict(
[("group_id", group_id), ("config", config), ("state", state)]
)
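# --- Illustrative usage sketch (not part of the generated bindings) ---
# A minimal example of driving these pyangbind classes. The generated package
# name `ocbind` and the keys "DEFAULT"/"PSG1" below are hypothetical;
# YANGListType.add() and pybindJSON.dumps() are real pyangbind calls.
import pyangbind.lib.pybindJSON as pybindJSON
from ocbind import openconfig_network_instance  # hypothetical generated package
oc = openconfig_network_instance()
ni = oc.network_instances.network_instance.add("DEFAULT")
# add() creates the keyed list entry and populates its group-id key leaf.
psg = ni.policy_forwarding.path_selection_groups.path_selection_group.add("PSG1")
print(pybindJSON.dumps(psg))  # serialise the populated container to JSON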
| 39.718935
| 377
| 0.61594
| 2,948
| 26,850
| 5.386024
| 0.076323
| 0.06235
| 0.044086
| 0.050762
| 0.976382
| 0.962716
| 0.962716
| 0.962716
| 0.962716
| 0.962716
| 0
| 0.001056
| 0.2946
| 26,850
| 675
| 378
| 39.777778
| 0.837276
| 0.25054
| 0
| 0.876543
| 0
| 0.012346
| 0.247667
| 0.078363
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045267
| false
| 0
| 0.039095
| 0
| 0.13786
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
|
7de70638998f316cf6caea190a150bed96675bb5
| 194
|
py
|
Python
|
tests/provider_delivery/conftest.py
|
alphagov/notify-functional-tests
|
5d15be45500f381629c32dba7650dd77c9f58a2e
|
[
"MIT"
] | 3
|
2017-03-01T18:17:36.000Z
|
2019-05-15T12:32:05.000Z
|
tests/provider_delivery/conftest.py
|
alphagov/notify-functional-tests
|
5d15be45500f381629c32dba7650dd77c9f58a2e
|
[
"MIT"
] | 110
|
2016-03-09T16:42:24.000Z
|
2021-11-22T16:51:21.000Z
|
tests/provider_delivery/conftest.py
|
alphagov/notify-functional-tests
|
5d15be45500f381629c32dba7650dd77c9f58a2e
|
[
"MIT"
] | 4
|
2017-11-21T17:14:56.000Z
|
2021-04-10T19:11:26.000Z
|
import pytest
from config import setup_staging_live_config
@pytest.fixture(scope="session", autouse=True)
def staging_live_config():
"""
Setup
"""
setup_staging_live_config()
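# Illustrative note (not part of the original conftest): because the fixture
# above is scope="session" and autouse=True, pytest runs it exactly once before
# the first test and applies it to every test in the package without it being
# requested by name. A test therefore needs no explicit fixture argument:
def test_example_runs_with_live_config():
    # setup_staging_live_config() has already executed by this point.
    pass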
| 16.166667
| 46
| 0.726804
| 24
| 194
| 5.541667
| 0.541667
| 0.24812
| 0.383459
| 0.330827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170103
| 194
| 11
| 47
| 17.636364
| 0.826087
| 0.025773
| 0
| 0
| 0
| 0
| 0.040462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
|
8177bcadbd47f984bcffbba0c1442df2686cd4f0
| 41,986
|
py
|
Python
|
src/python/procyon/spec.py
|
orbea/procyon
|
469d94427d3b6e7cc2ab93606bdf968717a49150
|
[
"Apache-2.0"
] | null | null | null |
src/python/procyon/spec.py
|
orbea/procyon
|
469d94427d3b6e7cc2ab93606bdf968717a49150
|
[
"Apache-2.0"
] | null | null | null |
src/python/procyon/spec.py
|
orbea/procyon
|
469d94427d3b6e7cc2ab93606bdf968717a49150
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
from .error import Error
from .parse_enums import Acc, Emit, Key
LEX_BYTE_CLASSES = bytearray(b"\000\000\000\000\000\000\000\000\000\001\002\000\000\000\000\000"
b"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"
b"\001\003\004\005\006\007\007\007\007\007\010\011\012\013\014\015"
b"\016\017\020\020\020\020\020\020\021\021\022\007\007\007\023\007"
b"\007\024\024\024\025\026\024\027\027\027\027\027\027\027\027\027"
b"\027\027\027\027\027\030\027\027\027\027\027\031\032\033\007\027"
b"\007\034\035\024\025\036\037\027\027\040\027\027\041\027\042\027"
b"\027\027\043\044\045\046\027\027\027\027\027\047\050\051\007\000"
b"\052\052\052\052\052\052\052\052\052\052\052\052\052\052\052\052"
b"\053\053\053\053\053\053\053\053\053\053\053\053\053\053\053\053"
b"\054\054\054\054\054\054\054\054\054\054\054\054\054\054\054\054"
b"\054\054\054\054\054\054\054\054\054\054\054\054\054\054\054\054"
b"\055\055\056\056\056\056\056\056\056\056\056\056\056\056\056\056"
b"\056\056\056\056\056\056\056\056\056\056\056\056\056\056\056\056"
b"\057\060\060\060\060\060\060\060\060\060\060\060\060\061\060\060"
b"\062\063\063\063\064\055\055\055\055\055\055\055\055\055\055\055")
LEX_TRANSITIONS = [
bytearray(b"\214\220\201\113\041\125\115\220\117\002\124\003\001\001\004\005\005\005\013\067"
b"\001\001\001\001\001\120\220\121\001\001\001\024\031\001\014\001\001\020\001\122"
b"\101\123\215\215\215\215\215\215\215\215\215\215\215"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\004\005\005\005\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\031\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\004\005\005\005\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\034\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\324\324\324\324\324\324\324\324\324\001\324\001\006\001\001\001\001\001\013\324"
b"\001\001\010\001\001\324\324\324\001\001\010\001\001\001\001\001\001\001\001\324"
b"\324\324\324\324\324\324\324\324\324\324\324\324\324"),
bytearray(b"\324\324\324\324\324\324\324\324\324\001\324\001\006\001\005\005\005\005\013\324"
b"\001\001\010\001\001\324\324\324\001\001\010\001\001\001\001\001\001\001\001\324"
b"\324\324\324\324\324\324\324\324\324\324\324\324\324"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\007\007\007\007\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\325\325\325\325\325\325\325\325\325\001\325\001\001\001\007\007\007\007\013\325"
b"\001\001\010\001\001\325\325\325\001\001\010\001\001\001\001\001\001\001\001\325"
b"\325\325\325\325\325\325\325\325\325\325\325\325\325"),
bytearray(b"\223\223\223\223\223\223\223\223\223\011\223\011\001\001\012\012\012\012\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\012\012\012\012\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\325\325\325\325\325\325\325\325\325\001\325\001\001\001\012\012\012\012\013\325"
b"\001\001\001\001\001\325\325\325\001\001\001\001\001\001\001\001\001\001\001\325"
b"\325\325\325\325\325\325\325\325\325\325\325\325\325"),
bytearray(b"\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322"
b"\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322\322"
b"\322\322\322\322\322\322\322\322\322\322\322\322\322"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\037\001\001\001\001\001\001\001\001\001\015\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\016\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\017\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\314\314\314\314\314\314\314\314\314\001\314\001\001\001\001\001\001\001\013\314"
b"\001\001\001\001\001\314\314\314\001\001\001\001\001\001\001\001\001\001\001\314"
b"\314\314\314\314\314\314\314\314\314\314\314\314\314"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\001\021\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\001\001\001\001\022\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\023\001\001\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\315\315\315\315\315\315\315\315\315\001\315\001\001\001\001\001\001\001\013\315"
b"\001\001\001\001\001\315\315\315\001\001\001\001\001\001\001\001\001\001\001\315"
b"\315\315\315\315\315\315\315\315\315\315\315\315\315"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\025\001\001\001\001\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\026\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\001\001\027\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\030\001\001\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\316\316\316\316\316\316\316\316\316\001\316\001\001\001\001\001\001\001\013\316"
b"\001\001\001\001\001\316\316\316\001\001\001\001\001\001\001\001\001\001\001\316"
b"\316\316\316\316\316\316\316\316\316\316\316\316\316"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\032\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\033\001\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\317\317\317\317\317\317\317\317\317\001\317\001\001\001\001\001\001\001\013\317"
b"\001\001\001\001\001\317\317\317\001\001\001\001\001\001\001\001\001\001\001\317"
b"\317\317\317\317\317\317\317\317\317\317\317\317\317"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\035\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\036\001\001\001\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\320\320\320\320\320\320\320\320\320\001\320\001\001\001\001\001\001\001\013\320"
b"\001\001\001\001\001\320\320\320\001\001\001\001\001\001\001\001\001\001\001\320"
b"\320\320\320\320\320\320\320\320\320\320\320\320\320"),
bytearray(b"\223\223\223\223\223\223\223\223\223\001\223\001\001\001\001\001\001\001\013\223"
b"\001\001\001\001\001\223\223\223\001\001\001\001\001\001\040\001\001\001\001\223"
b"\223\223\223\223\223\223\223\223\223\223\223\223\223"),
bytearray(b"\321\321\321\321\321\321\321\321\321\001\321\001\001\001\001\001\001\001\013\321"
b"\001\001\001\001\001\321\321\321\001\001\001\001\001\001\001\001\001\001\001\321"
b"\321\321\321\321\321\321\321\321\321\321\321\321\321"),
bytearray(b"\214\041\226\041\065\041\041\041\041\041\041\041\041\041\041\041\041\041\041\041"
b"\041\041\041\041\041\041\051\041\041\041\041\041\041\041\041\041\041\041\041\041"
b"\041\041\216\216\216\216\050\042\047\043\044\046\045"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\050\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\050\050\217\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\047\047\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\047\217\217\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\047\047\047\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\050\050\050\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\041\041\041\217\217\217\217\217\217\217\217"),
bytearray(b"\224\224\226\224\041\224\224\224\224\224\224\224\224\041\224\224\224\224\224\224"
b"\224\224\224\224\052\224\041\224\224\041\224\041\224\224\041\041\224\041\060\224"
b"\224\224\224\224\224\224\224\224\224\224\224\224\224"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\053\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\054\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\056\055\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\057\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\060\057\057\057\225\225"
b"\057\057\057\225\225\225\225\225\057\057\057\057\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\061\061\061\061\225\225"
b"\061\061\061\225\225\225\225\225\061\061\061\061\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\061\061\061\061\225\225"
b"\061\064\061\225\225\225\225\225\061\061\061\061\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\062\062\062\062\225\225"
b"\062\062\062\225\225\225\225\225\062\062\062\062\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\063\063\063\063\225\225"
b"\063\063\063\225\225\225\225\225\063\063\063\063\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\041\041\041\041\225\225"
b"\041\041\041\225\225\225\225\225\041\041\041\041\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\225\225\226\225\225\225\225\225\225\225\225\225\225\225\062\062\062\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225\225"
b"\225\225\225\225\225\225\225\225\225\225\225\225\225"),
bytearray(b"\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327\066\327"
b"\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327\327"
b"\327\327\327\327\327\327\327\327\327\327\327\327\327"),
bytearray(b"\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323"
b"\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323\323"
b"\323\323\323\323\323\323\323\323\323\323\323\323\323"),
bytearray(b"\214\077\311\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100"
b"\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100"
b"\100\100\216\216\216\216\076\070\075\071\072\074\073"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\076\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\076\076\217\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\075\075\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\075\217\217\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\075\075\075\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\076\076\076\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\100\100\100\217\217\217\217\217\217\217\217"),
bytearray(b"\214\100\311\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100"
b"\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100"
b"\100\100\216\216\216\216\076\070\075\071\072\074\073"),
bytearray(b"\214\100\330\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100"
b"\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100\100"
b"\100\100\216\216\216\216\076\070\075\071\072\074\073"),
bytearray(b"\214\111\312\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112"
b"\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112"
b"\112\112\216\216\216\216\110\102\107\103\104\106\105"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\110\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\110\110\217\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\107\107\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\107\217\217\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\107\107\107\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\110\110\110\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\112\112\112\217\217\217\217\217\217\217\217"),
bytearray(b"\214\112\312\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112"
b"\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112"
b"\112\112\216\216\216\216\110\102\107\103\104\106\105"),
bytearray(b"\214\112\331\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112"
b"\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112\112"
b"\112\112\216\216\216\216\110\102\107\103\104\106\105"),
bytearray(b"\227\114\313\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227"
b"\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227"
b"\227\227\227\227\227\227\227\227\227\227\227\227\227"),
bytearray(b"\214\227\313\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227"
b"\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227\227"
b"\227\227\215\215\215\215\215\215\215\215\215\215\215"),
bytearray(b"\326\115\326\326\326\326\326\326\326\221\326\221\221\221\116\116\116\116\326\326"
b"\116\116\116\221\221\326\326\326\116\116\116\116\221\221\221\221\221\221\221\326"
b"\326\326\326\326\326\326\326\326\326\326\326\326\326"),
bytearray(b"\222\222\222\222\222\222\222\222\222\221\222\221\221\221\115\115\115\115\222\222"
b"\115\115\115\221\221\222\222\222\115\115\115\115\221\221\221\221\221\221\221\222"
b"\222\222\222\222\222\222\222\222\222\222\222\222\222"),
bytearray(b"\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303"
b"\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303\303"
b"\303\303\303\303\303\303\303\303\303\303\303\303\303"),
bytearray(b"\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304"
b"\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304\304"
b"\304\304\304\304\304\304\304\304\304\304\304\304\304"),
bytearray(b"\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305"
b"\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305\305"
b"\305\305\305\305\305\305\305\305\305\305\305\305\305"),
bytearray(b"\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306"
b"\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306\306"
b"\306\306\306\306\306\306\306\306\306\306\306\306\306"),
bytearray(b"\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307"
b"\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307\307"
b"\307\307\307\307\307\307\307\307\307\307\307\307\307"),
bytearray(b"\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310"
b"\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310\310"
b"\310\310\310\310\310\310\310\310\310\310\310\310\310"),
bytearray(b"\214\125\332\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125"
b"\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125"
b"\125\125\216\216\216\216\134\126\133\127\130\132\131"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\134\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\134\134\217\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\133\133\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\133\217\217\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\133\133\133\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\134\134\134\217\217\217\217\217\217\217\217"),
bytearray(b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217\217"
b"\217\217\125\125\125\217\217\217\217\217\217\217\217"),
]
PARSE_DEFS = [
(Error.LONG, None, [], [], None),
(None, None, [1], [], None),
(None, Emit.NULL, [], [], None),
(None, Emit.TRUE, [], [], None),
(None, Emit.FALSE, [], [], None),
(None, Emit.INF, [], [], None),
(None, Emit.NEG_INF, [], [], None),
(None, Emit.NAN, [], [], None),
(None, Emit.INT, [], [], None),
(None, Emit.FLOAT, [], [], None),
(None, Emit.STRING, [], [], None),
(None, Emit.DATA, [], [], None),
(None, Emit.SHORT_ARRAY_IN, [17], [], None),
(None, Emit.SHORT_MAP_IN, [21], [], None),
(None, None, [2], [], None),
(None, Emit.NULL, [3], [], None),
(None, Emit.TRUE, [3], [], None),
(None, Emit.FALSE, [3], [], None),
(None, Emit.INF, [3], [], None),
(None, Emit.NEG_INF, [3], [], None),
(None, Emit.NAN, [3], [], None),
(None, Emit.INT, [3], [], None),
(None, Emit.FLOAT, [3], [], None),
(None, Emit.STRING, [3], [], None),
(None, None, [9], [Acc.DATA], None),
(None, None, [13], [Acc.STRING], None),
(None, None, [11], [], None),
(None, None, [15], [], None),
(None, Emit.SHORT_ARRAY_IN, [3, 17], [], None),
(None, Emit.LONG_ARRAY_IN, [19, 0], [], None),
(None, Emit.SHORT_MAP_IN, [3, 21], [], None),
(None, Emit.LONG_MAP_IN, [24, 0], [], Key.UNQUOTED),
(None, Emit.LONG_MAP_IN, [24, 0], [], Key.QUOTED),
(Error.SUFFIX, None, [], [], None),
(None, None, [2, 5], [], None),
(None, None, [3, 5], [], None),
(None, None, [4], [], None),
(None, None, [], [], None),
(None, None, [3], [], None),
(Error.SIBLING, None, [], [], None),
(Error.CHILD, None, [], [], None),
(None, None, [6], [], None),
(None, None, [6, 5], [], None),
(None, None, [5], [], None),
(Error.SHORT, None, [], [], None),
(None, None, [9], [], None),
(None, None, [9, 5], [], None),
(None, None, [8], [], None),
(None, Emit.ACC_DATA, [], [], None),
(None, None, [13], [Acc.NL, Acc.STRING], None),
(None, None, [11], [Acc.NL], None),
(None, None, [11, 5], [], None),
(None, None, [10], [], None),
(None, Emit.ACC_STRING, [], [Acc.NL], None),
(None, None, [13], [], None),
(None, None, [13], [Acc.SP, Acc.STRING], None),
(None, None, [13, 5], [], None),
(None, None, [12], [], None),
(Error.BANG_LAST, None, [], [], None),
(None, None, [15, 5], [], None),
(None, None, [14], [], None),
(None, Emit.ACC_STRING, [], [], None),
(Error.ARRAY_END, None, [], [], None),
(None, None, [16, 7], [], None),
(None, Emit.SHORT_ARRAY_OUT, [], [], None),
(None, Emit.NULL, [16], [], None),
(None, Emit.TRUE, [16], [], None),
(None, Emit.FALSE, [16], [], None),
(None, Emit.INF, [16], [], None),
(None, Emit.NEG_INF, [16], [], None),
(None, Emit.NAN, [16], [], None),
(None, Emit.INT, [16], [], None),
(None, Emit.FLOAT, [16], [], None),
(None, Emit.STRING, [16], [], None),
(None, Emit.DATA, [16], [], None),
(None, Emit.SHORT_ARRAY_IN, [16, 17], [], None),
(None, Emit.SHORT_MAP_IN, [16, 21], [], None),
(None, None, [19], [], None),
(None, None, [19, 0], [], None),
(None, None, [19, 5], [], None),
(None, None, [18], [], None),
(None, Emit.LONG_ARRAY_OUT, [], [], None),
(Error.MAP_END, None, [], [], None),
(None, None, [20, 22], [], None),
(None, Emit.SHORT_MAP_OUT, [], [], None),
(Error.MAP_KEY, None, [], [], None),
(None, None, [20, 7], [], Key.UNQUOTED),
(None, None, [20, 7], [], Key.QUOTED),
(None, None, [7], [], Key.UNQUOTED),
(None, None, [7], [], Key.QUOTED),
(None, None, [24], [], None),
(None, None, [24, 0], [], Key.UNQUOTED),
(None, None, [24, 0], [], Key.QUOTED),
(None, None, [24, 5], [], None),
(None, None, [23], [], None),
(None, Emit.LONG_MAP_OUT, [], [], None),
]
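# Illustrative note (not part of spec.py): judging by the imports from
# parse_enums, each PARSE_DEFS entry reads as a 5-tuple of (error-to-raise,
# token-to-emit, parser states to push, accumulator actions, map-key kind).
# The exact semantics live in the parser driver, outside this file, so treat
# this reading as an inference.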
PARSE_TABLE = [
[PARSE_DEFS[1], PARSE_DEFS[0], PARSE_DEFS[0], PARSE_DEFS[0], PARSE_DEFS[12],
PARSE_DEFS[0], PARSE_DEFS[13], PARSE_DEFS[0], PARSE_DEFS[0], PARSE_DEFS[0],
PARSE_DEFS[0], PARSE_DEFS[0], PARSE_DEFS[2], PARSE_DEFS[3], PARSE_DEFS[4],
PARSE_DEFS[5], PARSE_DEFS[6], PARSE_DEFS[7], PARSE_DEFS[0], PARSE_DEFS[0],
PARSE_DEFS[8], PARSE_DEFS[9], PARSE_DEFS[11], PARSE_DEFS[10], PARSE_DEFS[0],
PARSE_DEFS[0], PARSE_DEFS[0]],
[PARSE_DEFS[0], PARSE_DEFS[0], PARSE_DEFS[0], PARSE_DEFS[29], PARSE_DEFS[28],
PARSE_DEFS[0], PARSE_DEFS[30], PARSE_DEFS[0], PARSE_DEFS[0], PARSE_DEFS[26],
PARSE_DEFS[26], PARSE_DEFS[27], PARSE_DEFS[15], PARSE_DEFS[16], PARSE_DEFS[17],
PARSE_DEFS[18], PARSE_DEFS[19], PARSE_DEFS[20], PARSE_DEFS[31], PARSE_DEFS[32],
PARSE_DEFS[21], PARSE_DEFS[22], PARSE_DEFS[24], PARSE_DEFS[23], PARSE_DEFS[25],
PARSE_DEFS[25], PARSE_DEFS[14]],
[PARSE_DEFS[34], PARSE_DEFS[1], PARSE_DEFS[0], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33]],
[PARSE_DEFS[35], PARSE_DEFS[36], PARSE_DEFS[37], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[38]],
[PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[0], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[38]],
[PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[0], PARSE_DEFS[40], PARSE_DEFS[40],
PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40],
PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40],
PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40],
PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40], PARSE_DEFS[40],
PARSE_DEFS[40], PARSE_DEFS[41]],
[PARSE_DEFS[42], PARSE_DEFS[43], PARSE_DEFS[37], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33]],
[PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[12],
PARSE_DEFS[44], PARSE_DEFS[13], PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[44],
PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[2], PARSE_DEFS[3], PARSE_DEFS[4],
PARSE_DEFS[5], PARSE_DEFS[6], PARSE_DEFS[7], PARSE_DEFS[44], PARSE_DEFS[44],
PARSE_DEFS[8], PARSE_DEFS[9], PARSE_DEFS[11], PARSE_DEFS[10], PARSE_DEFS[44],
PARSE_DEFS[44], PARSE_DEFS[44]],
[PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[24], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[45]],
[PARSE_DEFS[46], PARSE_DEFS[47], PARSE_DEFS[48], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[45]],
[PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[50],
PARSE_DEFS[50], PARSE_DEFS[27], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[49],
PARSE_DEFS[49], PARSE_DEFS[26]],
[PARSE_DEFS[51], PARSE_DEFS[52], PARSE_DEFS[53], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33]],
[PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[50],
PARSE_DEFS[50], PARSE_DEFS[27], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[55],
PARSE_DEFS[49], PARSE_DEFS[54]],
[PARSE_DEFS[56], PARSE_DEFS[57], PARSE_DEFS[53], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33]],
[PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[58],
PARSE_DEFS[58], PARSE_DEFS[58], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[58],
PARSE_DEFS[58], PARSE_DEFS[27]],
[PARSE_DEFS[59], PARSE_DEFS[60], PARSE_DEFS[61], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33]],
[PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62],
PARSE_DEFS[64], PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[63], PARSE_DEFS[62],
PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62],
PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62],
PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62], PARSE_DEFS[62],
PARSE_DEFS[62], PARSE_DEFS[62]],
[PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[75],
PARSE_DEFS[64], PARSE_DEFS[76], PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[44],
PARSE_DEFS[44], PARSE_DEFS[44], PARSE_DEFS[65], PARSE_DEFS[66], PARSE_DEFS[67],
PARSE_DEFS[68], PARSE_DEFS[69], PARSE_DEFS[70], PARSE_DEFS[44], PARSE_DEFS[44],
PARSE_DEFS[71], PARSE_DEFS[72], PARSE_DEFS[74], PARSE_DEFS[73], PARSE_DEFS[44],
PARSE_DEFS[44], PARSE_DEFS[44]],
[PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[78], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[77]],
[PARSE_DEFS[79], PARSE_DEFS[80], PARSE_DEFS[81], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[77]],
[PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82],
PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[84], PARSE_DEFS[83], PARSE_DEFS[82],
PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82],
PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82],
PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82], PARSE_DEFS[82],
PARSE_DEFS[82], PARSE_DEFS[82]],
[PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85],
PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[84], PARSE_DEFS[85], PARSE_DEFS[85],
PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85],
PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[86], PARSE_DEFS[87],
PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85],
PARSE_DEFS[85], PARSE_DEFS[85]],
[PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85],
PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85],
PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85],
PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[88], PARSE_DEFS[89],
PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85], PARSE_DEFS[85],
PARSE_DEFS[85], PARSE_DEFS[85]],
[PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[91], PARSE_DEFS[92],
PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39], PARSE_DEFS[39],
PARSE_DEFS[39], PARSE_DEFS[90]],
[PARSE_DEFS[93], PARSE_DEFS[94], PARSE_DEFS[95], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33], PARSE_DEFS[33],
PARSE_DEFS[33], PARSE_DEFS[90]],
]
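# Illustrative sketch (not part of spec.py): one conventional way tables like
# these are driven. Each input byte is mapped to a character class, which then
# indexes the current state's transition row. Treating entries >= 0o200 as
# terminal action codes rather than ordinary states is an assumption made here
# for illustration; the authoritative interpretation belongs to the parser
# driver, not this file.
def lex_step(state, byte):
    nxt = LEX_TRANSITIONS[state][LEX_BYTE_CLASSES[byte]]
    if nxt >= 0o200:
        return None, nxt & 0o177  # assumed: terminal action, low bits = code
    return nxt, None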
| 75.514388
| 97
| 0.63495
| 8,117
| 41,986
| 3.196008
| 0.033633
| 0.303446
| 0.416313
| 0.512528
| 0.906407
| 0.868707
| 0.854522
| 0.846427
| 0.821679
| 0.820638
| 0
| 0.472887
| 0.143524
| 41,986
| 555
| 98
| 75.65045
| 0.248526
| 0.0005
| 0
| 0.417122
| 0
| 0.537341
| 0.494233
| 0.494233
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003643
| 0
| 0.003643
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 13
|
819d1d38f39bef6fae65e4b00d60387eba9927cb
| 11,039
|
py
|
Python
|
tests/test_stream_sql.py
|
agtiwari/cobra
|
b1c857ca5246985ae45dd94fbf0d7ffb82e3ee63
|
[
"BSD-3-Clause"
] | 33
|
2019-07-26T20:12:44.000Z
|
2022-02-17T06:06:45.000Z
|
tests/test_stream_sql.py
|
agtiwari/cobra
|
b1c857ca5246985ae45dd94fbf0d7ffb82e3ee63
|
[
"BSD-3-Clause"
] | 17
|
2019-07-26T19:09:50.000Z
|
2022-02-10T00:55:15.000Z
|
tests/test_stream_sql.py
|
agtiwari/cobra
|
b1c857ca5246985ae45dd94fbf0d7ffb82e3ee63
|
[
"BSD-3-Clause"
] | 3
|
2021-02-24T01:18:51.000Z
|
2021-06-07T05:29:22.000Z
|
'''Copyright (c) 2018-2019 Machine Zone, Inc. All rights reserved.'''
from cobras.server.stream_sql import match_stream_sql_filter
def test_answer():
miso_sql_filter = "SELECT * FROM blah WHERE device.game = 'miso'"
miso_msg = {'device': {'game': 'miso'}}
ody_msg = {'device': {'game': 'ody'}}
assert match_stream_sql_filter(miso_sql_filter, miso_msg)
assert not match_stream_sql_filter(miso_sql_filter, ody_msg)
def test_invalid_sql():
sql_filter = "SELECT *"
msg = {'device': {'game': 'miso'}}
assert not match_stream_sql_filter(sql_filter, msg)
def test_answer_bad_message_list():
sql_filter = "SELECT * FROM blah WHERE device.game = 'miso'"
msg = []
assert not match_stream_sql_filter(sql_filter, msg)
def test_answer_bad_message_list_of_list():
sql_filter = "SELECT * FROM blah WHERE device.game = 'miso'"
msg = [[]]
assert not match_stream_sql_filter(sql_filter, msg)
def test_answer_bad_message_string():
sql_filter = "SELECT * FROM blah WHERE device.game = 'miso'"
msg = 'asdcasdcadcascd'
assert not match_stream_sql_filter(sql_filter, msg)
def test_answer_bad_message_none():
sql_filter = "SELECT * FROM blah WHERE device.game = 'miso'"
msg = None
assert not match_stream_sql_filter(sql_filter, msg)
def test_answer_bad_message_dictionary():
sql_filter = "SELECT * FROM blah"
msg = {}
assert match_stream_sql_filter(sql_filter, msg) == {}
def test_filter_is_none():
sql_filter = None
msg = []
assert not match_stream_sql_filter(sql_filter, msg)
def test_check_filter_sms_id():
sql_filter = "SELECT * FROM blah WHERE id = 'sms_test_id'"
msg_pass = {'id': 'sms_test_id'}
msg_fail = {'id': 'bar'}
assert match_stream_sql_filter(sql_filter, msg_pass)
assert not match_stream_sql_filter(sql_filter, msg_fail)
def test_check_filter_nested():
sql_filter = "SELECT * FROM blah WHERE foo.bar.baz = '10'"
msg_pass = {'foo': {'bar': {'baz': '10'}}}
msg_fail = {'id': 'bar'}
assert match_stream_sql_filter(sql_filter, msg_pass)
assert not match_stream_sql_filter(sql_filter, msg_fail)
# FIXME: ints are unsupported
def _test_check_filter_int():
sql_filter = "SELECT * FROM blah WHERE baz = 10"
msg_pass = {'baz': 10}
msg_fail = {'baz': 11}
assert match_stream_sql_filter(sql_filter, msg_pass)
assert not match_stream_sql_filter(sql_filter, msg_fail)
def test_check_and_combination():
sql_filter = "SELECT * FROM blah WHERE game = 'ody' AND os_name = 'Android'"
msg_pass = {'game': 'ody', 'os_name': 'Android'}
msg_fail = {'game': 'ody', 'os_name': 'iOS'}
assert match_stream_sql_filter(sql_filter, msg_pass)
assert not match_stream_sql_filter(sql_filter, msg_fail)
def test_check_or_combination():
sql_filter = "SELECT * FROM blah WHERE game = 'ody' OR os_name = 'Android'"
msg_pass = {'game': 'niso', 'os_name': 'Android'}
msg_fail = {'game': 'ody', 'os_name': 'iOS'}
assert match_stream_sql_filter(sql_filter, msg_pass)
assert match_stream_sql_filter(sql_filter, msg_fail)
def test_like_statement():
miso_sql_filter = "SELECT * FROM blah WHERE device.game LIKE '_iso'"
miso_msg = {'device': {'game': 'miso'}}
ody_msg = {'device': {'game': 'ody'}}
assert match_stream_sql_filter(miso_sql_filter, miso_msg)
assert not match_stream_sql_filter(miso_sql_filter, ody_msg)
def test_like_statement2():
miso_sql_filter = "SELECT * FROM blah WHERE device.game LIKE '*iso'"
miso_msg = {'device': {'game': 'werwerweriso'}}
ody_msg = {'device': {'game': 'ody'}}
assert match_stream_sql_filter(miso_sql_filter, miso_msg)
assert not match_stream_sql_filter(miso_sql_filter, ody_msg)
def test_like_statement3():
miso_sql_filter = "SELECT * FROM blah WHERE session LIKE '*aa'"
good_session = {'session': 'asdcasdcasdcadscaa'}
bad_session = {'session': 'asdcasdcasdcadcasbb'}
assert match_stream_sql_filter(miso_sql_filter, good_session)
assert not match_stream_sql_filter(miso_sql_filter, bad_session)
def test_booleans_true():
essential_sql_filter = "SELECT * FROM blah WHERE data.essential = true"
essential_msg = {'data': {'essential': True}}
non_essential_msg = {'data': {'essential': False}}
assert match_stream_sql_filter(essential_sql_filter, essential_msg)
assert not match_stream_sql_filter(essential_sql_filter, non_essential_msg)
def test_booleans_false():
essential_sql_filter = "SELECT * FROM blah WHERE data.essential = false"
essential_msg = {'data': {'essential': True}}
non_essential_msg = {'data': {'essential': False}}
assert not match_stream_sql_filter(essential_sql_filter, essential_msg)
assert match_stream_sql_filter(essential_sql_filter, non_essential_msg)
def test_int_equal():
sql_filter = "SELECT * FROM blah WHERE data.file_count = 10"
low_file_count_msg = {'data': {'file_count': 10}}
high_file_count_msg = {'data': {'file_count': 100}}
assert match_stream_sql_filter(sql_filter, low_file_count_msg)
assert not match_stream_sql_filter(sql_filter, high_file_count_msg)
def test_int_multiple_conditions():
sql_filter = """SELECT * FROM `blah` WHERE data.file_count = 16
AND data.payload_KB = 5637
AND data.essential = true
"""
hit = {
'data': {
'bundled_assets_check': '6988e3449c7ec3f4',
'essential': True,
'failed_patches_count': 0,
'file_count': 16,
'manifest_bundled_assets_check': '6988e3449c7ec3f4',
'patches_applicable': True,
'payload_KB': 5637,
'runtime_second': 15.72719062300166,
'storage_empty': True,
}
}
miss = {'data': {'file_count': 100}}
assert match_stream_sql_filter(sql_filter, hit)
assert not match_stream_sql_filter(sql_filter, miss)
def test_different_operand():
sql_filter = """SELECT * FROM `blah` WHERE data.file_count != 16
AND data.essential != true
AND data.description != 'content_sync'
"""
hit = {
'data': {
'essential': False,
'file_count': 17,
'description': 'content_sync_oops',
}
}
miss = {
'data': {'essential': True, 'file_count': 15, 'description': 'content_sync'}
}
assert match_stream_sql_filter(sql_filter, hit)
assert not match_stream_sql_filter(sql_filter, miss)
def test_larger_than_operand():
sql_filter = """SELECT * FROM `blah` WHERE data.file_count > 16
"""
hit = {'data': {'file_count': 17}}
miss = {'data': {'file_count': 15}}
assert match_stream_sql_filter(sql_filter, hit)
assert not match_stream_sql_filter(sql_filter, miss)
def test_select_with_subfields_1():
sql_filter = """SELECT device.app_version FROM `blah`
"""
hit = {'data': {'file_count': 17}, 'device': {'app_version': '4.3.2'}}
# FIXME: implement 'fail case'
# miss = {'data': {'file_count': 15}}
assert {"device.app_version": "4.3.2"} == match_stream_sql_filter(sql_filter, hit)
def test_select_with_subfields_2():
sql_filter = """SELECT device.app_version FROM `blah` WHERE data.file_count > 16
"""
hit = {'data': {'file_count': 17}, 'device': {'app_version': '4.3.2'}}
miss = {'data': {'file_count': 15}}
assert {"device.app_version": "4.3.2"} == match_stream_sql_filter(sql_filter, hit)
assert not match_stream_sql_filter(sql_filter, miss)
def test_select_with_multiple_subfields():
sql_filter = """SELECT device.app_version,data.file_count FROM `blah` WHERE data.file_count > 16
"""
hit = {'data': {'file_count': 17}, 'device': {'app_version': '4.3.2'}}
miss = {'data': {'file_count': 15}}
assert {
"device.app_version": "4.3.2",
'data.file_count': 17,
} == match_stream_sql_filter(sql_filter, hit)
assert not match_stream_sql_filter(sql_filter, miss)
def test_select_with_multiple_subfields_and_space():
sql_filter = """SELECT device.app_version, data.file_count FROM `blah` WHERE data.file_count > 16
"""
hit = {'data': {'file_count': 17}, 'device': {'app_version': '4.3.2'}}
miss = {'data': {'file_count': 15}}
assert {
"device.app_version": "4.3.2",
'data.file_count': 17,
} == match_stream_sql_filter(sql_filter, hit)
assert not match_stream_sql_filter(sql_filter, miss)
def test_select_with_multiple_subfields_and_space_with_aliases_A():
sql_filter = """SELECT device.app_version AS app_version, data.file_count AS file_count
FROM `blah` WHERE data.file_count > 16
"""
hit = {'data': {'file_count': 17}, 'device': {'app_version': '4.3.2'}}
miss = {'data': {'file_count': 15}}
assert {"app_version": "4.3.2", 'file_count': 17} == match_stream_sql_filter(
sql_filter, hit
)
assert not match_stream_sql_filter(sql_filter, miss)
def test_select_with_multiple_subfields_and_space_with_aliases_B():
sql_filter = """SELECT device.app_version AS app_version, data.file_count
FROM `blah` WHERE data.file_count > 16
"""
hit = {'data': {'file_count': 17}, 'device': {'app_version': '4.3.2'}}
miss = {'data': {'file_count': 15}}
assert {"app_version": "4.3.2", 'data.file_count': 17} == match_stream_sql_filter(
sql_filter, hit
)
assert not match_stream_sql_filter(sql_filter, miss)
def test_select_with_multiple_subfields_and_space_with_aliases_C():
sql_filter = """SELECT device.app_version, data.file_count AS file_count
FROM `blah` WHERE data.file_count > 16
"""
hit = {'data': {'file_count': 17}, 'device': {'app_version': '4.3.2'}}
miss = {'data': {'file_count': 15}}
assert {"device.app_version": "4.3.2", 'file_count': 17} == match_stream_sql_filter(
sql_filter, hit
)
assert not match_stream_sql_filter(sql_filter, miss)
def test_select_field_with_zero_integer_value():
sql_filter = """SELECT data.previous_scene_spent_time FROM `blah` """
hit = {'data': {"previous_scene_spent_time": 0}}
assert {"data.previous_scene_spent_time": 0} == match_stream_sql_filter(
sql_filter, hit
)
def test_select_field_with_false_boolean_value():
sql_filter = """SELECT data.on_application_inactive FROM `blah` """
hit = {'data': {"on_application_inactive": False}}
assert {"data.on_application_inactive": False} == match_stream_sql_filter(
sql_filter, hit
)
def test_select_field_with_empty_string_value():
sql_filter = """SELECT data.scene_name FROM `blah` """
hit = {'data': {"scene_name": ""}}
assert {"data.scene_name": ""} == match_stream_sql_filter(sql_filter, hit)
| 33.553191
| 101
| 0.660658
| 1,484
| 11,039
| 4.533019
| 0.098383
| 0.185967
| 0.112383
| 0.160547
| 0.82206
| 0.783707
| 0.747287
| 0.737773
| 0.720678
| 0.657054
| 0
| 0.021692
| 0.210707
| 11,039
| 328
| 102
| 33.655488
| 0.750373
| 0.014222
| 0
| 0.446429
| 0
| 0
| 0.322237
| 0.020875
| 0
| 0
| 0
| 0.003049
| 0.236607
| 1
| 0.142857
| false
| 0.044643
| 0.004464
| 0
| 0.147321
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81b4abdadf361f9d8d14e6e840afb50fbcfd2124
| 5,572
|
py
|
Python
|
tests/test_task.py
|
RaphiOriginal/blindAutomation
|
47f087be0ef33983cfc372abe09760c9a64f1849
|
[
"MIT"
] | 1
|
2020-08-20T19:43:14.000Z
|
2020-08-20T19:43:14.000Z
|
tests/test_task.py
|
RaphiOriginal/blindAutomation
|
47f087be0ef33983cfc372abe09760c9a64f1849
|
[
"MIT"
] | null | null | null |
tests/test_task.py
|
RaphiOriginal/blindAutomation
|
47f087be0ef33983cfc372abe09760c9a64f1849
|
[
"MIT"
] | null | null | null |
import unittest
from blind_automation.building.blind.blind import Blind
from blind_automation.building.state import State
from blind_automation.jobs.task import PreTilt, Open, Close, Tilt
from tests.mock.device import DeviceMock
class Task(unittest.TestCase):
def test_done_pretilt_open_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Open(blind).do()
self.assertEqual(1, device.open_counter)
self.assertEqual(State.OPEN, device.stats())
prepare = PreTilt(blind)
self.assertFalse(prepare.done())
def test_done_pretilt_close_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Close(blind).do()
self.assertEqual(1, device.close_counter)
self.assertEqual(State.CLOSED, device.stats())
prepare = PreTilt(blind)
self.assertTrue(prepare.done())
def test_done_pretilt_tilt_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Tilt(blind).do()
self.assertEqual(1, device.tilt_counter)
self.assertEqual(State.TILT, device.stats())
prepare = PreTilt(blind)
self.assertTrue(prepare.done())
def test_done_pretilt_moved_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
device.move(50)
prepare = PreTilt(blind)
self.assertFalse(prepare.done())
def test_done_tilt_open_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Open(blind).do()
self.assertEqual(1, device.open_counter)
self.assertEqual(State.OPEN, device.stats())
tilt = Tilt(blind)
self.assertFalse(tilt.done())
def test_done_tilt_close_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Close(blind).do()
self.assertEqual(1, device.close_counter)
self.assertEqual(State.CLOSED, device.stats())
tilt = Tilt(blind)
self.assertFalse(tilt.done())
def test_done_tilt_tilt_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Tilt(blind).do()
self.assertEqual(1, device.tilt_counter)
self.assertEqual(State.TILT, device.stats())
tilt = Tilt(blind)
self.assertTrue(tilt.done())
def test_done_tilt_different_tilt_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Tilt(blind, 45).do()
self.assertEqual(1, device.tilt_counter)
self.assertEqual(State.TILT, device.stats())
tilt = Tilt(blind)
self.assertFalse(tilt.done())
def test_done_tilt_moved_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
device.move(50)
tilt = Tilt(blind)
self.assertFalse(tilt.done())
def test_done_close_open_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Open(blind).do()
self.assertEqual(1, device.open_counter)
self.assertEqual(State.OPEN, device.stats())
close = Close(blind)
self.assertFalse(close.done())
def test_done_close_close_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Close(blind).do()
self.assertEqual(1, device.close_counter)
self.assertEqual(State.CLOSED, device.stats())
close = Close(blind)
self.assertTrue(close.done())
def test_done_close_tilt_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Tilt(blind).do()
self.assertEqual(1, device.tilt_counter)
self.assertEqual(State.TILT, device.stats())
close = Close(blind)
self.assertFalse(close.done())
def test_done_close_moved_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
device.move(50)
close = Close(blind)
self.assertFalse(close.done())
def test_done_open_open_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Open(blind).do()
self.assertEqual(1, device.open_counter)
self.assertEqual(State.OPEN, device.stats())
task = Open(blind)
self.assertTrue(task.done())
def test_done_open_close_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Close(blind).do()
self.assertEqual(1, device.close_counter)
self.assertEqual(State.CLOSED, device.stats())
task = Open(blind)
self.assertFalse(task.done())
def test_done_open_tilt_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
Tilt(blind).do()
self.assertEqual(1, device.tilt_counter)
self.assertEqual(State.TILT, device.stats())
task = Open(blind)
self.assertFalse(task.done())
def test_done_open_moved_state(self):
device = DeviceMock('TestDevice')
blind = Blind('Test', 0, 0, device, [], [])
device.move(50)
task = Open(blind)
self.assertFalse(task.done())
if __name__ == '__main__':
unittest.main()
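Aside: the tests above assume only a small contract from DeviceMock: per-action counters plus a stats() accessor for the last state. A hypothetical stand-in showing that assumed contract; the real mock lives in tests/mock/device.py and may differ:

from enum import Enum, auto

class State(Enum):  # stand-in for blind_automation.building.state.State
    OPEN = auto()
    CLOSED = auto()
    TILT = auto()
    MOVED = auto()

class FakeDevice:
    def __init__(self, name: str):
        self.name = name
        self.open_counter = 0
        self.close_counter = 0
        self.tilt_counter = 0
        self._state = None

    def open(self):
        self.open_counter += 1
        self._state = State.OPEN

    def close(self):
        self.close_counter += 1
        self._state = State.CLOSED

    def tilt(self, degree=None):
        self.tilt_counter += 1
        self._state = State.TILT

    def move(self, position: int):
        self._state = State.MOVED

    def stats(self):
        # The tasks' done() checks compare against this last-known state.
        return self._state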
| 35.265823
| 65
| 0.611271
| 642
| 5,572
| 5.160436
| 0.065421
| 0.117718
| 0.056444
| 0.128283
| 0.909749
| 0.905826
| 0.871416
| 0.860549
| 0.860549
| 0.860549
| 0
| 0.013559
| 0.245513
| 5,572
| 157
| 66
| 35.490446
| 0.7745
| 0
| 0
| 0.786765
| 0
| 0
| 0.044149
| 0
| 0
| 0
| 0
| 0
| 0.316176
| 1
| 0.125
| false
| 0
| 0.036765
| 0
| 0.169118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81c10f29fef2716acc894fb4d7129c7499f90265
| 4,309
|
py
|
Python
|
nflpool/data/points.py
|
michaelizergit/nflpool
|
157a00a1807aaff3ecc08f6cdf3075c2de90a295
|
[
"MIT"
] | 9
|
2016-12-25T16:42:20.000Z
|
2021-03-19T02:52:55.000Z
|
nflpool/data/points.py
|
michaelizergit/nflpool
|
157a00a1807aaff3ecc08f6cdf3075c2de90a295
|
[
"MIT"
] | 64
|
2017-08-20T14:54:05.000Z
|
2020-03-11T19:07:18.000Z
|
nflpool/data/points.py
|
michaelizergit/nflpool
|
157a00a1807aaff3ecc08f6cdf3075c2de90a295
|
[
"MIT"
] | 6
|
2019-01-11T01:20:57.000Z
|
2021-03-19T02:52:57.000Z
|
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
# Point values for each category
class Points(SqlAlchemyBase):
__tablename__ = "Points"
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
# Division Points
afc_east_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_east_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_east_last_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_north_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_north_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_north_last_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_south_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_south_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_south_last_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_west_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_west_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_west_last_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_east_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_east_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_east_last_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_north_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_north_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_north_last_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_south_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_south_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_south_last_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_west_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_west_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_west_last_pts = sqlalchemy.Column(sqlalchemy.Integer)
# Playoff Wildcards
afc_wildcard1_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_wildcard2_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_wildcard1_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_wildcard2_pts = sqlalchemy.Column(sqlalchemy.Integer)
# Individual Stats
afc_rushing_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_rushing_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_rushing_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_passing_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_passing_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_passing_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_receiving_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_receiving_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_receiving_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_sacks_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_sacks_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_sacks_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_int_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_int_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
afc_int_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_rushing_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_rushing_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_rushing_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_passing_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_passing_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_passing_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_receiving_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_receiving_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_receiving_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_sacks_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_sacks_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_sacks_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_int_first_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_int_second_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_int_third_pts = sqlalchemy.Column(sqlalchemy.Integer)
# Conference Team with most Points For
afc_pf_pts = sqlalchemy.Column(sqlalchemy.Integer)
nfc_pf_pts = sqlalchemy.Column(sqlalchemy.Integer)
# Tiebreaker
specialteams_td_pts = sqlalchemy.Column(sqlalchemy.Integer)
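Aside: a minimal, self-contained sketch of how a declarative model like this is typically created and written to, assuming SQLAlchemy 1.4+; DemoBase and PointsDemo are illustrative stand-ins, not nflpool code:

import sqlalchemy
from sqlalchemy.orm import declarative_base, Session

DemoBase = declarative_base()

class PointsDemo(DemoBase):
    __tablename__ = "PointsDemo"
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
    afc_east_first_pts = sqlalchemy.Column(sqlalchemy.Integer)

engine = sqlalchemy.create_engine("sqlite:///:memory:")
DemoBase.metadata.create_all(engine)  # emits CREATE TABLE for all models
with Session(engine) as session:
    session.add(PointsDemo(afc_east_first_pts=10))
    session.commit()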
| 44.885417
| 84
| 0.803667
| 522
| 4,309
| 6.287356
| 0.101533
| 0.302255
| 0.491164
| 0.6234
| 0.907374
| 0.896405
| 0.814747
| 0
| 0
| 0
| 0
| 0.001055
| 0.120214
| 4,309
| 95
| 85
| 45.357895
| 0.86468
| 0.029937
| 0
| 0
| 0
| 0
| 0.001438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.090909
| 0.030303
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
81c2a523f3828d4e6ea6a930dc2cd1cd6e159210
| 144
|
py
|
Python
|
model_sel.py
|
sunnylgz/faceapi
|
7de817a2924d7ad4cbbca9778c7a7ec0e4427458
|
[
"MIT"
] | null | null | null |
model_sel.py
|
sunnylgz/faceapi
|
7de817a2924d7ad4cbbca9778c7a7ec0e4427458
|
[
"MIT"
] | null | null | null |
model_sel.py
|
sunnylgz/faceapi
|
7de817a2924d7ad4cbbca9778c7a7ec0e4427458
|
[
"MIT"
] | null | null | null |
facenet_model = "/home/ubuntu/share/source_code/faceapi/checkpoint/20171219-004017/20171219-004017_49975.pb"
c_normal_mean_stddev = [0.9, 0.2]
| 36
| 108
| 0.805556
| 23
| 144
| 4.782609
| 0.869565
| 0.254545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272059
| 0.055556
| 144
| 3
| 109
| 48
| 0.536765
| 0
| 0
| 0
| 0
| 0.5
| 0.629371
| 0.629371
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81c3561d7501981e8bdc2d25d0167106e93cf8d3
| 43
|
py
|
Python
|
The Core/At The Crossroads/11. Extra Number/extraNumber.py
|
DKLynch/CodeSignalTasks
|
c15750fff0f571bf884b9e2b8d5614f30f369008
|
[
"Unlicense"
] | null | null | null |
The Core/At The Crossroads/11. Extra Number/extraNumber.py
|
DKLynch/CodeSignalTasks
|
c15750fff0f571bf884b9e2b8d5614f30f369008
|
[
"Unlicense"
] | null | null | null |
The Core/At The Crossroads/11. Extra Number/extraNumber.py
|
DKLynch/CodeSignalTasks
|
c15750fff0f571bf884b9e2b8d5614f30f369008
|
[
"Unlicense"
] | null | null | null |
def extraNumber(a, b, c):
    # Exactly two of the three inputs are equal; XOR cancels that pair
    # (x ^ x == 0), leaving the extra number.
    return a ^ b ^ c
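Why this works: XOR is associative and self-inverse (x ^ x == 0, x ^ 0 == x), so the two equal inputs cancel and the extra number is what remains. Quick checks:

assert extraNumber(2, 7, 2) == 7
assert extraNumber(3, 3, 1) == 1
assert extraNumber(5, 2, 2) == 5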
| 14.333333
| 25
| 0.604651
| 9
| 43
| 2.888889
| 0.666667
| 0.153846
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.232558
| 43
| 2
| 26
| 21.5
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
484e6ba52ef9424a815e22b58e9f972e450cd33a
| 347
|
py
|
Python
|
qlib/error.py
|
fufik/qlib
|
f71b1de1598eaa4ed98f73a28d5fa0a84d92d35d
|
[
"MIT"
] | null | null | null |
qlib/error.py
|
fufik/qlib
|
f71b1de1598eaa4ed98f73a28d5fa0a84d92d35d
|
[
"MIT"
] | null | null | null |
qlib/error.py
|
fufik/qlib
|
f71b1de1598eaa4ed98f73a28d5fa0a84d92d35d
|
[
"MIT"
] | null | null | null |
class QbitError(RuntimeError):
    def __init__(self, arg):
        # Store as a one-item tuple: assigning a bare string to
        # BaseException.args would split it into per-character items.
        self.args = (arg,)
class QregisterError(RuntimeError):
    def __init__(self, arg):
        self.args = (arg,)
#class QgateError(RuntimeError):
#    def __init__(self, arg):
#        self.args = (arg,)
class QoperatorError(RuntimeError):
    def __init__(self, arg):
        self.args = (arg,)
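Usage sketch: since each class subclasses RuntimeError, callers can catch the specific error or fall back to RuntimeError:

try:
    raise QbitError("qubit index out of range")
except RuntimeError as exc:  # QbitError is-a RuntimeError
    print(exc.args)  # ('qubit index out of range',)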
| 21.6875
| 35
| 0.665706
| 40
| 347
| 5.375
| 0.275
| 0.27907
| 0.353488
| 0.427907
| 0.75814
| 0.75814
| 0.75814
| 0.75814
| 0.586047
| 0
| 0
| 0
| 0.221902
| 347
| 15
| 36
| 23.133333
| 0.796296
| 0.227666
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f93274e552ac80fffe48cce7cf206cf62802204
| 76,747
|
py
|
Python
|
tests/tests_models_mappers/tests_binance/tests_spot/test_models_mapper.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
tests/tests_models_mappers/tests_binance/tests_spot/test_models_mapper.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
tests/tests_models_mappers/tests_binance/tests_spot/test_models_mapper.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
from decimal import Decimal
import pytest
from exapi.enums.binance import (BinanceOrderSide, BinanceOrderStatus,
BinanceOrderType, BinanceTimeInForce)
from exapi.models.binance import (BinanceAccountInfoJson,
BinanceAccountInfoModel,
BinanceAccountTradeJson,
BinanceAccountTradeModel,
BinanceAccountTradesJson,
BinanceAggregateTradeJson,
BinanceAggregateTradeModel,
BinanceAggregateTradesJson,
BinanceAveragePriceJson,
BinanceAveragePriceModel,
BinanceCanceledOrderJson,
BinanceCanceledOrderModel,
BinanceCanceledOrdersJson, BinanceCandleJson,
BinanceCandleModel, BinanceCandlesJson,
BinanceCurrencyBalanceJson,
BinanceCurrencyBalanceModel,
BinanceCurrencyBalancesJson,
BinanceErrorJson, BinanceErrorModel,
BinanceExchangeFilterJson,
BinanceExchangeFilterModel,
BinanceExchangeFilters,
BinanceExchangeFiltersJson,
BinanceExchangeInfoJson,
BinanceExchangeInfoModel,
BinanceFilledOrderJson,
BinanceFilledOrderModel,
BinanceFilledOrdersJson,
BinanceIcebergPartsSymbolFilterJson,
BinanceIcebergPartsSymbolFilterModel,
BinanceLotSizeSymbolFilterJson,
BinanceLotSizeSymbolFilterModel,
BinanceMarketLotSizeSymbolFilterJson,
BinanceMarketLotSizeSymbolFilterModel,
BinanceMaxNumAlgoOrdersExchangeFilterJson,  # assumed to exist alongside the Model, mirroring every other Json/Model pair
BinanceMaxNumAlgoOrdersExchangeFilterModel,
BinanceMaxNumAlgoOrdersSymbolFilterJson,
BinanceMaxNumAlgoOrdersSymbolFilterModel,
BinanceMaxNumIcebergOrdersSymbolFilterJson,
BinanceMaxNumIcebergOrdersSymbolFilterModel,
BinanceMaxNumOrdersExchangeFilterJson,
BinanceMaxNumOrdersExchangeFilterModel,
BinanceMaxNumOrdersSymbolFilterJson,
BinanceMaxNumOrdersSymbolFilterModel,
BinanceMaxPositionSymbolFilterJson,
BinanceMaxPositionSymbolFilterModel,
BinanceMinNotionalSymbolFilterJson,
BinanceMinNotionalSymbolFilterModel,
BinanceOrderBookJson, BinanceOrderBookModel,
BinanceOrderBookOrderJson,
BinanceOrderBookOrderModel,
BinanceOrderBookOrdersJson,
BinanceOrderBookTickerJson,
BinanceOrderBookTickerModel,
BinanceOrderBookTickersJson,
BinanceOrderInfoJson, BinanceOrderInfoModel,
BinanceOrderInfosJson, BinanceOrderJson,
BinanceOrderModel, BinanceOrdersJson,
BinanceOrdersRateLimitJson,
BinanceOrdersRateLimitModel,
BinancePercentPriceSymbolFilterJson,
BinancePercentPriceSymbolFilterModel,
BinancePingJson, BinancePingModel,
BinancePriceTickerJson,
BinancePriceTickerModel,
BinancePriceTickersJson,
BinanceRateLimitJson, BinanceRateLimitModel,
BinanceRawRequestsRateLimitJson,
BinanceRawRequestsRateLimitModel,
BinanceRequestWeightRateLimitJson,
BinanceRequestWeightRateLimitModel,
BinanceServerTimeJson,
BinanceServerTimeModel,
BinanceSymbolFilterJson,
BinanceSymbolFilterModel,
BinanceSymbolFilters,
BinanceSymbolFiltersJson, BinanceSymbolJson,
BinanceSymbolModel, BinanceSymbolsJson,
BinanceTestOrderJson, BinanceTestOrderModel,
BinanceTickerPriceChangeStatJson,
BinanceTickerPriceChangeStatModel,
BinanceTickersPriceChangeStatJson,
BinanceTradeJson, BinanceTradeModel,
BinanceTradesJson)
from exapi.models_mappers.binance.spot import BinanceSpotModelsMapper
@pytest.fixture(scope="module")
def mapper() -> BinanceSpotModelsMapper:
return BinanceSpotModelsMapper()
def test_map_to_error(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceErrorModel(code=-1001, msg="Not enough money.")
json: BinanceErrorJson = {
"code": -1001,
"msg": "Not enough money."
}
assert mapper.map_to_error(json) == expected
def test_map_to_ping(mapper: BinanceSpotModelsMapper) -> None:
expected = BinancePingModel()
json: BinancePingJson = {}
assert mapper.map_to_ping(json) == expected
def test_map_to_server_time(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceServerTimeModel(server_time=1500)
json: BinanceServerTimeJson = {
"serverTime": 1500
}
assert mapper.map_to_server_time(json) == expected
def test_map_to_average_price(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceAveragePriceModel(mins=10, price=Decimal("1500.2"))
json: BinanceAveragePriceJson = {
"mins": 10,
"price": "1500.2"
}
assert mapper.map_to_average_price(json) == expected
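Aside: the fixtures keep prices as JSON strings and the mapper builds Decimal values from them, which avoids binary-float rounding. A stdlib-only illustration, independent of exapi:

from decimal import Decimal

exact = Decimal("1500.2")   # built from the string: exact
approx = Decimal(1500.2)    # built from a float: captures the binary approximation
assert exact != approx
assert str(exact) == "1500.2"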
def test_map_to_candle(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceCandleModel(
open_time=1500,
open=Decimal("1500.5"),
high=Decimal("1500.6"),
low=Decimal("1500.7"),
close=Decimal("1500.8"),
volume=Decimal("1500.9"),
close_time=1600,
quote_volume=Decimal("1500.0"),
trades_num=150,
taker_buy_base_volume=Decimal("1500.4"),
taker_buy_quote_volume=Decimal("1500.3"),
ignore=Decimal("1500.2"))
json: BinanceCandleJson = (
1500,
"1500.5",
"1500.6",
"1500.7",
"1500.8",
"1500.9",
1600,
"1500.0",
150,
"1500.4",
"1500.3",
"1500.2"
)
assert mapper.map_to_candle(json) == expected
def test_map_to_candles(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceCandleModel(
open_time=1500,
open=Decimal("1500.5"),
high=Decimal("1500.6"),
low=Decimal("1500.7"),
close=Decimal("1500.8"),
volume=Decimal("1500.9"),
close_time=1600,
quote_volume=Decimal("1500.0"),
trades_num=150,
taker_buy_base_volume=Decimal("1500.4"),
taker_buy_quote_volume=Decimal("1500.3"),
ignore=Decimal("1500.2")),
BinanceCandleModel(
open_time=1505,
open=Decimal("1500.5"),
high=Decimal("1500.6"),
low=Decimal("1500.7"),
close=Decimal("1500.8"),
volume=Decimal("1500.9"),
close_time=1600,
quote_volume=Decimal("1500.0"),
trades_num=150,
taker_buy_base_volume=Decimal("1500.4"),
taker_buy_quote_volume=Decimal("1500.3"),
ignore=Decimal("1500.2"))
]
json: BinanceCandlesJson = [
(
1500,
"1500.5",
"1500.6",
"1500.7",
"1500.8",
"1500.9",
1600,
"1500.0",
150,
"1500.4",
"1500.3",
"1500.2"
),
(
1505,
"1500.5",
"1500.6",
"1500.7",
"1500.8",
"1500.9",
1600,
"1500.0",
150,
"1500.4",
"1500.3",
"1500.2"
)
]
assert mapper.map_to_candles(json) == expected
def test_map_to_order_book_ticker(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceOrderBookTickerModel(
symbol="BTCUSDT",
bid_price=Decimal("1500.5"),
bid_qty=Decimal("24.5"),
ask_price=Decimal("1600.5"),
ask_qty=Decimal("30.8"))
json: BinanceOrderBookTickerJson = {
"symbol": "BTCUSDT",
"bidPrice": "1500.5",
"bidQty": "24.5",
"askPrice": "1600.5",
"askQty": "30.8"
}
assert mapper.map_to_order_book_ticker(json) == expected
def test_map_to_order_book_tickers(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceOrderBookTickerModel(
symbol="BTCUSDT",
bid_price=Decimal("1500.5"),
bid_qty=Decimal("24.5"),
ask_price=Decimal("1600.5"),
ask_qty=Decimal("30.8")),
BinanceOrderBookTickerModel(
symbol="ETHUSDT",
bid_price=Decimal("1500.5"),
bid_qty=Decimal("24.5"),
ask_price=Decimal("1600.5"),
ask_qty=Decimal("30.8"))
]
json: BinanceOrderBookTickersJson = [
{
"symbol": "BTCUSDT",
"bidPrice": "1500.5",
"bidQty": "24.5",
"askPrice": "1600.5",
"askQty": "30.8"
},
{
"symbol": "ETHUSDT",
"bidPrice": "1500.5",
"bidQty": "24.5",
"askPrice": "1600.5",
"askQty": "30.8"
}
]
assert mapper.map_to_order_book_tickers(json) == expected
def test_map_to_order_book_order(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceOrderBookOrderModel(
price=Decimal("1200.5"),
quantity=Decimal("399.6"))
json: BinanceOrderBookOrderJson = (
"1200.5",
"399.6"
)
assert mapper.map_to_order_book_order(json) == expected
def test_map_to_order_book_orders(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceOrderBookOrderModel(
price=Decimal("1200.5"),
quantity=Decimal("399.6")),
BinanceOrderBookOrderModel(
price=Decimal("1201.5"),
quantity=Decimal("399.6"))
]
json: BinanceOrderBookOrdersJson = [
(
"1200.5",
"399.6"
),
(
"1201.5",
"399.6"
)
]
assert mapper.map_to_order_book_orders(json) == expected
def test_map_to_order_book(mapper: BinanceSpotModelsMapper) -> None:
orders = [
BinanceOrderBookOrderModel(
price=Decimal("1200.5"),
quantity=Decimal("399.6")),
BinanceOrderBookOrderModel(
price=Decimal("1201.5"),
quantity=Decimal("399.6"))
]
expected = BinanceOrderBookModel(
last_update_id=123,
bids=orders,
asks=orders)
json_orders: BinanceOrderBookOrdersJson = [
(
"1200.5",
"399.6"
),
(
"1201.5",
"399.6"
)
]
json: BinanceOrderBookJson = {
"lastUpdateId": 123,
"bids": json_orders,
"asks": json_orders
}
assert mapper.map_to_order_book(json) == expected
def test_map_to_filled_order(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="BTC")
json: BinanceFilledOrderJson = {
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "BTC"
}
assert mapper.map_to_filled_order(json) == expected
def test_map_to_filled_orders(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="BTC"),
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="ETH")
]
json: BinanceFilledOrdersJson = [
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "BTC"
},
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "ETH"
}
]
assert mapper.map_to_filled_orders(json) == expected
def test_map_to_order(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceOrderModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
transact_time=200,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0"),
status="NEW",
time_in_force="GTC",
type="LIMIT",
side="BUY",
fills=[
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="BTC"),
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="ETH")
]
)
json: BinanceOrderJson = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"transactTime": 200,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0",
"status": "NEW",
"timeInForce": "GTC",
"type": "LIMIT",
"side": "BUY",
"fills": [
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "BTC"
},
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "ETH"
}
]
}
assert mapper.map_to_order(json) == expected
def test_map_to_orders(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceOrderModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
transact_time=200,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0"),
status="NEW",
time_in_force="GTC",
type="LIMIT",
side="BUY",
fills=[
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="BTC"),
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="ETH")
]
),
BinanceOrderModel(
symbol="ETHUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
transact_time=200,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0"),
status="NEW",
time_in_force="GTC",
type="LIMIT",
side="BUY",
fills=[
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="BTC"),
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="ETH")
]
)
]
json: BinanceOrdersJson = [
{
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"transactTime": 200,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0",
"status": "NEW",
"timeInForce": "GTC",
"type": "LIMIT",
"side": "BUY",
"fills": [
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "BTC"
},
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "ETH"
}
]
},
{
"symbol": "ETHUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"transactTime": 200,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0",
"status": "NEW",
"timeInForce": "GTC",
"type": "LIMIT",
"side": "BUY",
"fills": [
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "BTC"
},
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "ETH"
}
]
}
]
assert mapper.map_to_orders(json) == expected
def test_map_to_test_order(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceTestOrderModel()
json: BinanceTestOrderJson = {}
assert mapper.map_to_test_order(json) == expected
def test_map_to_canceled_order(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceCanceledOrderModel(
symbol="BTCUSDT",
orig_client_order_id="sdf",
order_id=5,
order_list_id=-1,
client_order_id="sdff",
price=Decimal("100000.5"),
orig_qty=Decimal("0.04"),
executed_qty=Decimal("0.02"),
cummulative_quote_qty=Decimal("4000"),
status=BinanceOrderStatus.CANCELED,
time_in_force=BinanceTimeInForce.GTC,
type=BinanceOrderType.LIMIT,
side=BinanceOrderSide.SELL)
json: BinanceCanceledOrderJson = {
"symbol": "BTCUSDT",
"origClientOrderId": "sdf",
"orderId": 5,
"orderListId": -1,
"clientOrderId": "sdff",
"price": "100000.5",
"origQty": "0.04",
"executedQty": "0.02",
"cummulativeQuoteQty": "4000",
"status": BinanceOrderStatus.CANCELED,
"timeInForce": BinanceTimeInForce.GTC,
"type": BinanceOrderType.LIMIT,
"side": BinanceOrderSide.SELL
}
assert mapper.map_to_canceled_order(json) == expected
def test_map_to_canceled_orders(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceCanceledOrderModel(
symbol="BTCUSDT",
orig_client_order_id="sdf",
order_id=5,
order_list_id=-1,
client_order_id="sdff",
price=Decimal("100000.5"),
orig_qty=Decimal("0.04"),
executed_qty=Decimal("0.02"),
cummulative_quote_qty=Decimal("4000"),
status=BinanceOrderStatus.CANCELED,
time_in_force=BinanceTimeInForce.GTC,
type=BinanceOrderType.LIMIT,
side=BinanceOrderSide.SELL),
BinanceCanceledOrderModel(
symbol="ETHUSDT",
orig_client_order_id="sdf",
order_id=5,
order_list_id=-1,
client_order_id="sdff",
price=Decimal("100000.5"),
orig_qty=Decimal("0.04"),
executed_qty=Decimal("0.02"),
cummulative_quote_qty=Decimal("4000"),
status=BinanceOrderStatus.CANCELED,
time_in_force=BinanceTimeInForce.GTC,
type=BinanceOrderType.LIMIT,
side=BinanceOrderSide.SELL)
]
json: BinanceCanceledOrdersJson = [
{
"symbol": "BTCUSDT",
"origClientOrderId": "sdf",
"orderId": 5,
"orderListId": -1,
"clientOrderId": "sdff",
"price": "100000.5",
"origQty": "0.04",
"executedQty": "0.02",
"cummulativeQuoteQty": "4000",
"status": BinanceOrderStatus.CANCELED,
"timeInForce": BinanceTimeInForce.GTC,
"type": BinanceOrderType.LIMIT,
"side": BinanceOrderSide.SELL
},
{
"symbol": "ETHUSDT",
"origClientOrderId": "sdf",
"orderId": 5,
"orderListId": -1,
"clientOrderId": "sdff",
"price": "100000.5",
"origQty": "0.04",
"executedQty": "0.02",
"cummulativeQuoteQty": "4000",
"status": BinanceOrderStatus.CANCELED,
"timeInForce": BinanceTimeInForce.GTC,
"type": BinanceOrderType.LIMIT,
"side": BinanceOrderSide.SELL
}
]
assert mapper.map_to_canceled_orders(json) == expected
def test_map_to_price_ticker(mapper: BinanceSpotModelsMapper) -> None:
expected = BinancePriceTickerModel(
symbol="BTCUSDT",
price=Decimal("157.8"))
json: BinancePriceTickerJson = {
"symbol": "BTCUSDT",
"price": "157.8"
}
assert mapper.map_to_price_ticker(json) == expected
def test_map_to_price_tickers(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinancePriceTickerModel(
symbol="BTCUSDT",
price=Decimal("157.8")),
BinancePriceTickerModel(
symbol="ETHUSDT",
price=Decimal("157.8"))
]
json: BinancePriceTickersJson = [
{
"symbol": "BTCUSDT",
"price": "157.8"
},
{
"symbol": "ETHUSDT",
"price": "157.8"
}
]
assert mapper.map_to_price_tickers(json) == expected
def test_map_to_ticker_price_change_stat(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceTickerPriceChangeStatModel(
symbol="BTCUSDT",
price_change=Decimal("178.0"),
weighted_avg_price=Decimal("178.1"),
prev_close_price=Decimal("178.2"),
last_price=Decimal("178.3"),
last_qty=Decimal("178.4"),
bid_price=Decimal("178.5"),
ask_price=Decimal("178.6"),
open_price=Decimal("178.7"),
high_price=Decimal("178.8"),
low_price=Decimal("178.9"),
volume=Decimal("178.11"),
quote_volume=Decimal("178.12"),
open_time=123,
close_time=124,
first_id=847,
last_id=838,
count=158)
json: BinanceTickerPriceChangeStatJson = {
"symbol": "BTCUSDT",
"priceChange": "178.0",
"weightedAvgPrice": "178.1",
"prevClosePrice": "178.2",
"lastPrice": "178.3",
"lastQty": "178.4",
"bidPrice": "178.5",
"askPrice": "178.6",
"openPrice": "178.7",
"highPrice": "178.8",
"lowPrice": "178.9",
"volume": "178.11",
"quoteVolume": "178.12",
"openTime": 123,
"closeTime": 124,
"firstId": 847,
"lastId": 838,
"count": 158
}
assert mapper.map_to_ticker_price_change_stat(json) == expected
def test_map_to_tickers_price_change_stat(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceTickerPriceChangeStatModel(
symbol="BTCUSDT",
price_change=Decimal("178.0"),
weighted_avg_price=Decimal("178.1"),
prev_close_price=Decimal("178.2"),
last_price=Decimal("178.3"),
last_qty=Decimal("178.4"),
bid_price=Decimal("178.5"),
ask_price=Decimal("178.6"),
open_price=Decimal("178.7"),
high_price=Decimal("178.8"),
low_price=Decimal("178.9"),
volume=Decimal("178.11"),
quote_volume=Decimal("178.12"),
open_time=123,
close_time=124,
first_id=847,
last_id=838,
count=158),
BinanceTickerPriceChangeStatModel(
symbol="ETHUSDT",
price_change=Decimal("178.0"),
weighted_avg_price=Decimal("178.1"),
prev_close_price=Decimal("178.2"),
last_price=Decimal("178.3"),
last_qty=Decimal("178.4"),
bid_price=Decimal("178.5"),
ask_price=Decimal("178.6"),
open_price=Decimal("178.7"),
high_price=Decimal("178.8"),
low_price=Decimal("178.9"),
volume=Decimal("178.11"),
quote_volume=Decimal("178.12"),
open_time=123,
close_time=124,
first_id=847,
last_id=838,
count=158)
]
json: BinanceTickersPriceChangeStatJson = [
{
"symbol": "BTCUSDT",
"priceChange": "178.0",
"weightedAvgPrice": "178.1",
"prevClosePrice": "178.2",
"lastPrice": "178.3",
"lastQty": "178.4",
"bidPrice": "178.5",
"askPrice": "178.6",
"openPrice": "178.7",
"highPrice": "178.8",
"lowPrice": "178.9",
"volume": "178.11",
"quoteVolume": "178.12",
"openTime": 123,
"closeTime": 124,
"firstId": 847,
"lastId": 838,
"count": 158
},
{
"symbol": "ETHUSDT",
"priceChange": "178.0",
"weightedAvgPrice": "178.1",
"prevClosePrice": "178.2",
"lastPrice": "178.3",
"lastQty": "178.4",
"bidPrice": "178.5",
"askPrice": "178.6",
"openPrice": "178.7",
"highPrice": "178.8",
"lowPrice": "178.9",
"volume": "178.11",
"quoteVolume": "178.12",
"openTime": 123,
"closeTime": 124,
"firstId": 847,
"lastId": 838,
"count": 158
}
]
assert mapper.map_to_tickers_price_change_stat(json) == expected
def test_map_to_trade(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceTradeModel(
id=5,
price=Decimal("100.3"),
qty=Decimal("100.4"),
quote_qty=Decimal("100.5"),
time=100,
is_buyer_maker=False,
is_best_match=True)
json: BinanceTradeJson = {
"id": 5,
"price": "100.3",
"qty": "100.4",
"quoteQty": "100.5",
"time": 100,
"isBuyerMaker": False,
"isBestMatch": True
}
assert mapper.map_to_trade(json) == expected
def test_map_to_trades(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceTradeModel(
id=5,
price=Decimal("100.3"),
qty=Decimal("100.4"),
quote_qty=Decimal("100.5"),
time=100,
is_buyer_maker=False,
is_best_match=True),
BinanceTradeModel(
id=6,
price=Decimal("100.3"),
qty=Decimal("100.4"),
quote_qty=Decimal("100.5"),
time=100,
is_buyer_maker=False,
is_best_match=True)
]
json: BinanceTradesJson = [
{
"id": 5,
"price": "100.3",
"qty": "100.4",
"quoteQty": "100.5",
"time": 100,
"isBuyerMaker": False,
"isBestMatch": True
},
{
"id": 6,
"price": "100.3",
"qty": "100.4",
"quoteQty": "100.5",
"time": 100,
"isBuyerMaker": False,
"isBestMatch": True
}
]
assert mapper.map_to_trades(json) == expected
def test_map_to_aggregate_trade(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceAggregateTradeModel(
id=5,
price=Decimal("10.5"),
qty=Decimal("10.3"),
first_id=52,
last_id=57,
time=1005,
is_buyer_maker=True,
is_best_match=False)
json: BinanceAggregateTradeJson = {
"a": 5,
"p": "10.5",
"q": "10.3",
"f": 52,
"l": 57,
"T": 1005,
"m": True,
"M": False
}
assert mapper.map_to_aggregate_trade(json) == expected
def test_map_to_aggregate_trades(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceAggregateTradeModel(
id=5,
price=Decimal("10.5"),
qty=Decimal("10.3"),
first_id=52,
last_id=57,
time=1005,
is_buyer_maker=True,
is_best_match=False),
BinanceAggregateTradeModel(
id=6,
price=Decimal("10.5"),
qty=Decimal("10.3"),
first_id=52,
last_id=57,
time=1005,
is_buyer_maker=True,
is_best_match=False)
]
json: BinanceAggregateTradesJson = [
{
"a": 5,
"p": "10.5",
"q": "10.3",
"f": 52,
"l": 57,
"T": 1005,
"m": True,
"M": False
},
{
"a": 6,
"p": "10.5",
"q": "10.3",
"f": 52,
"l": 57,
"T": 1005,
"m": True,
"M": False
}
]
assert mapper.map_to_aggregate_trades(json) == expected
def test_map_to_percent_price_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinancePercentPriceSymbolFilterModel(
filter_type="PERCENT_PRICE",
multiplier_up=Decimal("20.4"),
multiplier_down=Decimal("20.3"),
avg_price_mins=10)
json: BinancePercentPriceSymbolFilterJson = {
"filterType": "PERCENT_PRICE",
"multiplierUp": "20.4",
"multiplierDown": "20.3",
"avgPriceMins": 10
}
assert mapper.map_to_percent_price_symbol_filter(json) == expected
def test_map_to_lot_size_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceLotSizeSymbolFilterModel(
filter_type="LOT_SIZE",
min_qty=Decimal("20.4"),
max_qty=Decimal("20.3"),
step_size=Decimal("5.3"))
json: BinanceLotSizeSymbolFilterJson = {
"filterType": "LOT_SIZE",
"minQty": "20.4",
"maxQty": "20.3",
"stepSize": "5.3"
}
assert mapper.map_to_lot_size_symbol_filter(json) == expected
def test_map_to_min_notional_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceMinNotionalSymbolFilterModel(
filter_type="MIN_NOTIONAL",
min_notional=Decimal("10.3"),
apply_to_market=True,
avg_price_mins=10)
json: BinanceMinNotionalSymbolFilterJson = {
"filterType": "MIN_NOTIONAL",
"minNotional": "10.3",
"applyToMarket": True,
"avgPriceMins": 10
}
assert mapper.map_to_min_notional_symbol_filter(json) == expected
def test_map_to_iceberg_parts_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceIcebergPartsSymbolFilterModel(
filter_type="ICEBERG_PARTS",
limit=10)
json: BinanceIcebergPartsSymbolFilterJson = {
"filterType": "ICEBERG_PARTS",
"limit": 10
}
assert mapper.map_to_iceberg_parts_symbol_filter(json) == expected
def test_map_to_market_lot_size_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceMarketLotSizeSymbolFilterModel(
filter_type="MARKET_LOT_SIZE",
min_qty=Decimal("20.4"),
max_qty=Decimal("20.3"),
step_size=Decimal("5.3"))
json: BinanceMarketLotSizeSymbolFilterJson = {
"filterType": "MARKET_LOT_SIZE",
"minQty": "20.4",
"maxQty": "20.3",
"stepSize": "5.3"
}
assert mapper.map_to_market_lot_size_symbol_filter(json) == expected
def test_map_to_max_num_orders_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceMaxNumOrdersSymbolFilterModel(
filter_type="MAX_NUM_ORDERS",
max_num_orders=10)
json: BinanceMaxNumOrdersSymbolFilterJson = {
"filterType": "MAX_NUM_ORDERS",
"maxNumOrders": 10
}
assert mapper.map_to_max_num_orders_symbol_filter(json) == expected
def test_map_to_max_num_algo_orders_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceMaxNumAlgoOrdersSymbolFilterModel(
filter_type="MAX_NUM_ALGO_ORDERS",
max_num_algo_orders=10)
json: BinanceMaxNumAlgoOrdersSymbolFilterJson = {
"filterType": "MAX_NUM_ALGO_ORDERS",
"maxNumAlgoOrders": 10
}
assert mapper.map_to_max_num_algo_orders_symbol_filter(json) == expected
def test_map_to_max_num_iceberg_orders_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceMaxNumIcebergOrdersSymbolFilterModel(
filter_type="MAX_NUM_ICEBERG_ORDERS",
max_num_iceberg_orders=10)
json: BinanceMaxNumIcebergOrdersSymbolFilterJson = {
"filterType": "MAX_NUM_ICEBERG_ORDERS",
"maxNumIcebergOrders": 10
}
assert mapper.map_to_max_num_iceberg_orders_symbol_filter(json) == expected
def test_map_to_max_position_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceMaxPositionSymbolFilterModel(
filter_type="MAX_POSITION",
max_position=Decimal("10.3"))
json: BinanceMaxPositionSymbolFilterJson = {
"filterType": "MAX_POSITION",
"maxPosition": "10.3"
}
assert mapper.map_to_max_position_symbol_filter(json) == expected
def test_map_symbol_filter(mapper: BinanceSpotModelsMapper) -> None:
expected: BinanceSymbolFilterModel
json: BinanceSymbolFilterJson
expected = BinancePercentPriceSymbolFilterModel(
filter_type="PERCENT_PRICE",
multiplier_up=Decimal("20.4"),
multiplier_down=Decimal("20.3"),
avg_price_mins=10)
json = {
"filterType": "PERCENT_PRICE",
"multiplierUp": "20.4",
"multiplierDown": "20.3",
"avgPriceMins": 10
}
assert mapper.map_to_symbol_filter(json) == expected
expected = BinanceLotSizeSymbolFilterModel(
filter_type="LOT_SIZE",
min_qty=Decimal("20.4"),
max_qty=Decimal("20.3"),
step_size=Decimal("5.3"))
json = {
"filterType": "LOT_SIZE",
"minQty": "20.4",
"maxQty": "20.3",
"stepSize": "5.3"
}
assert mapper.map_to_symbol_filter(json) == expected
expected = BinanceMinNotionalSymbolFilterModel(
filter_type="MIN_NOTIONAL",
min_notional=Decimal("10.3"),
apply_to_market=True,
avg_price_mins=10)
json = {
"filterType": "MIN_NOTIONAL",
"minNotional": "10.3",
"applyToMarket": True,
"avgPriceMins": 10
}
assert mapper.map_to_symbol_filter(json) == expected
expected = BinanceIcebergPartsSymbolFilterModel(
filter_type="ICEBERG_PARTS",
limit=10)
json = {
"filterType": "ICEBERG_PARTS",
"limit": 10
}
assert mapper.map_to_symbol_filter(json) == expected
expected = BinanceMarketLotSizeSymbolFilterModel(
filter_type="MARKET_LOT_SIZE",
min_qty=Decimal("20.4"),
max_qty=Decimal("20.3"),
step_size=Decimal("5.3"))
json = {
"filterType": "MARKET_LOT_SIZE",
"minQty": "20.4",
"maxQty": "20.3",
"stepSize": "5.3"
}
assert mapper.map_to_symbol_filter(json) == expected
expected = BinanceMaxNumOrdersSymbolFilterModel(
filter_type="MAX_NUM_ORDERS",
max_num_orders=10)
json = {
"filterType": "MAX_NUM_ORDERS",
"maxNumOrders": 10
}
assert mapper.map_to_symbol_filter(json) == expected
expected = BinanceMaxNumAlgoOrdersSymbolFilterModel(
filter_type="MAX_NUM_ALGO_ORDERS",
max_num_algo_orders=10)
json = {
"filterType": "MAX_NUM_ALGO_ORDERS",
"maxNumAlgoOrders": 10
}
assert mapper.map_to_symbol_filter(json) == expected
expected = BinanceMaxNumIcebergOrdersSymbolFilterModel(
filter_type="MAX_NUM_ICEBERG_ORDERS",
max_num_iceberg_orders=10)
json = {
"filterType": "MAX_NUM_ICEBERG_ORDERS",
"maxNumIcebergOrders": 10
}
assert mapper.map_to_symbol_filter(json) == expected
expected = BinanceMaxPositionSymbolFilterModel(
filter_type="MAX_POSITION",
max_position=Decimal("10.3"))
json = {
"filterType": "MAX_POSITION",
"maxPosition": "10.3"
}
assert mapper.map_to_symbol_filter(json) == expected
json = {
"filterType": "AA"
}
with pytest.raises(AssertionError):
mapper.map_to_symbol_filter(json)
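Aside: the test above implies map_to_symbol_filter dispatches on the "filterType" discriminator and asserts on unknown values. A hypothetical sketch of that pattern (names are illustrative, not the exapi implementation):

SYMBOL_FILTER_MAPPERS = {
    "PERCENT_PRICE": "map_to_percent_price_symbol_filter",
    "LOT_SIZE": "map_to_lot_size_symbol_filter",
    # ... one entry per supported filter type
}

def map_to_symbol_filter_sketch(mapper, json):
    # Pick the specific mapper by discriminator; fail loudly on unknowns,
    # matching the AssertionError the test expects.
    method_name = SYMBOL_FILTER_MAPPERS.get(json["filterType"])
    assert method_name is not None, f"unknown filter type: {json['filterType']}"
    return getattr(mapper, method_name)(json)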
def test_map_symbol_filters(mapper: BinanceSpotModelsMapper) -> None:
expected: BinanceSymbolFilters
json: BinanceSymbolFiltersJson
expected = [
BinancePercentPriceSymbolFilterModel(
filter_type="PERCENT_PRICE",
multiplier_up=Decimal("20.4"),
multiplier_down=Decimal("20.3"),
avg_price_mins=10),
BinanceLotSizeSymbolFilterModel(
filter_type="LOT_SIZE",
min_qty=Decimal("20.4"),
max_qty=Decimal("20.3"),
step_size=Decimal("5.3")),
BinanceMinNotionalSymbolFilterModel(
filter_type="MIN_NOTIONAL",
min_notional=Decimal("10.3"),
apply_to_market=True,
avg_price_mins=10),
BinanceIcebergPartsSymbolFilterModel(
filter_type="ICEBERG_PARTS",
limit=10),
BinanceMarketLotSizeSymbolFilterModel(
filter_type="MARKET_LOT_SIZE",
min_qty=Decimal("20.4"),
max_qty=Decimal("20.3"),
step_size=Decimal("5.3")),
BinanceMaxNumOrdersSymbolFilterModel(
filter_type="MAX_NUM_ORDERS",
max_num_orders=10),
BinanceMaxNumAlgoOrdersSymbolFilterModel(
filter_type="MAX_NUM_ALGO_ORDERS",
max_num_algo_orders=10),
BinanceMaxNumIcebergOrdersSymbolFilterModel(
filter_type="MAX_NUM_ICEBERG_ORDERS",
max_num_iceberg_orders=10),
BinanceMaxPositionSymbolFilterModel(
filter_type="MAX_POSITION",
max_position=Decimal("10.3"))
]
json = [
{
"filterType": "PERCENT_PRICE",
"multiplierUp": "20.4",
"multiplierDown": "20.3",
"avgPriceMins": 10
},
{
"filterType": "LOT_SIZE",
"minQty": "20.4",
"maxQty": "20.3",
"stepSize": "5.3"
},
{
"filterType": "MIN_NOTIONAL",
"minNotional": "10.3",
"applyToMarket": True,
"avgPriceMins": 10
},
{
"filterType": "ICEBERG_PARTS",
"limit": 10
},
{
"filterType": "MARKET_LOT_SIZE",
"minQty": "20.4",
"maxQty": "20.3",
"stepSize": "5.3"
},
{
"filterType": "MAX_NUM_ORDERS",
"maxNumOrders": 10
},
{
"filterType": "MAX_NUM_ALGO_ORDERS",
"maxNumAlgoOrders": 10
},
{
"filterType": "MAX_NUM_ICEBERG_ORDERS",
"maxNumIcebergOrders": 10
},
{
"filterType": "MAX_POSITION",
"maxPosition": "10.3"
}
]
assert mapper.map_to_symbol_filters(json) == expected
json = [
{
"filterType": "AA"
}
]
with pytest.raises(AssertionError):
mapper.map_to_symbol_filters(json)
json = []
expected = []
assert mapper.map_to_symbol_filters(json) == expected
def test_map_to_symbol(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceSymbolModel(
symbol="BTCUSDT",
status="TRADING",
base_asset="BTC",
base_asset_precision=8,
quote_asset="USDT",
quote_precision=2,
quote_asset_precision=4,
order_types=["LIMIT", "STOP_LOSS"],
iceberg_allowed=False,
oco_allowed=False,
is_spot_trading_allowed=True,
is_margin_trading_allowed=True,
filters=[
BinancePercentPriceSymbolFilterModel(
filter_type="PERCENT_PRICE",
multiplier_down=Decimal("10.3"),
multiplier_up=Decimal("10.4"),
avg_price_mins=10),
BinanceLotSizeSymbolFilterModel(
filter_type="LOT_SIZE",
min_qty=Decimal("10.1"),
max_qty=Decimal("10.4"),
step_size=Decimal("1.2"))
],
permissions=["SPOT", "MARGIN"])
json: BinanceSymbolJson = {
"symbol": "BTCUSDT",
"status": "TRADING",
"baseAsset": "BTC",
"baseAssetPrecision": 8,
"quoteAsset": "USDT",
"quotePrecision": 2,
"quoteAssetPrecision": 4,
"orderTypes": ["LIMIT", "STOP_LOSS"],
"icebergAllowed": False,
"ocoAllowed": False,
"isSpotTradingAllowed": True,
"isMarginTradingAllowed": True,
"filters": [
{
"filterType": "PERCENT_PRICE",
"multiplierDown": "10.3",
"multiplierUp": "10.4",
"avgPriceMins": 10
},
{
"filterType": "LOT_SIZE",
"minQty": "10.1",
"maxQty": "10.4",
"stepSize": "1.2"
}
],
"permissions": ["SPOT", "MARGIN"]
}
assert mapper.map_to_symbol(json) == expected
def test_map_to_symbols(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceSymbolModel(
symbol="BTCUSDT",
status="TRADING",
base_asset="BTC",
base_asset_precision=8,
quote_asset="USDT",
quote_precision=2,
quote_asset_precision=4,
order_types=["LIMIT", "STOP_LOSS"],
iceberg_allowed=False,
oco_allowed=False,
is_spot_trading_allowed=True,
is_margin_trading_allowed=True,
filters=[
BinancePercentPriceSymbolFilterModel(
filter_type="PERCENT_PRICE",
multiplier_down=Decimal("10.3"),
multiplier_up=Decimal("10.4"),
avg_price_mins=10),
BinanceLotSizeSymbolFilterModel(
filter_type="LOT_SIZE",
min_qty=Decimal("10.1"),
max_qty=Decimal("10.4"),
step_size=Decimal("1.2"))
],
permissions=["SPOT", "MARGIN"]),
BinanceSymbolModel(
symbol="BTCUSDT",
status="TRADING",
base_asset="BTC",
base_asset_precision=8,
quote_asset="USDT",
quote_precision=2,
quote_asset_precision=4,
order_types=["LIMIT", "STOP_LOSS"],
iceberg_allowed=False,
oco_allowed=False,
is_spot_trading_allowed=True,
is_margin_trading_allowed=True,
filters=[
BinancePercentPriceSymbolFilterModel(
filter_type="PERCENT_PRICE",
multiplier_down=Decimal("10.3"),
multiplier_up=Decimal("10.4"),
avg_price_mins=10),
BinanceLotSizeSymbolFilterModel(
filter_type="LOT_SIZE",
min_qty=Decimal("10.1"),
max_qty=Decimal("10.4"),
step_size=Decimal("1.2"))
],
permissions=["SPOT", "MARGIN"])
]
json: BinanceSymbolsJson = [
{
"symbol": "BTCUSDT",
"status": "TRADING",
"baseAsset": "BTC",
"baseAssetPrecision": 8,
"quoteAsset": "USDT",
"quotePrecision": 2,
"quoteAssetPrecision": 4,
"orderTypes": ["LIMIT", "STOP_LOSS"],
"icebergAllowed": False,
"ocoAllowed": False,
"isSpotTradingAllowed": True,
"isMarginTradingAllowed": True,
"filters": [
{
"filterType": "PERCENT_PRICE",
"multiplierDown": "10.3",
"multiplierUp": "10.4",
"avgPriceMins": 10
},
{
"filterType": "LOT_SIZE",
"minQty": "10.1",
"maxQty": "10.4",
"stepSize": "1.2"
}
],
"permissions": ["SPOT", "MARGIN"]
},
{
"symbol": "BTCUSDT",
"status": "TRADING",
"baseAsset": "BTC",
"baseAssetPrecision": 8,
"quoteAsset": "USDT",
"quotePrecision": 2,
"quoteAssetPrecision": 4,
"orderTypes": ["LIMIT", "STOP_LOSS"],
"icebergAllowed": False,
"ocoAllowed": False,
"isSpotTradingAllowed": True,
"isMarginTradingAllowed": True,
"filters": [
{
"filterType": "PERCENT_PRICE",
"multiplierDown": "10.3",
"multiplierUp": "10.4",
"avgPriceMins": 10
},
{
"filterType": "LOT_SIZE",
"minQty": "10.1",
"maxQty": "10.4",
"stepSize": "1.2"
}
],
"permissions": ["SPOT", "MARGIN"]
}
]
assert mapper.map_to_symbols(json) == expected
expected = []
json = []
assert mapper.map_to_symbols(json) == expected
def test_map_to_max_num_orders_exchange_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceMaxNumOrdersExchangeFilterModel(
filter_type="EXCHANGE_MAX_NUM_ORDERS",
max_num_orders=10)
json: BinanceMaxNumOrdersExchangeFilterJson = {
"filterType": "EXCHANGE_MAX_NUM_ORDERS",
"maxNumOrders": 10
}
assert mapper.map_to_max_num_orders_exchange_filter(json) == expected
def test_map_to_max_num_algo_orders_exchange_filter(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceMaxNumAlgoOrdersExchangeFilterModel(
filter_type="EXCHANGE_MAX_ALGO_ORDERS",
max_num_algo_orders=10)
json: BinanceMaxNumAlgoOrdersExchangeFilterJson = {
"filterType": "EXCHANGE_MAX_ALGO_ORDERS",
"maxNumAlgoOrders": 10
}
assert mapper.map_to_max_num_algo_orders_exchange_filter(json) == expected
def test_map_to_exchange_filter(mapper: BinanceSpotModelsMapper) -> None:
expected: BinanceExchangeFilterModel
json: BinanceExchangeFilterJson
expected = BinanceMaxNumOrdersExchangeFilterModel(
filter_type="EXCHANGE_MAX_NUM_ORDERS",
max_num_orders=10)
json = {
"filterType": "EXCHANGE_MAX_NUM_ORDERS",
"maxNumOrders": 10
}
assert mapper.map_to_exchange_filter(json) == expected
expected = BinanceMaxNumAlgoOrdersExchangeFilterModel(
filter_type="EXCHANGE_MAX_ALGO_ORDERS",
max_num_algo_orders=10)
json = {
"filterType": "EXCHANGE_MAX_ALGO_ORDERS",
"maxNumAlgoOrders": 10
}
assert mapper.map_to_exchange_filter(json) == expected
json = {"filterType": "aa"}
with pytest.raises(AssertionError):
mapper.map_to_exchange_filter(json)
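# The AssertionError above comes from the mapper's filterType dispatch. A
# minimal sketch of the assumed implementation (an illustration, not the
# project's actual code):
#
#     def map_to_exchange_filter(self, json: BinanceExchangeFilterJson) -> BinanceExchangeFilterModel:
#         filter_type = json["filterType"]
#         if filter_type == "EXCHANGE_MAX_NUM_ORDERS":
#             return self.map_to_max_num_orders_exchange_filter(json)
#         if filter_type == "EXCHANGE_MAX_ALGO_ORDERS":
#             return self.map_to_max_num_algo_orders_exchange_filter(json)
#         assert False, f"unknown exchange filter type: {filter_type!r}"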
def test_map_to_exchange_filters(mapper: BinanceSpotModelsMapper) -> None:
expected: BinanceExchangeFilters
json: BinanceExchangeFiltersJson
expected = [
BinanceMaxNumOrdersExchangeFilterModel(
filter_type="EXCHANGE_MAX_NUM_ORDERS",
max_num_orders=10),
BinanceMaxNumAlgoOrdersExchangeFilterModel(
filter_type="EXCHANGE_MAX_ALGO_ORDERS",
max_num_algo_orders=10)
]
json = [
{
"filterType": "EXCHANGE_MAX_NUM_ORDERS",
"maxNumOrders": 10
},
{
"filterType": "EXCHANGE_MAX_ALGO_ORDERS",
"maxNumAlgoOrders": 10
}
]
assert mapper.map_to_exchange_filters(json) == expected
expected = []
json = []
assert mapper.map_to_exchange_filters(json) == expected
def test_map_to_request_weight_rate_limit(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceRequestWeightRateLimitModel(
rate_limit_type="REQUEST_WEIGHT",
interval="SECOND",
interval_num=10,
limit=20)
json: BinanceRequestWeightRateLimitJson = {
"rateLimitType": "REQUEST_WEIGHT",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
}
assert mapper.map_to_request_weight_rate_limit(json) == expected
def test_map_to_orders_rate_limit(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceOrdersRateLimitModel(
rate_limit_type="ORDERS",
interval="SECOND",
interval_num=10,
limit=20)
json: BinanceOrdersRateLimitJson = {
"rateLimitType": "ORDERS",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
}
assert mapper.map_to_orders_rate_limit(json) == expected
def test_map_to_raw_requests_rate_limit(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceRawRequestsRateLimitModel(
rate_limit_type="RAW_REQUESTS",
interval="SECOND",
interval_num=10,
limit=20)
json: BinanceRawRequestsRateLimitJson = {
"rateLimitType": "RAW_REQUESTS",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
}
assert mapper.map_to_raw_requests_rate_limit(json) == expected
def test_map_to_rate_limit(mapper: BinanceSpotModelsMapper) -> None:
expected: BinanceRateLimitModel
json: BinanceRateLimitJson
expected = BinanceRequestWeightRateLimitModel(
rate_limit_type="REQUEST_WEIGHT",
interval="SECOND",
interval_num=10,
limit=20)
json = {
"rateLimitType": "REQUEST_WEIGHT",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
}
assert mapper.map_to_rate_limit(json) == expected
expected = BinanceOrdersRateLimitModel(
rate_limit_type="ORDERS",
interval="SECOND",
interval_num=10,
limit=20)
json = {
"rateLimitType": "ORDERS",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
}
assert mapper.map_to_rate_limit(json) == expected
expected = BinanceRawRequestsRateLimitModel(
rate_limit_type="RAW_REQUESTS",
interval="SECOND",
interval_num=10,
limit=20)
json = {
"rateLimitType": "RAW_REQUESTS",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
}
assert mapper.map_to_rate_limit(json) == expected
def test_map_to_rate_limits(mapper: BinanceSpotModelsMapper) -> None:
expected: BinanceRateLimits
json: BinanceRateLimitsJson
expected = [
BinanceRequestWeightRateLimitModel(
rate_limit_type="REQUEST_WEIGHT",
interval="SECOND",
interval_num=10,
limit=20),
BinanceOrdersRateLimitModel(
rate_limit_type="ORDERS",
interval="SECOND",
interval_num=10,
limit=20),
BinanceRawRequestsRateLimitModel(
rate_limit_type="RAW_REQUESTS",
interval="SECOND",
interval_num=10,
limit=20)
]
json = [
{
"rateLimitType": "REQUEST_WEIGHT",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
},
{
"rateLimitType": "ORDERS",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
},
{
"rateLimitType": "RAW_REQUESTS",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
}
]
assert mapper.map_to_rate_limits(json) == expected
expected = []
json = []
assert mapper.map_to_rate_limits(json) == expected
def test_map_to_exchange_info(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceExchangeInfoModel(
timezone="UTC",
server_time=500,
rate_limits=[
BinanceRequestWeightRateLimitModel(
rate_limit_type="REQUEST_WEIGHT",
interval="SECOND",
interval_num=10,
limit=20),
BinanceOrdersRateLimitModel(
rate_limit_type="ORDERS",
interval="SECOND",
interval_num=10,
limit=20),
BinanceRawRequestsRateLimitModel(
rate_limit_type="RAW_REQUESTS",
interval="SECOND",
interval_num=10,
limit=20)
],
exchange_filters=[
BinanceMaxNumOrdersExchangeFilterModel(
filter_type="EXCHANGE_MAX_NUM_ORDERS",
max_num_orders=10),
BinanceMaxNumAlgoOrdersExchangeFilterModel(
filter_type="EXCHANGE_MAX_ALGO_ORDERS",
max_num_algo_orders=10)
],
symbols=[
BinanceSymbolModel(
symbol="BTCUSDT",
status="TRADING",
base_asset="BTC",
base_asset_precision=8,
quote_asset="USDT",
quote_precision=2,
quote_asset_precision=4,
order_types=["LIMIT", "STOP_LOSS"],
iceberg_allowed=False,
oco_allowed=False,
is_spot_trading_allowed=True,
is_margin_trading_allowed=True,
filters=[
BinancePercentPriceSymbolFilterModel(
filter_type="PERCENT_PRICE",
multiplier_down=Decimal("10.3"),
multiplier_up=Decimal("10.4"),
avg_price_mins=10),
BinanceLotSizeSymbolFilterModel(
filter_type="LOT_SIZE",
min_qty=Decimal("10.1"),
max_qty=Decimal("10.4"),
step_size=Decimal("1.2"))
],
permissions=["SPOT", "MARGIN"]),
BinanceSymbolModel(
symbol="BTCUSDT",
status="TRADING",
base_asset="BTC",
base_asset_precision=8,
quote_asset="USDT",
quote_precision=2,
quote_asset_precision=4,
order_types=["LIMIT", "STOP_LOSS"],
iceberg_allowed=False,
oco_allowed=False,
is_spot_trading_allowed=True,
is_margin_trading_allowed=True,
filters=[
BinancePercentPriceSymbolFilterModel(
filter_type="PERCENT_PRICE",
multiplier_down=Decimal("10.3"),
multiplier_up=Decimal("10.4"),
avg_price_mins=10),
BinanceLotSizeSymbolFilterModel(
filter_type="LOT_SIZE",
min_qty=Decimal("10.1"),
max_qty=Decimal("10.4"),
step_size=Decimal("1.2"))
],
permissions=["SPOT", "MARGIN"])
])
json: BinanceExchangeInfoJson = {
"timezone": "UTC",
"serverTime": 500,
"rateLimits": [
{
"rateLimitType": "REQUEST_WEIGHT",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
},
{
"rateLimitType": "ORDERS",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
},
{
"rateLimitType": "RAW_REQUESTS",
"interval": "SECOND",
"intervalNum": 10,
"limit": 20
}
],
"exchangeFilters": [
{
"filterType": "EXCHANGE_MAX_NUM_ORDERS",
"maxNumOrders": 10
},
{
"filterType": "EXCHANGE_MAX_ALGO_ORDERS",
"maxNumAlgoOrders": 10
}
],
"symbols": [
{
"symbol": "BTCUSDT",
"status": "TRADING",
"baseAsset": "BTC",
"baseAssetPrecision": 8,
"quoteAsset": "USDT",
"quotePrecision": 2,
"quoteAssetPrecision": 4,
"orderTypes": ["LIMIT", "STOP_LOSS"],
"icebergAllowed": False,
"ocoAllowed": False,
"isSpotTradingAllowed": True,
"isMarginTradingAllowed": True,
"filters": [
{
"filterType": "PERCENT_PRICE",
"multiplierDown": "10.3",
"multiplierUp": "10.4",
"avgPriceMins": 10
},
{
"filterType": "LOT_SIZE",
"minQty": "10.1",
"maxQty": "10.4",
"stepSize": "1.2"
}
],
"permissions": ["SPOT", "MARGIN"]
},
{
"symbol": "BTCUSDT",
"status": "TRADING",
"baseAsset": "BTC",
"baseAssetPrecision": 8,
"quoteAsset": "USDT",
"quotePrecision": 2,
"quoteAssetPrecision": 4,
"orderTypes": ["LIMIT", "STOP_LOSS"],
"icebergAllowed": False,
"ocoAllowed": False,
"isSpotTradingAllowed": True,
"isMarginTradingAllowed": True,
"filters": [
{
"filterType": "PERCENT_PRICE",
"multiplierDown": "10.3",
"multiplierUp": "10.4",
"avgPriceMins": 10
},
{
"filterType": "LOT_SIZE",
"minQty": "10.1",
"maxQty": "10.4",
"stepSize": "1.2"
}
],
"permissions": ["SPOT", "MARGIN"]
}
]
}
assert mapper.map_to_exchange_info(json) == expected
def test_map_to_balance(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceCurrencyBalanceModel(
asset="BTC",
free=Decimal("10.2"),
locked=Decimal("1.1"))
json: BinanceCurrencyBalanceJson = {
"asset": "BTC",
"free": "10.2",
"locked": "1.1"
}
assert mapper.map_to_balance(json) == expected
def test_map_to_balances(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceCurrencyBalanceModel(
asset="BTC",
free=Decimal("10.2"),
locked=Decimal("1.1")),
BinanceCurrencyBalanceModel(
asset="ETH",
free=Decimal("10.2"),
locked=Decimal("1.1"))
]
json: BinanceCurrencyBalancesJson = [
{
"asset": "BTC",
"free": "10.2",
"locked": "1.1"
},
{
"asset": "ETH",
"free": "10.2",
"locked": "1.1"
}
]
assert mapper.map_to_balances(json) == expected
expected = []
json = []
assert mapper.map_to_balances(json) == expected
def test_map_to_account_info(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceAccountInfoModel(
maker_commission=10,
taker_commission=15,
buyer_commission=11,
seller_commission=12,
can_trade=True,
can_withdraw=True,
can_deposit=False,
update_time=500,
account_type="SPOT",
balances=[
BinanceCurrencyBalanceModel(
asset="BTC",
free=Decimal("10.2"),
locked=Decimal("1.1")),
BinanceCurrencyBalanceModel(
asset="ETH",
free=Decimal("10.2"),
locked=Decimal("1.1"))
]
)
json: BinanceAccountInfoJson = {
"makerCommission": 10,
"takerCommission": 15,
"buyerCommission": 11,
"sellerCommission": 12,
"canTrade": True,
"canWithdraw": True,
"canDeposit": False,
"updateTime": 500,
"accountType": "SPOT",
"balances": [
{
"asset": "BTC",
"free": "10.2",
"locked": "1.1"
},
{
"asset": "ETH",
"free": "10.2",
"locked": "1.1"
}
]
}
assert mapper.map_to_account_info(json) == expected
def test_map_to_order_info(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500
)
json: BinanceOrderInfoJson = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7")
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7"
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7"),
orig_qty=Decimal("18.8")
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7",
"origQty": "18.8"
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9")
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9"
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0")
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0"
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0"),
status="NEW"
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0",
"status": "NEW"
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0"),
status="NEW",
time_in_force="GTC"
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0",
"status": "NEW",
"timeInForce": "GTC"
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0"),
status="NEW",
time_in_force="GTC",
type="LIMIT"
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0",
"status": "NEW",
"timeInForce": "GTC",
"type": "LIMIT"
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0"),
status="NEW",
time_in_force="GTC",
type="LIMIT",
side="BUY"
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0",
"status": "NEW",
"timeInForce": "GTC",
"type": "LIMIT",
"side": "BUY"
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0"),
status="NEW",
time_in_force="GTC",
type="LIMIT",
side="BUY",
orig_quote_order_qty=Decimal("15000")
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0",
"status": "NEW",
"timeInForce": "GTC",
"type": "LIMIT",
"side": "BUY",
"origQuoteOrderQty": "15000"
}
assert mapper.map_to_order_info(json) == expected
expected = BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500,
price=Decimal("18.7"),
orig_qty=Decimal("18.8"),
executed_qty=Decimal("18.9"),
cummulative_quote_qty=Decimal("18.0"),
status="NEW",
time_in_force="GTC",
type="LIMIT",
side="BUY",
orig_quote_order_qty=Decimal("15000"),
fills=[
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="BTC"),
BinanceFilledOrderModel(
price=Decimal("127.8"),
qty=Decimal("700.7"),
commission=Decimal("10.8"),
commission_asset="ETH")
]
)
json = {
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500,
"price": "18.7",
"origQty": "18.8",
"executedQty": "18.9",
"cummulativeQuoteQty": "18.0",
"status": "NEW",
"timeInForce": "GTC",
"type": "LIMIT",
"side": "BUY",
"origQuoteOrderQty": "15000",
"fills": [
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "BTC"
},
{
"price": "127.8",
"qty": "700.7",
"commission": "10.8",
"commissionAsset": "ETH"
}
]
}
assert mapper.map_to_order_info(json) == expected
def test_map_to_order_infos(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceOrderInfoModel(
symbol="BTCUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500),
BinanceOrderInfoModel(
symbol="ETHUSDT",
order_id=1234,
order_list_id=1235,
client_order_id="11cc",
is_working=True,
update_time=1500)
]
json: BinanceOrderInfosJson = [
{
"symbol": "BTCUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500
},
{
"symbol": "ETHUSDT",
"orderId": 1234,
"orderListId": 1235,
"clientOrderId": "11cc",
"isWorking": True,
"updateTime": 1500
}
]
assert mapper.map_to_order_infos(json) == expected
def test_map_to_account_trade(mapper: BinanceSpotModelsMapper) -> None:
expected = BinanceAccountTradeModel(
symbol="BTCUSDT",
id=5,
order_id=10,
order_list_id=-1,
price=Decimal("60000"),
qty=Decimal("0.01"),
quote_qty=Decimal("600"),
commission=Decimal("0.00001"),
commission_asset="BTC",
time=1600,
is_buyer=True,
is_maker=False,
is_best_match=True)
json: BinanceAccountTradeJson = {
"symbol": "BTCUSDT",
"id": 5,
"orderId": 10,
"orderListId": -1,
"price": "60000",
"qty": "0.01",
"quoteQty": "600",
"commission": "0.00001",
"commissionAsset": "BTC",
"time": 1600,
"isBuyer": True,
"isMaker": False,
"isBestMatch": True
}
assert mapper.map_to_account_trade(json) == expected
def test_map_to_account_trades(mapper: BinanceSpotModelsMapper) -> None:
expected = [
BinanceAccountTradeModel(
symbol="BTCUSDT",
id=5,
order_id=10,
order_list_id=-1,
price=Decimal("60000"),
qty=Decimal("0.01"),
quote_qty=Decimal("600"),
commission=Decimal("0.00001"),
commission_asset="BTC",
time=1600,
is_buyer=True,
is_maker=False,
is_best_match=True),
BinanceAccountTradeModel(
symbol="ETHUSDT",
id=5,
order_id=10,
order_list_id=-1,
price=Decimal("60000"),
qty=Decimal("0.01"),
quote_qty=Decimal("600"),
commission=Decimal("0.00001"),
commission_asset="BTC",
time=1600,
is_buyer=True,
is_maker=False,
is_best_match=True)
]
json: BinanceAccountTradesJson = [
{
"symbol": "BTCUSDT",
"id": 5,
"orderId": 10,
"orderListId": -1,
"price": "60000",
"qty": "0.01",
"quoteQty": "600",
"commission": "0.00001",
"commissionAsset": "BTC",
"time": 1600,
"isBuyer": True,
"isMaker": False,
"isBestMatch": True
},
{
"symbol": "ETHUSDT",
"id": 5,
"orderId": 10,
"orderListId": -1,
"price": "60000",
"qty": "0.01",
"quoteQty": "600",
"commission": "0.00001",
"commissionAsset": "BTC",
"time": 1600,
"isBuyer": True,
"isMaker": False,
"isBestMatch": True
}
]
assert mapper.map_to_account_trades(json) == expected
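# The mapper implementation is not part of this test module. As a sketch of
# the convention these tests pin down (camelCase JSON keys -> snake_case model
# fields, decimal strings -> Decimal), map_to_balance could look like the
# following; this is an assumption about the code under test, not a copy of it:
#
#     def map_to_balance(self, json: BinanceCurrencyBalanceJson) -> BinanceCurrencyBalanceModel:
#         return BinanceCurrencyBalanceModel(
#             asset=json["asset"],
#             free=Decimal(json["free"]),
#             locked=Decimal(json["locked"]))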
# === Assignments/Assignment 03/main.py | linbo0518/CSE-6363-Machine-Learning | Python | MIT | 1,521 bytes ===
import numpy as np
import matplotlib.pyplot as plt
from kmeans import KMeans
# Three Gaussian clusters: np.random.multivariate_normal takes the mean first
# and the covariance second, so the cluster centres are MEAN_* and the shared
# covariance matrix is COV.
MEAN_1 = (3, 3)
MEAN_2 = (-3, 3)
MEAN_3 = (0, -3)
COV = ((4, 0), (0, 4))
SIZE = 100
dist_1 = np.random.multivariate_normal(MEAN_1, COV, size=SIZE)
dist_2 = np.random.multivariate_normal(MEAN_2, COV, size=SIZE)
dist_3 = np.random.multivariate_normal(MEAN_3, COV, size=SIZE)
plt.plot(dist_1[:, 0], dist_1[:, 1], 'o')
plt.plot(dist_2[:, 0], dist_2[:, 1], 'o')
plt.plot(dist_3[:, 0], dist_3[:, 1], 'o')
plt.title("Training data (sigma = 2)")
plt.show()
x = np.concatenate((dist_1, dist_2, dist_3), axis=0)
for i in range(5):
model = KMeans(3)
centroids, labels, loss = model.fit(x)
model.plot(x, labels, centroids)
print("Centroids:")
for centroid in centroids:
print(f"\t{centroid}")
print(f"KMeans Loss: {loss:.7f}")
COV = ((16, 0), (0, 16))
dist_1 = np.random.multivariate_normal(MEAN_1, COV, size=SIZE)
dist_2 = np.random.multivariate_normal(MEAN_2, COV, size=SIZE)
dist_3 = np.random.multivariate_normal(MEAN_3, COV, size=SIZE)
plt.plot(dist_1[:, 0], dist_1[:, 1], 'o')
plt.plot(dist_2[:, 0], dist_2[:, 1], 'o')
plt.plot(dist_3[:, 0], dist_3[:, 1], 'o')
plt.title("Training data (sigma = 4)")
plt.show()
x = np.concatenate((dist_1, dist_2, dist_3), axis=0)
for i in range(5):
model = KMeans(3)
centroids, labels, loss = model.fit(x)
model.plot(x, labels, centroids)
print("Centroids:")
for centroid in centroids:
print(f"\t{centroid}")
print(f"KMeans Loss: {loss:.7f}")
# === src/flask_password/exceptions.py | oittaa/flask-password | Python | MIT | 184 bytes ===
class ShortPassword(ValueError):
"""
Raised if the password is too short.
"""
class CommonPassword(ValueError):
"""
Raised if the password is too common.
"""
# === models/stoch.py | AntoineHX/BU_Stoch_pool | Python | MIT | 23,967 bytes ===
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import opt_einsum as oe
class SConv2dStride(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1,ceil_mode=True,bias=False):
super(SConv2dStride, self).__init__()
self.conv = nn.Conv2d(in_channels, out_channels, kernel_size , stride=stride, padding=padding,dilation=dilation,bias=bias)
self.stride = stride
self.ceil_mode = ceil_mode
def forward(self, x,stoch = True):
stoch = True  # forced on: for some reason the average-pooling path below does not work
if stoch:
device= x.device
selh = torch.randint(self.conv.stride[0],(1,), device=device)[0]
selw = torch.randint(self.conv.stride[1],(1,), device=device)[0]
out = self.conv(x[:,:,selh:,selw:])
else:
self.conv.stride = (1,1)
out = self.conv(x)
out = F.avg_pool2d(out,self.stride,ceil_mode=self.ceil_mode)
self.conv.stride = (self.stride,self.stride)
return out
class SConv2dAvg(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1,ceil_mode=True, bias = True):
super(SConv2dAvg, self).__init__()
conv = nn.Conv2d(in_channels, out_channels, kernel_size)
self.deconv = nn.ConvTranspose2d(1, 1, kernel_size, stride=1, padding=padding, output_padding=0, groups=1, bias=False, dilation=1, padding_mode='zeros')
nn.init.constant_(self.deconv.weight, 1)
self.pooldeconv = nn.ConvTranspose2d(1, 1, kernel_size=stride,padding=0,stride=stride, output_padding=0, groups=1, bias=False, dilation=1, padding_mode='zeros')
nn.init.constant_(self.pooldeconv.weight, 1)
self.weight = nn.Parameter(conv.weight)
if bias:
self.bias = nn.Parameter(conv.bias)
else:
self.bias = None
self.stride = stride
self.dilation = dilation
self.padding = padding
self.kernel_size = kernel_size
self.ceil_mode = ceil_mode
def forward(self, input, index=-torch.ones(1), mask=-torch.ones(1,1),stoch=True,stride=-1): #ceil_mode = True not right
device=input.device
if stride==-1:
stride = self.stride #if stride not defined use self.stride
if stoch==False:
stride=1 #test with real average pooling
batch_size, in_channels, in_h, in_w = input.shape
out_channels, in_channels, kh, kw = self.weight.shape
afterconv_h,afterconv_w,out_h,out_w = self.get_size(in_h,in_w,stride)
unfold = torch.nn.Unfold(kernel_size=(kh, kw), dilation=self.dilation, padding=self.padding, stride=1)
inp_unf = unfold(input) #transform into a matrix (batch_size, in_channels*kh*kw,afterconv_h,afterconv_w)
if stride!=1:
if len(index.shape)==1: #or stride!=1:
index,mask = self.sample(in_h,in_w,batch_size,device,mask)
if mask[0,0]==-1:# in case of not given mask use only sampled selection
#inp_unf = inp_unf[:,:,rng_h,rng_w].view(batch_size,in_channels*kh*kw,-1)
index = index.repeat(batch_size,in_channels*kh*kw,1,1)
inp_unf = torch.gather(inp_unf.view(batch_size,in_channels*kh*kw,afterconv_h*afterconv_w),2,index.view(batch_size,in_channels*kh*kw,out_h*out_w)).view(batch_size,in_channels*kh*kw,out_h*out_w)
else:#in case of a valid mask use selection only on the mask locations
#inp_unf = inp_unf[index[:,:,mask>0]]
#mindex = index[mask>0]
mindex = torch.masked_select(index, mask>0)
index = mindex.repeat(batch_size,in_channels*kh*kw,1)
inp_unf = torch.gather(inp_unf.view(batch_size,in_channels*kh*kw,afterconv_h*afterconv_w),2,index.view(batch_size,in_channels*kh*kw,index.shape[2])).view(batch_size,in_channels*kh*kw,index.shape[2])
#Matrix mul
if self.bias is None:
#flt = self.weight.view(self.weight.size(0), -1).t()
#out_unf = inp_unf.transpose(2,1).matmul(flt).transpose(1, 2)
out_unf = oe.contract('bji,kj->bki',inp_unf,self.weight.view(self.weight.size(0), -1),backend='torch')
#print(((out_unf-out_unf1)**2).mean())
else:
#out_unf = oe.contract('bji,kj,k->bki',inp_unf,self.weight.view(self.weight.size(0), -1),self.bias,backend='torch')#+self.bias.view(1,-1,1)#wrong
out_unf = oe.contract('bji,kj->bki',inp_unf,self.weight.view(self.weight.size(0), -1),backend='torch')+self.bias.view(1,-1,1)#sligthly slower but correct
#out_unf1 = (inp_unf.transpose(1, 2).matmul(self.weight.view(self.weight.size(0), -1).t()) + self.bias).transpose(1, 2)
#print(((out_unf-out_unf1)**2).mean())
#self.flt = self.weight.view(self.weight.size(0), -1).t()
#out_unf = (inp_unf.transpose(1, 2).matmul(self.flt) + self.bias).transpose(1, 2)
if stride==1 or mask[0,0]==-1:# in case of no mask and stride==1
out = out_unf.view(batch_size,out_channels,out_h,out_w) #Fold
#if stoch==False: #this is done outside for more clarity
# out = F.avg_pool2d(out,self.stride,ceil_mode=False)
#print(self.stride)
else:#in case of mask
out = torch.zeros(batch_size, out_channels,out_h,out_w,device=device)
#out = torch.gather(out.view(batch_size,in_channels*kh*kw,afterconv_h*afterconv_w),2,index.view(batch_size,in_channels*kh*kw,index.shape[2])).view(batch_size,in_channels*kh*kw,index.shape[2])
out[:,:,mask>0] = out_unf
#out.masked_scatter_(mask>0, out_unf)
return out
def forward_slow(self, input, selh=-torch.ones(1,1), selw=-torch.ones(1,1), mask=-torch.ones(1,1),stoch=True,stride=-1):
device=input.device
if stride==-1:
stride = self.stride #if stride not defined use self.stride
if stoch==False:
stride=1 #test with real average pooling
batch_size, in_channels, in_h, in_w = input.shape
out_channels, in_channels, kh, kw = self.weight.shape
afterconv_h = in_h+2*self.padding-(kh-1) #size after conv
afterconv_w = in_w+2*self.padding-(kw-1)
if self.ceil_mode: #ceil_mode = true, default mode for strided conv
out_h = math.ceil(afterconv_h/stride)
out_w = math.ceil(afterconv_w/stride)
else: #ceil_mode = false default mode for pooling
out_h = math.floor(afterconv_h/stride)
out_w = math.floor(afterconv_w/stride)
unfold = torch.nn.Unfold(kernel_size=(kh, kw), dilation=self.dilation, padding=self.padding, stride=1)
inp_unf = unfold(input) #transform into a matrix (batch_size, in_channels*kh*kw,afterconv_h,afterconv_w)
if stride!=1: # if stride==1 there is no pooling
inp_unf = inp_unf.view(batch_size,in_channels*kh*kw,afterconv_h,afterconv_w)
if selh[0,0]==-1: # if not given sampled selection
#selection of where to sample for each pooling location
selh = torch.randint(stride,(out_h,out_w), device=device)
selw = torch.randint(stride,(out_h,out_w), device=device)
resth = (out_h*stride)-afterconv_h
restw = (out_w*stride)-afterconv_w
if resth!=0 and self.ceil_mode: #in case of ceil_mode need to select only the good locations for the last regions
selh[-1,:]=selh[-1,:]%(stride-resth);selh[:,-1]=selh[:,-1]%(stride-restw)
selw[-1,:]=selw[-1,:]%(stride-resth);selw[:,-1]=selw[:,-1]%(stride-restw)
#the postion should be global by adding range...
rng_h = selh + torch.arange(0,out_h*stride,stride,device=device).view(-1,1)
rng_w = selw + torch.arange(0,out_w*stride,stride,device=device)
if mask[0,0]==-1:# in case of not given mask use only sampled selection
inp_unf = inp_unf[:,:,rng_h,rng_w].view(batch_size,in_channels*kh*kw,-1)
else:#in case of a valid mask use selection only on the mask locations
inp_unf = inp_unf[:,:,rng_h[mask>0],rng_w[mask>0]]
#Matrix mul
if self.bias is None:
out_unf = inp_unf.transpose(1, 2).matmul(self.weight.view(self.weight.size(0), -1).t()).transpose(1, 2)
#out_unf = oe.contract('bji,kj->bki',inp_unf,self.weight.view(self.weight.size(0), -1),backend='torch')
else:
#out_unf = oe.contract('bji,kj->bki',inp_unf,self.weight.view(self.weight.size(0), -1),backend='torch')+self.bias.view(1,-1,1)
out_unf = (inp_unf.transpose(1, 2).matmul(self.weight.view(self.weight.size(0), -1).t()) + self.bias).transpose(1, 2)
if stride==1 or mask[0,0]==-1:# in case of no mask and stride==1
out = out_unf.view(batch_size,out_channels,out_h,out_w) #Fold
#if stoch==False: #this is done outside for more clarity
# out = F.avg_pool2d(out,self.stride,ceil_mode=self.ceil_mode)
else:#in case of mask
out = torch.zeros(batch_size, out_channels,out_h,out_w,device=device)
out[:,:,mask>0] = out_unf
return out
def forward_test(self, input, selh=-torch.ones(1,1), selw=-torch.ones(1,1), mask=-torch.ones(1,1),stoch=True,stride=-1):#ugly but faster
device=input.device
if stride==-1:
stride = self.stride #if stride not defined use self.stride
if stoch==False:
stride=1 #test with real average pooling
batch_size, in_channels, in_h, in_w = input.shape
out_channels, in_channels, kh, kw = self.weight.shape
#afterconv_h,afterconv_w,out_h,out_w = self.get_size(in_h,in_w)
#if selh[0,0]==-1:
# index,mask = self.sample(in_h,in_w,batch_size,device,mask)
if 1:
afterconv_h = in_h+2*self.padding-(kh-1) #size after conv
afterconv_w = in_w+2*self.padding-(kw-1)
if self.ceil_mode: #ceil_mode = true, default mode for strided conv
out_h = math.ceil(afterconv_h/stride)
out_w = math.ceil(afterconv_w/stride)
else: #ceil_mode = false default mode for pooling
out_h = math.floor(afterconv_h/stride)
out_w = math.floor(afterconv_w/stride)
unfold = torch.nn.Unfold(kernel_size=(kh, kw), dilation=self.dilation, padding=self.padding, stride=1)
inp_unf = unfold(input) #transform into a matrix (batch_size, in_channels*kh*kw,afterconv_h,afterconv_w)
if 1:
if stride!=1: # if stride==1 there is no pooling
inp_unf = inp_unf.view(batch_size,in_channels*kh*kw,afterconv_h,afterconv_w)
if selh[0,0]==-1: # if not given sampled selection
#selection of where to sample for each pooling location
sel = torch.randint(stride*stride,(out_h,out_w), device=device)
if self.ceil_mode: #in case of ceil_mode need to select only the good locations for the last regions
resth = (out_h*stride)-afterconv_h
restw = (out_w*stride)-afterconv_w
if resth!=0:
sel[-1] = (sel[-1]//stride)%(stride-resth)*stride+(sel[-1]%stride)
sel[:,-1] = (sel[:,-1]%stride)%(stride-restw)+sel[:,-1]//stride*stride
#print(stride-resth,sel[-1])
#print(stride-restw,sel[:,-1])
#rng = torch.arange(0,afterconv_h*afterconv_w,stride*stride,device=device).view(out_h,out_w)
rng = torch.arange(0,out_h*stride*out_w*stride,stride*stride,device=device).view(out_h,out_w)
index = sel+rng
index = index.repeat(batch_size,in_channels*kh*kw,1,1)
if mask[0,0]==-1:# in case of not given mask use only sampled selection
#inp_unf = inp_unf[:,:,rng_h,rng_w].view(batch_size,in_channels*kh*kw,-1)
inp_unf = torch.gather(inp_unf.view(batch_size,in_channels*kh*kw,afterconv_h*afterconv_w),2,index.view(batch_size,in_channels*kh*kw,out_h*out_w)).view(batch_size,in_channels*kh*kw,out_h*out_w)
else:#in case of a valid mask use selection only on the mask locations
inp_unf = inp_unf[:,:,rng_h[mask>0],rng_w[mask>0]]
#Matrix mul
if self.bias is None:
#flt = self.weight.view(self.weight.size(0), -1).t()
#out_unf = inp_unf.transpose(2,1).matmul(flt).transpose(1, 2)
out_unf = oe.contract('bji,kj->bki',inp_unf,self.weight.view(self.weight.size(0), -1),backend='torch')
#print(((out_unf-out_unf1)**2).mean())
else:
#out_unf = oe.contract('bji,kj,b->bki',inp_unf,self.weight.view(self.weight.size(0), -1),self.bias,backend='torch')#+self.bias.view(1,-1,1)#still slow
out_unf = oe.contract('bji,kj->bki',inp_unf,self.weight.view(self.weight.size(0), -1),backend='torch')+self.bias.view(1,-1,1)#still slow
#self.flt = self.weight.view(self.weight.size(0), -1).t()
#out_unf = (inp_unf.transpose(1, 2).matmul(self.flt) + self.bias).transpose(1, 2)
if stride==1 or mask[0,0]==-1:# in case of no mask and stride==1
out = out_unf.view(batch_size,out_channels,out_h,out_w) #Fold
if stoch==False: #this is done outside for more clarity
out = F.avg_pool2d(out,self.stride,ceil_mode=True)
else:#in case of mask
out = torch.zeros(batch_size, out_channels,out_h,out_w,device=device)
out[:,:,mask>0] = out_unf
return out
def forward_slowwithbatch(self, input, selh=-torch.ones(1,1), selw=-torch.ones(1,1), mask=-torch.ones(1,1),stoch=True,stride=-1):
device=input.device
if stride==-1:
stride = self.stride
#stoch=True
if stoch==False:
stride=1 #test with real average pooling
batch_size, in_channels, in_h, in_w = input.shape
out_channels, in_channels, kh, kw = self.weight.shape
afterconv_h = in_h+2*self.padding-(kh-1) #size after conv
afterconv_w = in_w+2*self.padding-(kw-1)
if self.ceil_mode:
out_h = math.ceil(afterconv_h/stride)
out_w = math.ceil(afterconv_w/stride)
else:
out_h = math.floor(afterconv_h/stride)
out_w = math.floor(afterconv_w/stride)
unfold = torch.nn.Unfold(kernel_size=(kh, kw), dilation=self.dilation, padding=self.padding, stride=1)
inp_unf = unfold(input)
if stride!=1:
inp_unf = inp_unf.view(batch_size,in_channels,kh*kw,afterconv_h,afterconv_w)
if selh[0,0]==-1:
resth = (out_h*stride)-afterconv_h
restw = (out_w*stride)-afterconv_w
selh = torch.randint(stride,(in_channels,out_h,out_w), device=device)
selw = torch.randint(stride,(in_channels,out_h,out_w), device=device)
# print(selh.shape)
if resth!=0:
# Case: what happens when (stride-resth)==0 ?
selh[-1,:]=selh[-1,:]%(stride-resth);selh[:,-1]=selh[:,-1]%(stride-restw)
selw[-1,:]=selw[-1,:]%(stride-resth);selw[:,-1]=selw[:,-1]%(stride-restw)
rng_h = selh + torch.arange(0,out_h*stride,stride,device=device).view(1,-1,1)
rng_w = selw + torch.arange(0,out_w*stride,stride,device=device).view(1,1,-1)
selc = torch.arange(0,in_channels,device=input.device).view(in_channels,1,1).repeat(1,out_h,out_w)
if mask[0,0]==-1:
inp_unf = inp_unf.transpose(1,2)[:,:,selc,rng_h,rng_w].transpose(2,1).reshape(batch_size,in_channels*kh*kw,-1)
else:
inp_unf = inp_unf[:,:,rng_h[mask>0],rng_w[mask>0]]
#Matrix mul
if self.bias is None:
out_unf = inp_unf.transpose(1, 2).matmul(self.weight.view(self.weight.size(0), -1).t()).transpose(1, 2)
else:
out_unf = (inp_unf.transpose(1, 2).matmul(self.weight.view(self.weight.size(0), -1).t()) + self.bias).transpose(1, 2)
if stride==1 or mask[0,0]==-1:
out = out_unf.view(batch_size,out_channels,out_h,out_w) #Fold
# if stoch==False:
# out = F.avg_pool2d(out,self.stride,ceil_mode=True)
else:
out = torch.zeros(batch_size, out_channels,out_h,out_w,device=device)
out[:,:,mask>0] = out_unf
return out
def comp(self,h,w,mask=-torch.ones(1,1)):
out_h = (h-(self.kernel_size))/self.stride
out_w = (w-(self.kernel_size))/self.stride
if self.ceil_mode:
out_h = math.ceil(out_h)
out_w = math.ceil(out_w)
else:
out_h = math.floor(out_h)
out_w = math.floor(out_w)
if mask[0,0]==-1:
comp = self.weight.numel()*out_h*out_w
else:
comp = self.weight.numel()*(mask>0).sum()
return comp
def sample_slow(self,h,w,mask):
'''
h, w : forward input shape
mask : mask of output used in computation
'''
stride = self.stride
out_channels, in_channels, kh, kw = self.weight.shape
device=mask.device
#Shape after simple forward conv ?
afterconv_h = h+2*self.padding-(kh-1)
afterconv_w = w+2*self.padding-(kw-1)
# print(afterconv_h)
# print(afterconv_h/stride)
#Shape after forward ? (== mask.shape ?) #Padding and dilation not taken into account?
if self.ceil_mode:
out_h = math.ceil(afterconv_h/stride)
out_w = math.ceil(afterconv_w/stride)
else:
out_h = math.floor(afterconv_h/stride)
out_w = math.floor(afterconv_w/stride)
selh = torch.randint(stride,(out_h,out_w), device=device) #selh/selw are required below by rng_h/rng_w
selw = torch.randint(stride,(out_h,out_w), device=device)
resth = (out_h*stride)-afterconv_h #remainder from ceil/floor, 0 or 1
restw = (out_w*stride)-afterconv_w
# print('rest', resth, restw)
if resth!=0:
selh[-1,:]=selh[-1,:]%(stride-resth);selh[:,-1]=selh[:,-1]%(stride-restw)
selw[-1,:]=selw[-1,:]%(stride-resth);selw[:,-1]=selw[:,-1]%(stride-restw)
maskh = (out_h)*stride
maskw = (out_w)*stride
# print('mask', maskh, maskw)
rng_h = selh + torch.arange(0,out_h*stride,stride,device=device).view(-1,1)
rng_w = selw + torch.arange(0,out_w*stride,stride,device=device)
# rng_w = selw + torch.arange(0,out_w*self.stride,self.stride,device=device).view(-1,1)
nmask = torch.zeros((maskh,maskw),device=device)
nmask[rng_h,rng_w] = 1
#rmask = mask * nmask
dmask = self.pooldeconv(mask.float().view(1,1,mask.shape[0],mask.shape[1]))
rmask = nmask * dmask
#rmask = rmask[:,:,:out_h,:out_w]
# print('rmask', rmask.shape)
fmask = self.deconv(rmask)
# print('fmask', fmask.shape)
fmask = fmask[0,0]
return selh,selw,fmask.long()
def sample(self,in_h,in_w,batch_size,device,mask=-torch.ones(1,1)):
'''
h, w : forward input shape
mask : mask of output used in computation
'''
stride = self.stride
out_channels, in_channels, kh, kw = self.weight.shape
#device=mask.device
#Shape after simple forward conv ?
afterconv_h = in_h+2*self.padding-(kh-1) #size after conv
afterconv_w = in_w+2*self.padding-(kw-1)
#Shape after forward ? (== mask.shape ?) #Padding and dilation not taken into account?
if self.ceil_mode:
out_h = math.ceil(afterconv_h/stride)
out_w = math.ceil(afterconv_w/stride)
else:
out_h = math.floor(afterconv_h/stride)
out_w = math.floor(afterconv_w/stride)
sel = torch.randint(stride*stride,(out_h,out_w), device=device)
if self.ceil_mode: #in case of ceil_mode need to select only the good locations for the last regions
resth = (out_h*stride)-afterconv_h
restw = (out_w*stride)-afterconv_w
if resth!=0:
print("stride",stride,"str-rest",stride-resth,stride-restw)
print('before',sel[-1],sel[:,-1])
sel[-1] = (sel[-1]//stride)%(stride-resth)*stride+(sel[-1]%stride)
sel[:,-1] = (sel[:,-1]%stride)%(stride-restw)+sel[:,-1]//stride*stride
print('after',sel[-1],sel[:,-1])
input()
rng = torch.arange(0,out_h*stride*out_w*stride,stride*stride,device=device).view(out_h,out_w)
index = sel+rng
#index = index.repeat(batch_size,in_channels*kh*kw,1,1)
#inp_unf = torch.gather(inp_unf.view(batch_size,in_channels*kh*kw,afterconv_h*afterconv_w),2,index.view(batch_size,in_channels*kh*kw,out_h*out_w)).view(batch_size,in_channels*kh*kw,out_h*out_w)
if mask[0,0]!=-1:
maskh = (out_h)*stride
maskw = (out_w)*stride
nmask = torch.zeros((maskh,maskw),device=device).view(-1)
#inp_unf = torch.gather(inp_unf.view(batch_size,in_channels*kh*kw,afterconv_h*afterconv_w),2,index.view(batch_size,in_channels*kh*kw,out_h*out_w)).view(batch_size,in_channels*kh*kw,out_h*out_w)
nmask[index] = 1
#rmask = mask * nmask
dmask = self.pooldeconv(mask.float().view(1,1,mask.shape[0],mask.shape[1]))
rmask = nmask.view(1,1,maskh,maskw) * dmask
#rmask = rmask[:,:,:out_h,:out_w]
# print('rmask', rmask.shape)
fmask = self.deconv(rmask)
# print('fmask', fmask.shape)
mask = fmask[0,0].long()
return index,mask#.long()
def get_mask(self,in_h,in_w,index,device,mask=-torch.ones(1,1)):
#index: flat sampled positions for each output location, as returned by sample()
stride = self.stride
afterconv_h,afterconv_w,out_h,out_w = self.get_size(in_h,in_w,stride)
maskh = out_h*stride
maskw = out_w*stride
nmask = torch.zeros((maskh,maskw),device=device).view(-1)
nmask[index] = 1
dmask = self.pooldeconv(mask.float().view(1,1,mask.shape[0],mask.shape[1]))
rmask = nmask.view(1,1,maskh,maskw) * dmask
fmask = self.deconv(rmask)
mask = fmask[0,0].long()
return mask
def get_size(self,in_h,in_w,stride=-1):
if stride==-1:
stride = self.stride
out_channels, in_channels, kh, kw = self.weight.shape
afterconv_h = in_h+2*self.padding-(kh-1) #size after conv
afterconv_w = in_w+2*self.padding-(kw-1)
if self.ceil_mode: #ceil_mode = true, default mode for strided conv
out_h = math.ceil(afterconv_h/stride)
out_w = math.ceil(afterconv_w/stride)
else: #ceil_mode = false default mode for pooling
out_h = math.floor(afterconv_h/stride)
out_w = math.floor(afterconv_w/stride)
#newh=math.floor(((h + 2*self.padding - self.dilation*(self.kernel_size-1) - 1)/self.stride) + 1)
#neww=math.floor(((w + 2*self.padding - self.dilation*(self.kernel_size-1) - 1)/self.stride) + 1)
return afterconv_h,afterconv_w,out_h,out_w
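# Minimal smoke test for the stochastic layers above; the sizes are arbitrary
# illustrative assumptions (requires torch and opt_einsum):
if __name__ == "__main__":
    x = torch.randn(2, 3, 32, 32)
    conv = SConv2dAvg(in_channels=3, out_channels=8, kernel_size=3, stride=2)
    out = conv(x)  # stochastic sampling: one location per stride-2 window
    print(out.shape)  # torch.Size([2, 8, 15, 15]); 3x3 conv: 32 -> 30, then /2
    out_full = conv(x, stoch=False)  # stride forced to 1; no pooling applied
    print(out_full.shape)  # torch.Size([2, 8, 30, 30])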
# === test/test_api.py | prz-ziaja/streaming_project | Python | BSD-3-Clause | 7,455 bytes ===
import unittest
from selenium import webdriver
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
class TestLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.get("http://localhost")
def testCorrectLoginIncorrectPassword(self):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
username_field.send_keys("test")
password_field = self.driver.find_element_by_id("password")
password_field.clear()
password_field.send_keys("wrongpass")
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Login']").click()
msg_field = self.driver.find_elements_by_xpath("//div[@class='msg']")
msg_field_text = msg_field.pop().text
assert msg_field_text == 'Incorrect username/password!'
def testCorrectLoginEmptyPassword(self):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
username_field.send_keys("test")
password_field = self.driver.find_element_by_id("password")
password_field.clear()
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Login']").click()
assert password_field.get_attribute("validationMessage") == "Please fill out this field."
def testEmptyLoginCorrectPassword(self):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
password_field = self.driver.find_element_by_id("password")
password_field.clear()
password_field.send_keys("test")
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Login']").click()
assert username_field.get_attribute("validationMessage") == "Please fill out this field."
def testSucceedingLogin(self):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
username_field.send_keys("test")
password_field = self.driver.find_element_by_id("password")
password_field.clear()
password_field.send_keys("test")
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Login']").click()
assert WebDriverWait(self.driver, 5).until(EC.title_is("Home"))
def tearDown(self):
self.driver.close()
class TestRegister(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.get("http://127.0.0.1/pythonlogin/register")
def testEmptyUsernameCorrectPasswordCorrectEmail(self):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
password_field = self.driver.find_element_by_id("password")
password_field.clear()
password_field.send_keys("test")
email_field = self.driver.find_element_by_id("email")
email_field.clear()
email_field.send_keys("test@test.com")
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Register']").click()
assert username_field.get_attribute("validationMessage") == "Please fill out this field."
def testCorrectUsernameEmptyPasswordCorrectEmail(self):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
username_field.send_keys("test")
password_field = self.driver.find_element_by_id("password")
password_field.clear()
email_field = self.driver.find_element_by_id("email")
email_field.clear()
email_field.send_keys("test@test.com")
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Register']").click()
assert password_field.get_attribute("validationMessage") == "Please fill out this field."
def testCorrectUsernameCorrectPasswordEmptyEmail(self):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
username_field.send_keys("test")
password_field = self.driver.find_element_by_id("password")
password_field.clear()
password_field.send_keys("pass")
email_field = self.driver.find_element_by_id("email")
email_field.clear()
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Register']").click()
assert email_field.get_attribute("validationMessage") == "Please fill out this field."
def testCorrectUsernameCorrectPasswordIncorrectEmail(self):
"""
"test@test", "@test.com", "test@@test.com", "test.com@"
"""
emails_to_test = ["test@test", "@test.com", "test@@test.com", "test.com@"]
for email in emails_to_test:
assert self.checkMail(email)
def checkMail(self, email):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
username_field.send_keys("testuser")
password_field = self.driver.find_element_by_id("password")
password_field.clear()
password_field.send_keys("testuser")
email_field = self.driver.find_element_by_id("email")
email_field.clear()
email_field.send_keys(email)
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Register']").click()
msg_field = self.driver.find_elements_by_xpath("//div[@class='msg']")
msg_field_text = msg_field.pop().text
return msg_field_text == 'Invalid email address!'
def testUsedCredentials(self):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
username_field.send_keys("test")
password_field = self.driver.find_element_by_id("password")
password_field.clear()
password_field.send_keys("test")
email_field = self.driver.find_element_by_id("email")
email_field.clear()
email_field.send_keys("test@test.com")
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Register']").click()
msg_field = self.driver.find_elements_by_xpath("//div[@class='msg']")
msg_field_text = msg_field.pop().text
assert msg_field_text == 'Account already exists!'
def testIncorrectUsernameCorrectPasswordCorrectEmail(self):
"""
"ad@m2020", "test2020!"
"""
usernames_to_test = ["ad@m2020", "test2020!", "test%", "test20&"]
for username in usernames_to_test:
assert self.checkUsername(username)
def checkUsername(self, username):
username_field = self.driver.find_element_by_id("username")
username_field.clear()
username_field.send_keys(username)
password_field = self.driver.find_element_by_id("password")
password_field.clear()
password_field.send_keys("pass")
email_field = self.driver.find_element_by_id("email")
email_field.clear()
email_field.send_keys("test@example.com")
self.driver.find_element_by_xpath("//input[@type='submit' and @value='Register']").click()
msg_field = self.driver.find_elements_by_xpath("//div[@class='msg']")
msg_field_text = msg_field.pop().text
return msg_field_text == 'Username must contain only characters and numbers!'
def tearDown(self):
self.driver.close()
if __name__ == '__main__':
unittest.main()
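# Note: the find_element_by_* / find_elements_by_* helpers used above were
# removed in Selenium 4. On current Selenium the equivalent locator calls
# look like this (sketch only):
#
#     from selenium.webdriver.common.by import By
#     username_field = self.driver.find_element(By.ID, "username")
#     submit = self.driver.find_element(
#         By.XPATH, "//input[@type='submit' and @value='Login']")
#     messages = self.driver.find_elements(By.XPATH, "//div[@class='msg']")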
# === wrappers/serial/calibration/iterators.py | ChrisHad/algorithm-reference-library | Python | Apache-2.0 | 220 bytes | 22 stars ===
""" GainTable iterators for iterating through a GainTable
"""
from processing_components.calibration.iterators import gaintable_null_iter
from processing_components.calibration.iterators import gaintable_timeslice_iter
# === tests/core/eth-module/conftest.py | bhardwajRahul/web3.py | Python | MIT | 3,872 bytes ===
import json
import pytest
@pytest.fixture
def account_password():
return "this-is-not-a-secure-password"
@pytest.fixture
def extra_accounts(w3, account_password):
num_accounts_to_create = 10 - len(w3.eth.accounts)
for i in range(num_accounts_to_create):
w3.personal.new_account(account_password)
return w3.eth.accounts
CONTRACT_CODE = b"606060405261022e806100126000396000f360606040523615610074576000357c01000000000000000000000000000000000000000000000000000000009004806316216f391461007657806361bc221a146100995780637cf5dab0146100bc578063a5f3c23b146100e8578063d09de08a1461011d578063dcf537b11461014057610074565b005b610083600480505061016c565b6040518082815260200191505060405180910390f35b6100a6600480505061017f565b6040518082815260200191505060405180910390f35b6100d26004808035906020019091905050610188565b6040518082815260200191505060405180910390f35b61010760048080359060200190919080359060200190919050506101ea565b6040518082815260200191505060405180910390f35b61012a6004805050610201565b6040518082815260200191505060405180910390f35b6101566004808035906020019091905050610217565b6040518082815260200191505060405180910390f35b6000600d9050805080905061017c565b90565b60006000505481565b6000816000600082828250540192505081905550600060005054905080507f3496c3ede4ec3ab3686712aa1c238593ea6a42df83f98a5ec7df9834cfa577c5816040518082815260200191505060405180910390a18090506101e5565b919050565b6000818301905080508090506101fb565b92915050565b600061020d6001610188565b9050610214565b90565b60006007820290508050809050610229565b91905056" # noqa: E501
CONTRACT_RUNTIME = b"0x60606040523615610074576000357c01000000000000000000000000000000000000000000000000000000009004806316216f391461007657806361bc221a146100995780637cf5dab0146100bc578063a5f3c23b146100e8578063d09de08a1461011d578063dcf537b11461014057610074565b005b610083600480505061016c565b6040518082815260200191505060405180910390f35b6100a6600480505061017f565b6040518082815260200191505060405180910390f35b6100d26004808035906020019091905050610188565b6040518082815260200191505060405180910390f35b61010760048080359060200190919080359060200190919050506101ea565b6040518082815260200191505060405180910390f35b61012a6004805050610201565b6040518082815260200191505060405180910390f35b6101566004808035906020019091905050610217565b6040518082815260200191505060405180910390f35b6000600d9050805080905061017c565b90565b60006000505481565b6000816000600082828250540192505081905550600060005054905080507f3496c3ede4ec3ab3686712aa1c238593ea6a42df83f98a5ec7df9834cfa577c5816040518082815260200191505060405180910390a18090506101e5565b919050565b6000818301905080508090506101fb565b92915050565b600061020d6001610188565b9050610214565b90565b60006007820290508050809050610229565b91905056" # noqa: E501
CONTRACT_ABI = json.loads('[{"constant":false,"inputs":[],"name":"return13","outputs":[{"name":"result","type":"int256"}],"type":"function"},{"constant":true,"inputs":[],"name":"counter","outputs":[{"name":"","type":"uint256"}],"type":"function"},{"constant":false,"inputs":[{"name":"amt","type":"uint256"}],"name":"increment","outputs":[{"name":"result","type":"uint256"}],"type":"function"},{"constant":false,"inputs":[{"name":"a","type":"int256"},{"name":"b","type":"int256"}],"name":"add","outputs":[{"name":"result","type":"int256"}],"type":"function"},{"constant":false,"inputs":[],"name":"increment","outputs":[{"name":"","type":"uint256"}],"type":"function"},{"constant":false,"inputs":[{"name":"a","type":"int256"}],"name":"multiply7","outputs":[{"name":"result","type":"int256"}],"type":"function"},{"anonymous":false,"inputs":[{"indexed":false,"name":"value","type":"uint256"}],"name":"Increased","type":"event"}]') # noqa: E501
@pytest.fixture(scope="session")
def MATH_CODE():
return CONTRACT_CODE
@pytest.fixture(scope="session")
def MATH_RUNTIME():
return CONTRACT_RUNTIME
@pytest.fixture(scope="session")
def MATH_ABI():
return CONTRACT_ABI
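For illustration, a minimal sketch of a test that could consume the fixtures above. The exact receipt accessor differs across web3.py versions and the bytecode blob may need a "0x" prefix, so treat this as an assumption-laden example rather than one of the suite's real tests.
def test_math_contract_deploys(w3, MATH_ABI, MATH_CODE):
    # Build a contract factory from the session-scoped fixtures.
    factory = w3.eth.contract(abi=MATH_ABI, bytecode=MATH_CODE.decode())
    tx_hash = factory.constructor().transact({'from': w3.eth.accounts[0]})
    # web3 >= 5 spells this wait_for_transaction_receipt.
    receipt = w3.eth.waitForTransactionReceipt(tx_hash)
    math = w3.eth.contract(address=receipt['contractAddress'], abi=MATH_ABI)
    # return13() is a constant function declared in the ABI above.
    assert math.functions.return13().call() == 13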
| 92.190476 | 1,185 | 0.858213 | 198 | 3,872 | 16.681818 | 0.318182 | 0.019982 | 0.028762 | 0.034817 | 0.131396 | 0.131396 | 0.09779 | 0.085982 | 0.057523 | 0.057523 | 0 | 0.564812 | 0.0297 | 3,872 | 41 | 1,186 | 94.439024 | 0.314347 | 0.008264 | 0 | 0.217391 | 0 | 0.043478 | 0.838634 | 0.83316 | 0 | 1 | 0.291449 | 0 | 0 | 1 | 0.217391 | false | 0.173913 | 0.086957 | 0.173913 | 0.521739 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 11 |
96aba4b12d29287abc97b16c300ae1a65d328c6e | 16,348 | py | Python | tests/test_etree.py | IamTheLime/xmlschema_hotfixes | 6ebccce863c7c4d044ba207fa2fa65dcda523264 | ["MIT"] | null | null | null | tests/test_etree.py | IamTheLime/xmlschema_hotfixes | 6ebccce863c7c4d044ba207fa2fa65dcda523264 | ["MIT"] | null | null | null | tests/test_etree.py | IamTheLime/xmlschema_hotfixes | 6ebccce863c7c4d044ba207fa2fa65dcda523264 | ["MIT"] | null | null | null |
#!/usr/bin/env python
#
# Copyright (c), 2018-2020, SISSA (International School for Advanced Studies).
# All rights reserved.
# This file is distributed under the terms of the MIT License.
# See the file 'LICENSE' in the root directory of the present
# distribution, or http://opensource.org/licenses/MIT.
#
# @author Davide Brunato <brunato@sissa.it>
#
import unittest
import os
import platform
import lxml.etree
from xmlschema.etree import ElementTree, PyElementTree, ParseError, \
SafeXMLParser, etree_tostring, etree_getpath, etree_iter_location_hints, \
etree_iterpath, etree_elements_assert_equal, prune_etree
TEST_CASES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_cases/')
def casepath(relative_path):
return os.path.join(TEST_CASES_DIR, relative_path)
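A quick illustration of the helper above; the filename is hypothetical.
# casepath('resources/sample.xml')
# -> os.path.join(TEST_CASES_DIR, 'resources/sample.xml')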
class TestElementTree(unittest.TestCase):
def test_element_string_serialization(self):
self.assertRaises(TypeError, etree_tostring, '<element/>')
elem = ElementTree.Element('element')
self.assertEqual(etree_tostring(elem), '<element />')
self.assertEqual(etree_tostring(elem, xml_declaration=True), '<element />')
self.assertEqual(etree_tostring(elem, encoding='us-ascii'), b'<element />')
self.assertEqual(etree_tostring(elem, encoding='us-ascii', indent=' '),
b' <element />')
self.assertEqual(etree_tostring(elem, encoding='us-ascii', xml_declaration=True),
b'<?xml version="1.0" encoding="us-ascii"?>\n<element />')
self.assertEqual(etree_tostring(elem, encoding='ascii'),
b"<?xml version='1.0' encoding='ascii'?>\n<element />")
self.assertEqual(etree_tostring(elem, encoding='ascii', xml_declaration=False),
b'<element />')
self.assertEqual(etree_tostring(elem, encoding='utf-8'), b'<element />')
self.assertEqual(etree_tostring(elem, encoding='utf-8', xml_declaration=True),
b'<?xml version="1.0" encoding="utf-8"?>\n<element />')
self.assertEqual(etree_tostring(elem, encoding='iso-8859-1'),
b"<?xml version='1.0' encoding='iso-8859-1'?>\n<element />")
self.assertEqual(etree_tostring(elem, encoding='iso-8859-1', xml_declaration=False),
b"<element />")
self.assertEqual(etree_tostring(elem, method='html'), '<element></element>')
self.assertEqual(etree_tostring(elem, method='text'), '')
root = ElementTree.XML('<root>\n'
' text1\n'
' <elem>text2</elem>\n'
'</root>')
self.assertEqual(etree_tostring(root, method='text'), '\n text1\n text2')
def test_py_element_string_serialization(self):
elem = PyElementTree.Element('element')
self.assertEqual(etree_tostring(elem), '<element />')
self.assertEqual(etree_tostring(elem, xml_declaration=True), '<element />')
self.assertEqual(etree_tostring(elem, encoding='us-ascii'), b'<element />')
self.assertEqual(etree_tostring(elem, encoding='us-ascii', xml_declaration=True),
b'<?xml version="1.0" encoding="us-ascii"?>\n<element />')
self.assertEqual(etree_tostring(elem, encoding='ascii'),
b"<?xml version='1.0' encoding='ascii'?>\n<element />")
self.assertEqual(etree_tostring(elem, encoding='ascii', xml_declaration=False),
b'<element />')
self.assertEqual(etree_tostring(elem, encoding='utf-8'), b'<element />')
self.assertEqual(etree_tostring(elem, encoding='utf-8', xml_declaration=True),
b'<?xml version="1.0" encoding="utf-8"?>\n<element />')
self.assertEqual(etree_tostring(elem, encoding='iso-8859-1'),
b"<?xml version='1.0' encoding='iso-8859-1'?>\n<element />")
self.assertEqual(etree_tostring(elem, encoding='iso-8859-1', xml_declaration=False),
b"<element />")
self.assertEqual(etree_tostring(elem, method='html'), '<element></element>')
self.assertEqual(etree_tostring(elem, method='text'), '')
root = PyElementTree.XML('<root>\n'
' text1\n'
' <elem>text2</elem>\n'
'</root>')
self.assertEqual(etree_tostring(root, method='text'), '\n text1\n text2')
def test_lxml_element_string_serialization(self):
elem = lxml.etree.Element('element')
self.assertEqual(etree_tostring(elem), '<element/>')
self.assertEqual(etree_tostring(elem, xml_declaration=True), '<element/>')
self.assertEqual(etree_tostring(elem, encoding='us-ascii'), b'<element/>')
self.assertEqual(etree_tostring(elem, encoding='us-ascii', xml_declaration=True),
b'<?xml version="1.0" encoding="us-ascii"?>\n<element/>')
self.assertEqual(etree_tostring(elem, encoding='ascii'), b'<element/>')
self.assertEqual(etree_tostring(elem, encoding='ascii', xml_declaration=True),
b'<?xml version="1.0" encoding="ascii"?>\n<element/>')
self.assertEqual(etree_tostring(elem, encoding='utf-8'), b'<element/>')
self.assertEqual(etree_tostring(elem, encoding='utf-8', xml_declaration=True),
b'<?xml version="1.0" encoding="utf-8"?>\n<element/>')
self.assertEqual(etree_tostring(elem, encoding='iso-8859-1'),
b"<?xml version='1.0' encoding='iso-8859-1'?>\n<element/>")
self.assertEqual(etree_tostring(elem, encoding='iso-8859-1', xml_declaration=False),
b"<element/>")
self.assertEqual(etree_tostring(elem, method='html'), '<element></element>')
self.assertEqual(etree_tostring(elem, method='text'), '')
root = lxml.etree.XML('<root>\n'
' text1\n'
' <elem>text2</elem>\n'
'</root>')
self.assertEqual(etree_tostring(root, method='text'), '\n text1\n text2')
def test_defuse_xml_entities(self):
xml_file = casepath('resources/with_entity.xml')
elem = ElementTree.parse(xml_file).getroot()
self.assertEqual(elem.text, 'abc')
parser = SafeXMLParser(target=PyElementTree.TreeBuilder())
with self.assertRaises(PyElementTree.ParseError) as ctx:
ElementTree.parse(xml_file, parser=parser)
self.assertEqual("Entities are forbidden (entity_name='e')", str(ctx.exception))
def test_defuse_xml_external_entities(self):
xml_file = casepath('resources/external_entity.xml')
with self.assertRaises(ParseError) as ctx:
ElementTree.parse(xml_file)
self.assertIn("undefined entity &ee", str(ctx.exception))
parser = SafeXMLParser(target=PyElementTree.TreeBuilder())
with self.assertRaises(PyElementTree.ParseError) as ctx:
ElementTree.parse(xml_file, parser=parser)
self.assertEqual("Entities are forbidden (entity_name='ee')", str(ctx.exception))
def test_defuse_xml_unused_external_entities(self):
xml_file = casepath('resources/unused_external_entity.xml')
elem = ElementTree.parse(xml_file).getroot()
self.assertEqual(elem.text, 'abc')
parser = SafeXMLParser(target=PyElementTree.TreeBuilder())
with self.assertRaises(PyElementTree.ParseError) as ctx:
ElementTree.parse(xml_file, parser=parser)
self.assertEqual("Entities are forbidden (entity_name='ee')", str(ctx.exception))
def test_defuse_xml_unparsed_entities(self):
xml_file = casepath('resources/unparsed_entity.xml')
parser = SafeXMLParser(target=PyElementTree.TreeBuilder())
with self.assertRaises(PyElementTree.ParseError) as ctx:
ElementTree.parse(xml_file, parser=parser)
self.assertEqual("Unparsed entities are forbidden (entity_name='logo_file')",
str(ctx.exception))
def test_defuse_xml_unused_unparsed_entities(self):
xml_file = casepath('resources/unused_unparsed_entity.xml')
elem = ElementTree.parse(xml_file).getroot()
self.assertIsNone(elem.text)
parser = SafeXMLParser(target=PyElementTree.TreeBuilder())
with self.assertRaises(PyElementTree.ParseError) as ctx:
ElementTree.parse(xml_file, parser=parser)
self.assertEqual("Unparsed entities are forbidden (entity_name='logo_file')",
str(ctx.exception))
def test_etree_iterpath(self):
root = ElementTree.XML('<a><b1><c1/><c2/></b1><b2/><b3><c3/></b3></a>')
items = list(etree_iterpath(root))
self.assertListEqual(items, [
(root, '.'), (root[0], './b1'), (root[0][0], './b1/c1'),
(root[0][1], './b1/c2'), (root[1], './b2'), (root[2], './b3'),
(root[2][0], './b3/c3')
])
self.assertListEqual(items, list(etree_iterpath(root, tag='*')))
self.assertListEqual(items, list(etree_iterpath(root, path='')))
self.assertListEqual(items, list(etree_iterpath(root, path=None)))
self.assertListEqual(list(etree_iterpath(root, path='/')), [
(root, '/'), (root[0], '/b1'), (root[0][0], '/b1/c1'),
(root[0][1], '/b1/c2'), (root[1], '/b2'), (root[2], '/b3'),
(root[2][0], '/b3/c3')
])
def test_etree_getpath(self):
root = ElementTree.XML('<a><b1><c1/><c2/></b1><b2/><b3><c3/></b3></a>')
self.assertEqual(etree_getpath(root, root), '.')
self.assertEqual(etree_getpath(root[0], root), './b1')
self.assertEqual(etree_getpath(root[2][0], root), './b3/c3')
self.assertEqual(etree_getpath(root[0], root, parent_path=True), '.')
self.assertEqual(etree_getpath(root[2][0], root, parent_path=True), './b3')
self.assertIsNone(etree_getpath(root, root[0]))
self.assertIsNone(etree_getpath(root[0], root[1]))
self.assertIsNone(etree_getpath(root, root, parent_path=True))
def test_etree_elements_assert_equal(self):
e1 = ElementTree.XML('<a><b1>text<c1 a="1"/></b1>\n<b2/><b3/></a>\n')
e2 = ElementTree.XML('<a><b1>text<c1 a="1"/></b1>\n<b2/><b3/></a>\n')
self.assertIsNone(etree_elements_assert_equal(e1, e1))
self.assertIsNone(etree_elements_assert_equal(e1, e2))
e2 = lxml.etree.XML('<a><b1>text<c1 a="1"/></b1>\n<b2/><b3/></a>\n')
self.assertIsNone(etree_elements_assert_equal(e1, e2))
e2 = ElementTree.XML('<a><b1>text<c1 a="1"/></b1>\n<b2/><b3/><b4/></a>\n')
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2)
self.assertIn("has lesser children than <Element 'a'", str(ctx.exception))
e2 = ElementTree.XML('<a><b1>text <c1 a="1"/></b1>\n<b2/><b3/></a>\n')
self.assertIsNone(etree_elements_assert_equal(e1, e2, strict=False))
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2)
self.assertIn("texts differ: 'text' != 'text '", str(ctx.exception))
e2 = ElementTree.XML('<a><b1>text<c1 a="1"/></b1>\n<b2>text</b2><b3/></a>\n')
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2, strict=False)
self.assertIn("texts differ: None != 'text'", str(ctx.exception))
e2 = ElementTree.XML('<a><b1>text<c1 a="1"/></b1>\n<b2/><b3/></a>')
self.assertIsNone(etree_elements_assert_equal(e1, e2))
e2 = ElementTree.XML('<a><b1>text<c1 a="1"/></b1><b2/><b3/></a>\n')
self.assertIsNone(etree_elements_assert_equal(e1, e2, strict=False))
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2)
self.assertIn(r"tails differ: '\n' != None", str(ctx.exception))
e2 = ElementTree.XML('<a><b1>text<c1 a="1 "/></b1>\n<b2/><b3/></a>\n')
self.assertIsNone(etree_elements_assert_equal(e1, e2, strict=False))
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2)
self.assertIn("attributes differ: {'a': '1'} != {'a': '1 '}", str(ctx.exception))
e2 = ElementTree.XML('<a><b1>text<c1 a="2 "/></b1>\n<b2/><b3/></a>\n')
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2, strict=False)
self.assertIn("attribute 'a' values differ: '1' != '2'", str(ctx.exception))
e2 = ElementTree.XML('<a><!--comment--><b1>text<c1 a="1"/></b1>\n<b2/><b3/></a>\n')
self.assertIsNone(etree_elements_assert_equal(e1, e2))
self.assertIsNone(etree_elements_assert_equal(e1, e2, skip_comments=False))
e2 = lxml.etree.XML('<a><!--comment--><b1>text<c1 a="1"/></b1>\n<b2/><b3/></a>\n')
self.assertIsNone(etree_elements_assert_equal(e1, e2))
e1 = ElementTree.XML('<a><b1>+1</b1></a>')
e2 = ElementTree.XML('<a><b1>+ 1 </b1></a>')
self.assertIsNone(etree_elements_assert_equal(e1, e2, strict=False))
e1 = ElementTree.XML('<a><b1>+1</b1></a>')
e2 = ElementTree.XML('<a><b1>+1.1 </b1></a>')
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2, strict=False)
self.assertIn("texts differ: '+1' != '+1.1 '", str(ctx.exception))
e1 = ElementTree.XML('<a><b1>1</b1></a>')
e2 = ElementTree.XML('<a><b1>true </b1></a>')
self.assertIsNone(etree_elements_assert_equal(e1, e2, strict=False))
self.assertIsNone(etree_elements_assert_equal(e2, e1, strict=False))
e2 = ElementTree.XML('<a><b1>false </b1></a>')
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2, strict=False)
self.assertIn("texts differ: '1' != 'false '", str(ctx.exception))
e1 = ElementTree.XML('<a><b1> 0</b1></a>')
self.assertIsNone(etree_elements_assert_equal(e1, e2, strict=False))
self.assertIsNone(etree_elements_assert_equal(e2, e1, strict=False))
e2 = ElementTree.XML('<a><b1>true </b1></a>')
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2, strict=False)
self.assertIn("texts differ: ' 0' != 'true '", str(ctx.exception))
e1 = ElementTree.XML('<a><b1>text<c1 a="1"/></b1>\n<b2/><b3/></a>\n')
e2 = ElementTree.XML('<a><b1>text<c1 a="1"/>tail</b1>\n<b2/><b3/></a>\n')
with self.assertRaises(AssertionError) as ctx:
etree_elements_assert_equal(e1, e2, strict=False)
self.assertIn("tails differ: None != 'tail'", str(ctx.exception))
def test_iter_location_hints(self):
elem = ElementTree.XML(
"""<root xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://example.com/xmlschema/ns-A import-case4a.xsd"/>"""
)
self.assertListEqual(
list(etree_iter_location_hints(elem)),
[('http://example.com/xmlschema/ns-A', 'import-case4a.xsd')]
)
elem = ElementTree.XML(
"""<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="schema.xsd"/>"""
)
self.assertListEqual(
list(etree_iter_location_hints(elem)), [('', 'schema.xsd')]
)
def test_prune_etree(self):
root = ElementTree.XML('<a><b1><c1/><c2/></b1><b2/><b3><c3/></b3></a>')
prune_etree(root, selector=lambda x: x.tag == 'b1')
self.assertListEqual([e.tag for e in root.iter()], ['a', 'b2', 'b3', 'c3'])
root = ElementTree.XML('<a><b1><c1/><c2/></b1><b2/><b3><c3/></b3></a>')
prune_etree(root, selector=lambda x: x.tag.startswith('c'))
self.assertListEqual([e.tag for e in root.iter()], ['a', 'b1', 'b2', 'b3'])
if __name__ == '__main__':
header_template = "ElementTree tests for xmlschema with Python {} on {}"
header = header_template.format(platform.python_version(), platform.platform())
print('{0}\n{1}\n{0}'.format("*" * len(header), header))
unittest.main()
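For reference, a minimal sketch (reusing the imports at the top of this file) of the defusing pattern the tests above exercise: wrap parsing in SafeXMLParser so any entity declaration raises instead of expanding. The helper name is hypothetical.
def parse_defused(xml_path):
    """Parse an XML file, rejecting entity and unparsed-entity declarations."""
    parser = SafeXMLParser(target=PyElementTree.TreeBuilder())
    # Raises PyElementTree.ParseError on forbidden constructs.
    return ElementTree.parse(xml_path, parser=parser)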
| 48.510386 | 92 | 0.611267 | 2,018 | 16,348 | 4.822597 | 0.098117 | 0.080148 | 0.092478 | 0.115084 | 0.835286 | 0.808878 | 0.797781 | 0.756473 | 0.724723 | 0.696363 | 0 | 0.030654 | 0.213788 | 16,348 | 336 | 93 | 48.654762 | 0.726523 | 0.020431 | 0 | 0.568548 | 0 | 0.072581 | 0.212778 | 0.078449 | 0 | 0 | 0 | 0 | 0.483871 | 1 | 0.056452 | false | 0 | 0.024194 | 0.004032 | 0.08871 | 0.004032 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
736e8a5afc0497b595c669ffd21a2850c72d56c7 | 21,192 | py | Python | tests/terraform/checks/resource/azure/test_WAFSpecifiedModeAppGW.py | jamesholland-uk/checkov | d73fd4bd7096d48ab3434a92a177bcc55605460a | ["Apache-2.0"] | 4,013 | 2019-12-09T13:16:54.000Z | 2022-03-31T14:31:01.000Z | tests/terraform/checks/resource/azure/test_WAFSpecifiedModeAppGW.py | jamesholland-uk/checkov | d73fd4bd7096d48ab3434a92a177bcc55605460a | ["Apache-2.0"] | 1,258 | 2019-12-17T09:55:51.000Z | 2022-03-31T19:17:17.000Z | tests/terraform/checks/resource/azure/test_WAFSpecifiedModeAppGW.py | jamesholland-uk/checkov | d73fd4bd7096d48ab3434a92a177bcc55605460a | ["Apache-2.0"] | 638 | 2019-12-19T08:57:38.000Z | 2022-03-30T21:38:37.000Z |
import unittest
import hcl2
from checkov.common.models.enums import CheckResult
from checkov.terraform.checks.resource.azure.AppGWUseWAFMode import check
class TestAppGWUseWAFMode(unittest.TestCase):
def test_failure1(self):
hcl_res = hcl2.loads("""
resource "azurerm_web_application_firewall_policy" "example" {
name = "example-wafpolicy"
resource_group_name = azurerm_resource_group.example.name
location = azurerm_resource_group.example.location
custom_rules {
name = "Rule1"
priority = 1
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24", "10.0.0.0/24"]
}
action = "Block"
}
custom_rules {
name = "Rule2"
priority = 2
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24"]
}
match_conditions {
match_variables {
variable_name = "RequestHeaders"
selector = "UserAgent"
}
operator = "Contains"
negation_condition = false
match_values = ["Windows"]
}
action = "Block"
}
policy_settings {
enabled = false
request_body_check = true
file_upload_limit_in_mb = 100
max_request_body_size_in_kb = 128
}
managed_rules {
exclusion {
match_variable = "RequestHeaderNames"
selector = "x-company-secret-header"
selector_match_operator = "Equals"
}
exclusion {
match_variable = "RequestCookieNames"
selector = "too-tasty"
selector_match_operator = "EndsWith"
}
managed_rule_set {
type = "OWASP"
version = "3.1"
rule_group_override {
rule_group_name = "REQUEST-920-PROTOCOL-ENFORCEMENT"
disabled_rules = [
"920300",
"920440"
]
}
}
}
}
""")
resource_conf = hcl_res['resource'][0]['azurerm_web_application_firewall_policy']['example']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.FAILED, scan_result)
def test_failure2(self):
hcl_res = hcl2.loads("""
resource "azurerm_web_application_firewall_policy" "example" {
name = "example-wafpolicy"
resource_group_name = azurerm_resource_group.example.name
location = azurerm_resource_group.example.location
custom_rules {
name = "Rule1"
priority = 1
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24", "10.0.0.0/24"]
}
action = "Block"
}
custom_rules {
name = "Rule2"
priority = 2
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24"]
}
match_conditions {
match_variables {
variable_name = "RequestHeaders"
selector = "UserAgent"
}
operator = "Contains"
negation_condition = false
match_values = ["Windows"]
}
action = "Block"
}
policy_settings {
enabled = false
mode = "Prevention"
request_body_check = true
file_upload_limit_in_mb = 100
max_request_body_size_in_kb = 128
}
managed_rules {
exclusion {
match_variable = "RequestHeaderNames"
selector = "x-company-secret-header"
selector_match_operator = "Equals"
}
exclusion {
match_variable = "RequestCookieNames"
selector = "too-tasty"
selector_match_operator = "EndsWith"
}
managed_rule_set {
type = "OWASP"
version = "3.1"
rule_group_override {
rule_group_name = "REQUEST-920-PROTOCOL-ENFORCEMENT"
disabled_rules = [
"920300",
"920440"
]
}
}
}
}
""")
resource_conf = hcl_res['resource'][0]['azurerm_web_application_firewall_policy']['example']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.FAILED, scan_result)
def test_success1(self):
hcl_res = hcl2.loads("""
resource "azurerm_web_application_firewall_policy" "example" {
name = "example-wafpolicy"
resource_group_name = azurerm_resource_group.example.name
location = azurerm_resource_group.example.location
custom_rules {
name = "Rule1"
priority = 1
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24", "10.0.0.0/24"]
}
action = "Block"
}
custom_rules {
name = "Rule2"
priority = 2
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24"]
}
match_conditions {
match_variables {
variable_name = "RequestHeaders"
selector = "UserAgent"
}
operator = "Contains"
negation_condition = false
match_values = ["Windows"]
}
action = "Block"
}
policy_settings {
enabled = true
mode = "Prevention"
request_body_check = true
file_upload_limit_in_mb = 100
max_request_body_size_in_kb = 128
}
managed_rules {
exclusion {
match_variable = "RequestHeaderNames"
selector = "x-company-secret-header"
selector_match_operator = "Equals"
}
exclusion {
match_variable = "RequestCookieNames"
selector = "too-tasty"
selector_match_operator = "EndsWith"
}
managed_rule_set {
type = "OWASP"
version = "3.1"
rule_group_override {
rule_group_name = "REQUEST-920-PROTOCOL-ENFORCEMENT"
disabled_rules = [
"920300",
"920440"
]
}
}
}
}
""")
resource_conf = hcl_res['resource'][0]['azurerm_web_application_firewall_policy']['example']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.PASSED, scan_result)
def test_success2(self):
hcl_res = hcl2.loads("""
resource "azurerm_web_application_firewall_policy" "example" {
name = "example-wafpolicy"
resource_group_name = azurerm_resource_group.example.name
location = azurerm_resource_group.example.location
custom_rules {
name = "Rule1"
priority = 1
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24", "10.0.0.0/24"]
}
action = "Block"
}
custom_rules {
name = "Rule2"
priority = 2
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24"]
}
match_conditions {
match_variables {
variable_name = "RequestHeaders"
selector = "UserAgent"
}
operator = "Contains"
negation_condition = false
match_values = ["Windows"]
}
action = "Block"
}
policy_settings {
mode = "Prevention"
request_body_check = true
file_upload_limit_in_mb = 100
max_request_body_size_in_kb = 128
}
managed_rules {
exclusion {
match_variable = "RequestHeaderNames"
selector = "x-company-secret-header"
selector_match_operator = "Equals"
}
exclusion {
match_variable = "RequestCookieNames"
selector = "too-tasty"
selector_match_operator = "EndsWith"
}
managed_rule_set {
type = "OWASP"
version = "3.1"
rule_group_override {
rule_group_name = "REQUEST-920-PROTOCOL-ENFORCEMENT"
disabled_rules = [
"920300",
"920440"
]
}
}
}
}
""")
resource_conf = hcl_res['resource'][0]['azurerm_web_application_firewall_policy']['example']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.PASSED, scan_result)
def test_success3(self):
hcl_res = hcl2.loads("""
resource "azurerm_web_application_firewall_policy" "example" {
name = "example-wafpolicy"
resource_group_name = azurerm_resource_group.example.name
location = azurerm_resource_group.example.location
custom_rules {
name = "Rule1"
priority = 1
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24", "10.0.0.0/24"]
}
action = "Block"
}
custom_rules {
name = "Rule2"
priority = 2
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24"]
}
match_conditions {
match_variables {
variable_name = "RequestHeaders"
selector = "UserAgent"
}
operator = "Contains"
negation_condition = false
match_values = ["Windows"]
}
action = "Block"
}
policy_settings {
enabled = true
request_body_check = true
file_upload_limit_in_mb = 100
max_request_body_size_in_kb = 128
}
managed_rules {
exclusion {
match_variable = "RequestHeaderNames"
selector = "x-company-secret-header"
selector_match_operator = "Equals"
}
exclusion {
match_variable = "RequestCookieNames"
selector = "too-tasty"
selector_match_operator = "EndsWith"
}
managed_rule_set {
type = "OWASP"
version = "3.1"
rule_group_override {
rule_group_name = "REQUEST-920-PROTOCOL-ENFORCEMENT"
disabled_rules = [
"920300",
"920440"
]
}
}
}
}
""")
resource_conf = hcl_res['resource'][0]['azurerm_web_application_firewall_policy']['example']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.PASSED, scan_result)
def test_success4(self):
hcl_res = hcl2.loads("""
resource "azurerm_web_application_firewall_policy" "example" {
name = "example-wafpolicy"
resource_group_name = azurerm_resource_group.example.name
location = azurerm_resource_group.example.location
custom_rules {
name = "Rule1"
priority = 1
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24", "10.0.0.0/24"]
}
action = "Block"
}
custom_rules {
name = "Rule2"
priority = 2
rule_type = "MatchRule"
match_conditions {
match_variables {
variable_name = "RemoteAddr"
}
operator = "IPMatch"
negation_condition = false
match_values = ["192.168.1.0/24"]
}
match_conditions {
match_variables {
variable_name = "RequestHeaders"
selector = "UserAgent"
}
operator = "Contains"
negation_condition = false
match_values = ["Windows"]
}
action = "Block"
}
managed_rules {
exclusion {
match_variable = "RequestHeaderNames"
selector = "x-company-secret-header"
selector_match_operator = "Equals"
}
exclusion {
match_variable = "RequestCookieNames"
selector = "too-tasty"
selector_match_operator = "EndsWith"
}
managed_rule_set {
type = "OWASP"
version = "3.1"
rule_group_override {
rule_group_name = "REQUEST-920-PROTOCOL-ENFORCEMENT"
disabled_rules = [
"920300",
"920440"
]
}
}
}
}
""")
resource_conf = hcl_res['resource'][0]['azurerm_web_application_firewall_policy']['example']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.PASSED, scan_result)
if __name__ == '__main__':
unittest.main()
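A plausible reading of the rule these six cases pin down: only configurations that set `enabled = false` inside `policy_settings` FAIL, while a missing block or a missing key PASSES. The sketch below illustrates that observed behaviour; it is not checkov's actual AppGWUseWAFMode implementation, and the helper name is hypothetical.
def waf_policy_should_pass(resource_conf):
    # hcl2 wraps scalar attributes in single-element lists, hence the [0]s.
    settings_blocks = resource_conf.get('policy_settings') or [{}]
    enabled = settings_blocks[0].get('enabled', [True])
    # Explicitly disabled policies fail; everything else passes.
    return enabled[0] is not False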
| 38.183784 | 100 | 0.370942 | 1,272 | 21,192 | 5.862421 | 0.095126 | 0.03138 | 0.048277 | 0.070001 | 0.96634 | 0.96634 | 0.96634 | 0.96634 | 0.96634 | 0.96634 | 0 | 0.037029 | 0.570545 | 21,192 | 554 | 101 | 38.252708 | 0.782332 | 0 | 0 | 0.767742 | 0 | 0.012903 | 0.920772 | 0.085126 | 0 | 0 | 0 | 0 | 0.012903 | 1 | 0.012903 | false | 0.008602 | 0.008602 | 0 | 0.023656 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
737687cea3b916c5a6f47e7ffa32a149f8e1856d | 2,897 | py | Python | generate_emails.py | AkselTroan/Email-guesser-validator | ab702c61b93ef18beeaa157141cd7a6eba18c3ab | ["MIT"] | 1 | 2022-02-02T07:42:24.000Z | 2022-02-02T07:42:24.000Z | generate_emails.py | AkselTroan/Email-guesser-validator | ab702c61b93ef18beeaa157141cd7a6eba18c3ab | ["MIT"] | null | null | null | generate_emails.py | AkselTroan/Email-guesser-validator | ab702c61b93ef18beeaa157141cd7a6eba18c3ab | ["MIT"] | null | null | null |
def generate_emails(firstname, lastname, domain, year):
    """Generate common email-address permutations for a person.

    Note: the original assigned fi/la with [:1] slices, which merely
    duplicated the single-initial variants f/l; two- and three-letter
    prefixes look like the intent behind the names, so [:2]/[:3] are
    used here (assumption). The repeated append blocks are folded into
    loops over separators; the set of addresses is unchanged apart from
    the slice fix, though the ordering differs slightly.
    """
    first = firstname.lower()
    last = lastname.lower()
    f, l = first[0], last[0]          # single initials
    fi, la = first[:2], last[:2]      # first two letters
    fir, las = first[:3], last[:3]    # first three letters
    er = year[2:]                     # two-digit year, e.g. '1998' -> '98'

    seps = ('', '.', '-', '_')
    pairs = [(first, last), (f, last), (first, l), (last, f),
             (fir, las), (fi, la)]

    emails = [f'{first}@{domain}', f'{last}@{domain}']
    for a, b in pairs:
        for sep in seps:
            emails.append(f'{a}{sep}{b}@{domain}')

    # Adding year
    for base in (first, last):
        for sep in seps:
            emails.append(f'{base}{sep}{er}@{domain}')
    for a, b in pairs:
        for sep in seps:
            emails.append(f'{a}{sep}{b}{sep}{er}@{domain}')
    return emails
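Example usage; the name, domain and year below are illustrative.
if __name__ == '__main__':
    for address in generate_emails('Ada', 'Lovelace', 'example.com', '1998'):
        print(address)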
| 32.550562 | 55 | 0.577149 | 409 | 2,897 | 4.036675 | 0.056235 | 0.421563 | 0.456693 | 0.644458 | 0.892187 | 0.892187 | 0.892187 | 0.892187 | 0.892187 | 0.892187 | 0 | 0.002803 | 0.138074 | 2,897 | 88 | 56 | 32.920455 | 0.65839 | 0.003797 | 0 | 0 | 1 | 0 | 0.432686 | 0.256072 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014286 | false | 0 | 0 | 0 | 0.028571 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
73801c0f662d6e52744b1f1d8b249f5ea11d25e1 | 20,501 | py | Python | tests/compute/test_conversions.py | pfackeldey/vector | 87e9e942f2a9ae09a3e250e12f37505eb22e25ea | ["BSD-3-Clause"] | 40 | 2020-03-26T13:28:36.000Z | 2022-03-23T22:14:35.000Z | tests/compute/test_conversions.py | pfackeldey/vector | 87e9e942f2a9ae09a3e250e12f37505eb22e25ea | ["BSD-3-Clause"] | 66 | 2020-02-14T13:32:18.000Z | 2022-03-29T14:35:31.000Z | tests/compute/test_conversions.py | pfackeldey/vector | 87e9e942f2a9ae09a3e250e12f37505eb22e25ea | ["BSD-3-Clause"] | 14 | 2020-01-29T22:03:33.000Z | 2022-03-16T02:46:25.000Z |
# Copyright (c) 2019-2021, Jonas Eschle, Jim Pivarski, Eduardo Rodrigues, and Henry Schreiner.
#
# Distributed under the 3-clause BSD license, see accompanying file LICENSE
# or https://github.com/scikit-hep/vector for details.
import pytest
import vector._backends.numpy_
import vector._backends.object_
def test_VectorObject2D():
v = vector.obj(x=1, y=2)
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.object_.VectorObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.object_.VectorObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(0)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.object_.VectorObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(0)
assert tv.t == pytest.approx(0)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.object_.VectorObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.object_.VectorObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert getattr(tv, longitudinal) == pytest.approx(0)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.object_.VectorObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert getattr(tv, longitudinal) == pytest.approx(0)
assert getattr(tv, temporal) == pytest.approx(0)
def test_MomentumObject2D():
v = vector.obj(px=1, py=2)
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.object_.MomentumObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.object_.MomentumObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(0)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.object_.MomentumObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(0)
assert tv.t == pytest.approx(0)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.object_.MomentumObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.object_.MomentumObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert getattr(tv, longitudinal) == pytest.approx(0)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.object_.MomentumObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert getattr(tv, longitudinal) == pytest.approx(0)
assert getattr(tv, temporal) == pytest.approx(0)
def test_VectorNumpy2D():
v = vector.array({"x": [1, 1, 1], "y": [2, 2, 2]})
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(0)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(0)
assert tv.t[0] == pytest.approx(0)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert getattr(tv, longitudinal)[0] == pytest.approx(0)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert getattr(tv, longitudinal)[0] == pytest.approx(0)
assert getattr(tv, temporal)[0] == pytest.approx(0)
def test_MomentumNumpy2D():
v = vector.array({"px": [1, 1, 1], "py": [2, 2, 2]})
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(0)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(0)
assert tv.t[0] == pytest.approx(0)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert getattr(tv, longitudinal)[0] == pytest.approx(0)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert getattr(tv, longitudinal)[0] == pytest.approx(0)
assert getattr(tv, temporal)[0] == pytest.approx(0)
def test_VectorObject3D():
v = vector.obj(x=1, y=2, z=3)
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.object_.VectorObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.object_.VectorObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.object_.VectorObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
assert tv.t == pytest.approx(0)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.object_.VectorObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.object_.VectorObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.object_.VectorObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
assert getattr(tv, temporal) == pytest.approx(0)
def test_MomentumObject3D():
v = vector.obj(px=1, py=2, pz=3)
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.object_.MomentumObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.object_.MomentumObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.object_.MomentumObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
assert tv.t == pytest.approx(0)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.object_.MomentumObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.object_.MomentumObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.object_.MomentumObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
assert getattr(tv, temporal) == pytest.approx(0)
def test_VectorNumpy3D():
v = vector.array({"x": [1, 1, 1], "y": [2, 2, 2], "z": [3, 3, 3]})
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
assert tv.t[0] == pytest.approx(0)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
assert getattr(tv, temporal)[0] == pytest.approx(0)
def test_MomentumNumpy3D():
v = vector.array({"px": [1, 1, 1], "py": [2, 2, 2], "pz": [3, 3, 3]})
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
assert tv.t[0] == pytest.approx(0)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
assert getattr(tv, temporal)[0] == pytest.approx(0)
def test_VectorObject4D():
v = vector.obj(x=1, y=2, z=3, t=4)
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.object_.VectorObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.object_.VectorObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.object_.VectorObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
assert tv.t == pytest.approx(4)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.object_.VectorObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.object_.VectorObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.object_.VectorObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
assert tv.t == pytest.approx(4)
def test_MomentumObject4D():
v = vector.obj(px=1, py=2, pz=3, E=4)
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.object_.MomentumObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.object_.MomentumObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.object_.MomentumObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
assert tv.t == pytest.approx(4)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.object_.MomentumObject2D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.object_.MomentumObject3D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.object_.MomentumObject4D)
assert tv.x == pytest.approx(1)
assert tv.y == pytest.approx(2)
assert tv.z == pytest.approx(3)
assert tv.t == pytest.approx(4)
def test_VectorNumpy4D():
v = vector.array({"x": [1, 1, 1], "y": [2, 2, 2], "z": [3, 3, 3], "t": [4, 4, 4]})
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
assert tv.t[0] == pytest.approx(4)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.numpy_.VectorNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
assert tv.t[0] == pytest.approx(4)
def test_MomentumNumpy4D():
v = vector.array(
{"px": [1, 1, 1], "py": [2, 2, 2], "pz": [3, 3, 3], "E": [4, 4, 4]}
)
tv = v.to_Vector2D()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
tv = v.to_Vector3D()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
tv = v.to_Vector4D()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
assert tv.t[0] == pytest.approx(4)
for azimuthal in "xy", "rhophi":
tv = getattr(v, "to_" + azimuthal)()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy2D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
for longitudinal in "z", "theta", "eta":
tv = getattr(v, "to_" + azimuthal + longitudinal)()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy3D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
for temporal in "t", "tau":
tv = getattr(v, "to_" + azimuthal + longitudinal + temporal)()
assert isinstance(tv, vector._backends.numpy_.MomentumNumpy4D)
assert tv.x[0] == pytest.approx(1)
assert tv.y[0] == pytest.approx(2)
assert tv.z[0] == pytest.approx(3)
assert tv.t[0] == pytest.approx(4)
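A small standalone illustration of the conversion API these tests exercise; to_xyzt is one of the generated "to_" + azimuthal + longitudinal + temporal methods covered above, and the function name here is hypothetical.
def demo_conversions():
    v = vector.obj(x=1, y=2)
    v3 = v.to_Vector3D()   # longitudinal component defaults to 0
    v4 = v.to_xyzt()       # 4D view: x, y, z and t all accessible
    assert v3.z == 0 and v4.t == 0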
| 41.668699 | 94 | 0.596703 | 2,699 | 20,501 | 4.446462 | 0.031123 | 0.215982 | 0.11699 | 0.143988 | 0.96467 | 0.964086 | 0.964086 | 0.961753 | 0.961753 | 0.956087 | 0 | 0.034415 | 0.25589 | 20,501 | 491 | 95 | 41.753564 | 0.752278 | 0.010682 | 0 | 0.931765 | 0 | 0 | 0.019036 | 0 | 0 | 0 | 0 | 0 | 0.677647 | 1 | 0.028235 | false | 0 | 0.007059 | 0 | 0.035294 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
7380a37d497be184e6dbc004044ff2d2dfc9bee5 | 39,566 | py | Python | tests/unit/command_line_route_handlers/test_frame_set_command_line_route_handler.py | zerofox-oss/deepstar | fe0fe12317975104fa6ff6c058d141f11e6e951d | ["BSD-3-Clause-Clear"] | 44 | 2019-08-09T16:14:27.000Z | 2022-02-10T06:54:35.000Z | tests/unit/command_line_route_handlers/test_frame_set_command_line_route_handler.py | zerofox-oss/deepstar | fe0fe12317975104fa6ff6c058d141f11e6e951d | ["BSD-3-Clause-Clear"] | 2 | 2020-09-26T00:05:52.000Z | 2021-03-22T13:27:36.000Z | tests/unit/command_line_route_handlers/test_frame_set_command_line_route_handler.py | zerofox-oss/deepstar | fe0fe12317975104fa6ff6c058d141f11e6e951d | ["BSD-3-Clause-Clear"] | 14 | 2019-08-19T16:47:32.000Z | 2022-03-04T03:57:27.000Z |
from io import StringIO
import json
import mock
import os
import shutil
import sys
import textwrap
import unittest
from deepstar.filesystem.frame_file import FrameFile
from deepstar.filesystem.frame_set_sub_dir import FrameSetSubDir
from deepstar.filesystem.transform_file import TransformFile
from deepstar.filesystem.transform_set_sub_dir import TransformSetSubDir
from deepstar.models.frame_model import FrameModel
from deepstar.models.frame_set_model import FrameSetModel
from deepstar.models.transform_model import TransformModel
from deepstar.models.transform_set_model import TransformSetModel
from deepstar.models.video_model import VideoModel
from deepstar.plugins.plugin import Plugin
from deepstar.util.command_line_route_handler_error import \
CommandLineRouteHandlerError
from deepstar.command_line_route_handlers \
.frame_set_command_line_route_handler \
import FrameSetCommandLineRouteHandler
from deepstar.command_line_route_handlers.video_command_line_route_handler \
import VideoCommandLineRouteHandler
from .. import deepstar_path
class TestFrameSetCommandLineRouteHandler(unittest.TestCase):
"""
This class tests the FrameSetCommandLineRouteHandler class.
"""
def test_list(self):
with deepstar_path():
video_model = VideoModel()
video_model.insert('test1', 'test2')
video_model.insert('test3', 'test4')
video_model.insert('test5', 'test6')
frame_set_model = FrameSetModel()
frame_set_model.insert(1)
frame_set_model.insert(2)
frame_set_model.insert(3)
args = ['main.py', 'list', 'frame_sets']
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
expected = textwrap.dedent('''
3 results
id | fk_videos
-------------
1 | 1
2 | 2
3 | 3''').strip()
self.assertEqual(actual, expected)
def test_select_curate_manual(self):
pass
def test_select_curate_manual_fails_to_select_a_frame_set(self):
with deepstar_path():
args = ['main.py', 'select', 'frame_sets', '1', 'curate', 'manual']
opts = {}
with self.assertRaises(CommandLineRouteHandlerError):
try:
FrameSetCommandLineRouteHandler().handle(args, opts)
except CommandLineRouteHandlerError as e:
self.assertEqual(e.message, 'Frame set with ID 00000001 not found') # noqa
raise e
def test_select_extract_one(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
class TestPlugin:
def frame_set_select_extract(self, frame_set_id, opts):
return TransformSetModel().insert('test', frame_set_id, None) # noqa
args = ['main.py', 'select', 'frame_sets', '1', 'extract', 'test'] # noqa
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(Plugin._map, {'frame_set_select_extract': {'test': TestPlugin}}): # noqa
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, 'transform_set_id=1, name=test, fk_frame_sets=1, fk_prev_transform_sets=None') # noqa
def test_select_extract_many(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.select_extract([1, 2, 3])
class TestPlugin:
def frame_set_select_extract(self, frame_set_id, opts):
return TransformSetModel().insert('test', frame_set_id, None) # noqa
args = ['main.py', 'select', 'frame_sets', '1-2,3', 'extract', 'test'] # noqa
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(Plugin._map, {'frame_set_select_extract': {'test': TestPlugin}}): # noqa
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
expected = 'transform_set_id=1, name=test, fk_frame_sets=1, ' \
'fk_prev_transform_sets=None\n' \
'transform_set_id=2, name=test, fk_frame_sets=2, ' \
'fk_prev_transform_sets=None\n' \
'transform_set_id=3, name=test, fk_frame_sets=3, ' \
'fk_prev_transform_sets=None'
self.assertEqual(actual, expected)
def test_select_extract_face(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
args = ['main.py', 'select', 'frame_sets', '1', 'extract', 'face'] # noqa
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, 'transform_set_id=1, name=face, fk_frame_sets=1, fk_prev_transform_sets=None') # noqa
# db
result = TransformSetModel().select(1)
self.assertEqual(result, (1, 'face', 1, None))
result = TransformModel().list(1)
self.assertEqual(len(result), 5)
t = list(result[0])
json.loads(t.pop(3))
self.assertEqual(t, [1, 1, 1, 0])
t = list(result[1])
json.loads(t.pop(3))
self.assertEqual(t, [2, 1, 2, 0])
t = list(result[2])
json.loads(t.pop(3))
self.assertEqual(t, [3, 1, 3, 0])
t = list(result[3])
json.loads(t.pop(3))
self.assertEqual(t, [4, 1, 4, 0])
t = list(result[4])
json.loads(t.pop(3))
self.assertEqual(t, [5, 1, 5, 0])
# files
p1 = TransformSetSubDir.path(1)
# transforms
self.assertTrue(os.path.isfile(TransformFile.path(p1, 1, 'jpg')))
self.assertTrue(os.path.isfile(TransformFile.path(p1, 2, 'jpg')))
self.assertTrue(os.path.isfile(TransformFile.path(p1, 3, 'jpg')))
self.assertTrue(os.path.isfile(TransformFile.path(p1, 4, 'jpg')))
self.assertTrue(os.path.isfile(TransformFile.path(p1, 5, 'jpg')))
def test_select_extract_transform_set(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
args = ['main.py', 'select', 'frame_sets', '1', 'extract', 'transform_set'] # noqa
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, 'transform_set_id=1, name=transform_set, fk_frame_sets=1, fk_prev_transform_sets=None') # noqa
# db
result = TransformSetModel().select(1)
self.assertEqual(result, (1, 'transform_set', 1, None))
result = TransformModel().list(1)
self.assertEqual(len(result), 5)
self.assertEqual(result[0], (1, 1, 1, None, 0))
self.assertEqual(result[1], (2, 1, 2, None, 0))
self.assertEqual(result[2], (3, 1, 3, None, 0))
self.assertEqual(result[3], (4, 1, 4, None, 0))
self.assertEqual(result[4], (5, 1, 5, None, 0))
# files
p1 = TransformSetSubDir.path(1)
# transforms
self.assertTrue(os.path.isfile(TransformFile.path(p1, 1, 'jpg')))
self.assertTrue(os.path.isfile(TransformFile.path(p1, 2, 'jpg')))
self.assertTrue(os.path.isfile(TransformFile.path(p1, 3, 'jpg')))
self.assertTrue(os.path.isfile(TransformFile.path(p1, 4, 'jpg')))
self.assertTrue(os.path.isfile(TransformFile.path(p1, 5, 'jpg')))
def test_select_extract_fails_to_select_a_frame_set(self):
with deepstar_path():
args = ['main.py', 'select', 'frame_sets', '1', 'extract', 'test']
opts = {}
with self.assertRaises(CommandLineRouteHandlerError):
try:
FrameSetCommandLineRouteHandler().handle(args, opts)
except CommandLineRouteHandlerError as e:
self.assertEqual(e.message, 'Frame set with ID 00000001 not found') # noqa
raise e
def test_select_extract_fails_to_get_a_plugin(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
args = ['main.py', 'select', 'frame_sets', '1', 'extract', 'test']
opts = {}
with self.assertRaises(CommandLineRouteHandlerError):
try:
FrameSetCommandLineRouteHandler().handle(args, opts)
except CommandLineRouteHandlerError as e:
self.assertEqual(e.message, "'test' is not a valid frame set extraction plugin name") # noqa
raise e
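# Deleting a frame set must cascade: the frame set row, all of its frame rows and its files on disk disappear.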
def test_delete_one(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
# files
self.assertTrue(os.path.exists(FrameSetSubDir.path(1)))
# db
frame_set_model = FrameSetModel()
self.assertIsNotNone(frame_set_model.select(1))
frame_model = FrameModel()
self.assertIsNotNone(frame_model.select(1))
self.assertIsNotNone(frame_model.select(2))
self.assertIsNotNone(frame_model.select(3))
self.assertIsNotNone(frame_model.select(4))
self.assertIsNotNone(frame_model.select(5))
args = ['main.py', 'delete', 'frame_sets', '1']
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, 'Frame set 1 was successfully deleted')
# db
self.assertIsNone(frame_set_model.select(1))
self.assertIsNone(frame_model.select(1))
self.assertIsNone(frame_model.select(2))
self.assertIsNone(frame_model.select(3))
self.assertIsNone(frame_model.select(4))
self.assertIsNone(frame_model.select(5))
# files
self.assertFalse(os.path.exists(FrameSetSubDir.path(1)))
def test_delete_many(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.select_extract([1, 2, 3])
args = ['main.py', 'delete', 'frame_sets', '1-2,3']
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
expected = textwrap.dedent('''
Frame set 1 was successfully deleted
Frame set 2 was successfully deleted
Frame set 3 was successfully deleted''').strip()
self.assertEqual(actual, expected)
def test_delete_fails_to_select_a_frame_set(self):
with deepstar_path():
args = ['main.py', 'delete', 'frame_sets', '1']
opts = {}
with self.assertRaises(CommandLineRouteHandlerError):
try:
FrameSetCommandLineRouteHandler().handle(args, opts)
except CommandLineRouteHandlerError as e:
self.assertEqual(e.message, 'Frame set with ID 00000001 not found') # noqa
raise e
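# Cloning copies every frame and thumbnail of frame set 1 into a new frame set 2 that keeps the original video as its parent (fk_videos=1).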
def test_select_clone_one(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
args = ['main.py', 'select', 'frame_sets', '1', 'clone']
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(os.environ, {'MODEL_LIST_LENGTH': '4'}):
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, 'frame_set_id=2, fk_videos=1')
# db
result = FrameSetModel().select(2)
self.assertEqual(result, (2, 1))
result = FrameModel().list(2)
self.assertEqual(len(result), 5)
self.assertEqual(result[0], (6, 2, 0))
self.assertEqual(result[1], (7, 2, 0))
self.assertEqual(result[2], (8, 2, 0))
self.assertEqual(result[3], (9, 2, 0))
self.assertEqual(result[4], (10, 2, 0))
# files
p1 = FrameSetSubDir.path(2)
# frames
self.assertTrue(os.path.isfile(FrameFile.path(p1, 6, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 7, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 8, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 9, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 10, 'jpg')))
# thumbnails
self.assertTrue(os.path.isfile(FrameFile.path(p1, 6, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 7, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 8, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 9, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 10, 'jpg', '192x192'))) # noqa
def test_select_clone_many(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.select_extract([1, 2, 3])
args = ['main.py', 'select', 'frame_sets', '1-2,3', 'clone']
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(os.environ, {'MODEL_LIST_LENGTH': '2'}):
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
expected = textwrap.dedent('''
frame_set_id=4, fk_videos=1
frame_set_id=5, fk_videos=2
frame_set_id=6, fk_videos=3''').strip()
self.assertEqual(actual, expected)
def test_select_clone_fails_to_select_a_frame_set(self):
with deepstar_path():
args = ['main.py', 'select', 'frame_sets', '1', 'clone']
opts = {}
with self.assertRaises(CommandLineRouteHandlerError):
try:
FrameSetCommandLineRouteHandler().handle(args, opts)
except CommandLineRouteHandlerError as e:
self.assertEqual(e.message, 'Frame set with ID 00000001 not found') # noqa
raise e
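# Merging concatenates frame sets 1-3 into a new frame set 4 with no parent video (fk_videos=None).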
def test_select_merge(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.select_extract([1, 2, 3])
args = ['main.py', 'select', 'frame_sets', '1-2,3', 'merge']
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(os.environ, {'MODEL_LIST_LENGTH': '4'}):
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, 'frame_set_id=4, fk_videos=None')
# db
result = FrameSetModel().select(4)
self.assertEqual(result, (4, None))
result = FrameModel().list(4)
self.assertEqual(len(result), 15)
self.assertEqual(result[0], (16, 4, 0))
self.assertEqual(result[1], (17, 4, 0))
self.assertEqual(result[2], (18, 4, 0))
self.assertEqual(result[3], (19, 4, 0))
self.assertEqual(result[4], (20, 4, 0))
self.assertEqual(result[5], (21, 4, 0))
self.assertEqual(result[6], (22, 4, 0))
self.assertEqual(result[7], (23, 4, 0))
self.assertEqual(result[8], (24, 4, 0))
self.assertEqual(result[9], (25, 4, 0))
self.assertEqual(result[10], (26, 4, 0))
self.assertEqual(result[11], (27, 4, 0))
self.assertEqual(result[12], (28, 4, 0))
self.assertEqual(result[13], (29, 4, 0))
self.assertEqual(result[14], (30, 4, 0))
# files
p1 = FrameSetSubDir.path(4)
# frames
self.assertTrue(os.path.isfile(FrameFile.path(p1, 16, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 17, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 18, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 19, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 20, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 21, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 22, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 23, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 24, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 25, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 26, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 27, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 28, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 29, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 30, 'jpg')))
# thumbnails
self.assertTrue(os.path.isfile(FrameFile.path(p1, 16, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 17, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 18, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 19, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 20, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 21, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 22, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 23, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 24, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 25, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 26, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 27, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 28, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 29, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 30, 'jpg', '192x192'))) # noqa
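# Frames flagged rejected=1 are skipped by merge, so only 14 of the 15 source frames are copied.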
def test_select_merge_rejected(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.insert_file(video_0001)
route_handler.select_extract([1, 2, 3])
FrameModel().update(15, rejected=1)
args = ['main.py', 'select', 'frame_sets', '1-2,3', 'merge']
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(os.environ, {'MODEL_LIST_LENGTH': '4'}):
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, 'frame_set_id=4, fk_videos=None')
# db
result = FrameSetModel().select(4)
self.assertEqual(result, (4, None))
result = FrameModel().list(4)
self.assertEqual(len(result), 14)
self.assertEqual(result[0], (16, 4, 0))
self.assertEqual(result[1], (17, 4, 0))
self.assertEqual(result[2], (18, 4, 0))
self.assertEqual(result[3], (19, 4, 0))
self.assertEqual(result[4], (20, 4, 0))
self.assertEqual(result[5], (21, 4, 0))
self.assertEqual(result[6], (22, 4, 0))
self.assertEqual(result[7], (23, 4, 0))
self.assertEqual(result[8], (24, 4, 0))
self.assertEqual(result[9], (25, 4, 0))
self.assertEqual(result[10], (26, 4, 0))
self.assertEqual(result[11], (27, 4, 0))
self.assertEqual(result[12], (28, 4, 0))
self.assertEqual(result[13], (29, 4, 0))
# files
p1 = FrameSetSubDir.path(4)
# frames
self.assertTrue(os.path.isfile(FrameFile.path(p1, 16, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 17, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 18, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 19, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 20, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 21, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 22, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 23, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 24, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 25, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 26, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 27, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 28, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 29, 'jpg')))
# thumbnails
self.assertTrue(os.path.isfile(FrameFile.path(p1, 16, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 17, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 18, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 19, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 20, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 21, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 22, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 23, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 24, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 25, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 26, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 27, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 28, 'jpg', '192x192'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(p1, 29, 'jpg', '192x192'))) # noqa
def test_select_merge_fails_to_select_a_frame_set(self):
with deepstar_path():
args = ['main.py', 'select', 'frame_sets', '1,2', 'merge']
opts = {}
with self.assertRaises(CommandLineRouteHandlerError):
try:
FrameSetCommandLineRouteHandler().handle(args, opts)
except CommandLineRouteHandlerError as e:
self.assertEqual(e.message, 'Frame set with ID 00000001 not found') # noqa
raise e
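# Inserting a directory of images builds a frame set from the image files; per the assertions below, the PNG inputs end up stored as .jpg frames alongside the JPEGs.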
def test_insert_images(self):
with deepstar_path():
image_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/image_0001.jpg' # noqa
image_0007 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/image_0007.png' # noqa
tmpdir = os.environ['DEEPSTAR_PATH'] + '/test'
os.mkdir(tmpdir)
shutil.copy(image_0001, os.path.join(tmpdir, 'image_0001.jpg'))
shutil.copy(image_0001, os.path.join(tmpdir, 'image_0002.jpg'))
shutil.copy(image_0007, os.path.join(tmpdir, 'image_0003.png'))
shutil.copy(image_0007, os.path.join(tmpdir, 'image_0004.png'))
args = ['main.py', 'insert', 'frame_sets', 'images', tmpdir]
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, 'frame_set_id=1, fk_videos=None')
# db
result = FrameSetModel().select(1)
self.assertEqual(result, (1, None))
result = FrameModel().list(1)
self.assertEqual(len(result), 4)
self.assertEqual(result[0], (1, 1, 0))
self.assertEqual(result[1], (2, 1, 0))
self.assertEqual(result[2], (3, 1, 0))
self.assertEqual(result[3], (4, 1, 0))
# files
p1 = FrameSetSubDir.path(1)
# frames
self.assertTrue(os.path.isfile(FrameFile.path(p1, 1, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 2, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 3, 'jpg')))
self.assertTrue(os.path.isfile(FrameFile.path(p1, 4, 'jpg')))
def test_insert_images_does_not_insert_non_images(self):
with deepstar_path():
tmpdir = os.environ['DEEPSTAR_PATH'] + '/test'
os.mkdir(tmpdir)
with open(os.path.join(tmpdir, 'test'), 'w') as file_:
file_.write('test')
with open(os.path.join(tmpdir, 'test.txt'), 'w') as file_:
file_.write('test')
args = ['main.py', 'insert', 'frame_sets', 'images', tmpdir]
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, 'frame_set_id=1, fk_videos=None')
# db
result = FrameSetModel().select(1)
self.assertEqual(result, (1, None))
result = FrameModel().list(1)
# no frames should have been created from the non-image files
self.assertEqual(len(result), 0)
def test_insert_images_fails_due_to_isdir_fails(self):
with deepstar_path():
args = ['main.py', 'insert', 'frame_sets', 'images', 'test']
opts = {}
with self.assertRaises(CommandLineRouteHandlerError):
try:
FrameSetCommandLineRouteHandler().handle(args, opts)
except CommandLineRouteHandlerError as e:
self.assertEqual(e.message, 'The path at test is not a directory') # noqa
raise e
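# Export writes each non-rejected frame of the selected frame sets into the target directory; the 'format' opt supplies a printf-style filename pattern.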
def test_select_export_dir_one(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
tmpdir = os.environ['DEEPSTAR_PATH'] + '/test'
os.mkdir(tmpdir)
args = ['main.py', 'select', 'frame_sets', '1', 'export', 'dir', tmpdir] # noqa
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(os.environ, {'MODEL_LIST_LENGTH': '2'}):
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, f'5 frames were successfully exported to {tmpdir}') # noqa
# frames
self.assertTrue(os.path.isfile(FrameFile.path(tmpdir, 1, 'jpg'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(tmpdir, 2, 'jpg'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(tmpdir, 3, 'jpg'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(tmpdir, 4, 'jpg'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(tmpdir, 5, 'jpg'))) # noqa
def test_select_export_dir_one_rejected(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
FrameModel().update(5, rejected=1)
tmpdir = os.environ['DEEPSTAR_PATH'] + '/test'
os.mkdir(tmpdir)
args = ['main.py', 'select', 'frame_sets', '1', 'export', 'dir', tmpdir] # noqa
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(os.environ, {'MODEL_LIST_LENGTH': '2'}):
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, f'4 frames were successfully exported to {tmpdir}') # noqa
# frames
self.assertTrue(os.path.isfile(FrameFile.path(tmpdir, 1, 'jpg'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(tmpdir, 2, 'jpg'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(tmpdir, 3, 'jpg'))) # noqa
self.assertTrue(os.path.isfile(FrameFile.path(tmpdir, 4, 'jpg'))) # noqa
def test_select_export_dir_one_format(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
tmpdir = os.environ['DEEPSTAR_PATH'] + '/test'
os.mkdir(tmpdir)
args = ['main.py', 'select', 'frame_sets', '1', 'export', 'dir', tmpdir] # noqa
opts = {'format': 'frames%04d.jpg'}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(os.environ, {'MODEL_LIST_LENGTH': '2'}):
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, f'5 frames were successfully exported to {tmpdir}') # noqa
# frames
self.assertTrue(os.path.isfile(os.path.join(tmpdir, 'frames%04d.jpg' % 1))) # noqa
self.assertTrue(os.path.isfile(os.path.join(tmpdir, 'frames%04d.jpg' % 2))) # noqa
self.assertTrue(os.path.isfile(os.path.join(tmpdir, 'frames%04d.jpg' % 3))) # noqa
self.assertTrue(os.path.isfile(os.path.join(tmpdir, 'frames%04d.jpg' % 4))) # noqa
self.assertTrue(os.path.isfile(os.path.join(tmpdir, 'frames%04d.jpg' % 5))) # noqa
def test_select_export_dir_many(self):
with deepstar_path():
with mock.patch.dict(os.environ, {'DEBUG_LEVEL': '0'}):
route_handler = VideoCommandLineRouteHandler()
video_0001 = os.path.dirname(os.path.realpath(__file__)) + '/../../support/video_0001.mp4' # noqa
route_handler.insert_file(video_0001)
route_handler.select_extract([1])
route_handler.select_extract([1])
route_handler.select_extract([1])
tmpdir = os.environ['DEEPSTAR_PATH'] + '/test'
os.mkdir(tmpdir)
args = ['main.py', 'select', 'frame_sets', '1,2-3', 'export', 'dir', tmpdir] # noqa
opts = {}
route_handler = FrameSetCommandLineRouteHandler()
try:
sys.stdout = StringIO()
with mock.patch.dict(os.environ, {'MODEL_LIST_LENGTH': '2'}):
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
# stdout
self.assertEqual(actual, f'15 frames were successfully exported to {tmpdir}') # noqa
def test_select_export_dir_fails_to_select_a_frame_set(self):
with deepstar_path():
args = ['main.py', 'select', 'frame_sets', '1', 'export', 'dir', 'test'] # noqa
opts = {}
with self.assertRaises(CommandLineRouteHandlerError):
try:
FrameSetCommandLineRouteHandler().handle(args, opts)
except CommandLineRouteHandlerError as e:
self.assertEqual(e.message, 'Frame set with ID 00000001 not found') # noqa
raise e
def test_usage(self):
with deepstar_path():
route_handler = FrameSetCommandLineRouteHandler()
args = ['main.py', 'usage', 'frame_sets']
opts = {}
try:
sys.stdout = StringIO()
route_handler.handle(args, opts)
actual = sys.stdout.getvalue().strip()
finally:
sys.stdout = sys.__stdout__
self.assertTrue('Usage - Frame Sets' in actual)
| 41.25756 | 132 | 0.567861 | 4,336 | 39,566 | 5.017066 | 0.048662 | 0.039441 | 0.071343 | 0.089179 | 0.901489 | 0.865542 | 0.847476 | 0.82702 | 0.811943 | 0.776915 | 0 | 0.044291 | 0.299828 | 39,566 | 958 | 133 | 41.300626 | 0.740967 | 0.020801 | 0 | 0.729532 | 0 | 0 | 0.106222 | 0.018321 | 0 | 0 | 0 | 0 | 0.299708 | 1 | 0.042398 | false | 0.001462 | 0.032164 | 0.002924 | 0.081871 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7380f1089af641acb08941dc4aadbb61dc31e2ea | 197 | py | Python | tests/test_unit/test_checker.py | tripcher/flake8-forbidden-func | 82d8e961f26ed8d3dcdfffd78b50d1de65d4f628 | ["MIT"] | null | null | null | tests/test_unit/test_checker.py | tripcher/flake8-forbidden-func | 82d8e961f26ed8d3dcdfffd78b50d1de65d4f628 | ["MIT"] | null | null | null | tests/test_unit/test_checker.py | tripcher/flake8-forbidden-func | 82d8e961f26ed8d3dcdfffd78b50d1de65d4f628 | ["MIT"] | null | null | null |
from __future__ import annotations
from flake8_forbidden_func.checker import FunctionChecker
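# Sanity check: the checker must advertise the exact name flake8 uses to register the plugin.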
def test__function_checker__check_name():
assert FunctionChecker.name == 'flake8-forbidden-func'
| 24.625 | 58 | 0.837563 | 23 | 197 | 6.652174 | 0.652174 | 0.196078 | 0.248366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011364 | 0.106599 | 197 | 7 | 59 | 28.142857 | 0.857955 | 0 | 0 | 0 | 0 | 0 | 0.106599 | 0.106599 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.25 | true | 0 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
73eeda642e53eff3ebf2aca58d7ff0563e316f13 | 58,130 | py | Python | benchmarks/import_cost/classes_100_with_10_invariants.py | kklein/icontract | 718ef1733cc2cce6d3c8f59a5a37de96f8be6664 | ["MIT"] | 244 | 2018-08-15T22:58:58.000Z | 2022-03-12T16:10:39.000Z | benchmarks/import_cost/classes_100_with_10_invariants.py | kklein/icontract | 718ef1733cc2cce6d3c8f59a5a37de96f8be6664 | ["MIT"] | 157 | 2018-08-29T21:36:47.000Z | 2022-02-14T19:30:24.000Z | benchmarks/import_cost/classes_100_with_10_invariants.py | kklein/icontract | 718ef1733cc2cce6d3c8f59a5a37de96f8be6664 | ["MIT"] | 23 | 2019-04-24T11:09:10.000Z | 2022-02-14T15:56:26.000Z |
#!/usr/bin/env python3
import icontract
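# Benchmark fixture: 100 identical classes, each guarded by ten icontract invariants, used to measure the import-time cost of contract instrumentation.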
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass0:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass1:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass2:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass3:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass4:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass5:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass6:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass7:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass8:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass9:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass10:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass11:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass12:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass13:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass14:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass15:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass16:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass17:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass18:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass19:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass20:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass21:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass22:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass23:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass24:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass25:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass26:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass27:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass28:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass29:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass30:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass31:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass32:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass33:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass34:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass35:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass36:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass37:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass38:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass39:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass40:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass41:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass42:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass43:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass44:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass45:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass46:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass47:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass48:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass49:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass50:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass51:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass52:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass53:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass54:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass55:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass56:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass57:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass58:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass59:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass60:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass61:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass62:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass63:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass64:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass65:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass66:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass67:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass68:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass69:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass70:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass71:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass72:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass73:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass74:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass75:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass76:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass77:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass78:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass79:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass80:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass81:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass82:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass83:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass84:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass85:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass86:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass87:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass88:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass89:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass90:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass91:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass92:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass93:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass94:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass95:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass96:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass97:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass98:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
@icontract.invariant(lambda self: self.x > 0)
@icontract.invariant(lambda self: self.x > 1)
@icontract.invariant(lambda self: self.x > 2)
@icontract.invariant(lambda self: self.x > 3)
@icontract.invariant(lambda self: self.x > 4)
@icontract.invariant(lambda self: self.x > 5)
@icontract.invariant(lambda self: self.x > 6)
@icontract.invariant(lambda self: self.x > 7)
@icontract.invariant(lambda self: self.x > 8)
@icontract.invariant(lambda self: self.x > 9)
class SomeClass99:
def __init__(self) -> None:
self.x = 100
def some_func(self) -> None:
pass
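# ---------------------------------------------------------------------------
# Editor's sketch (not part of the benchmark file above): the stacked
# @icontract.invariant decorators all re-check self.x after __init__ and
# after every public method call.  A minimal illustration of the failure
# mode they guard against -- assumes icontract 2.x is installed.
import icontract

@icontract.invariant(lambda self: self.x > 9)
class TinyInvariantExample:
    def __init__(self) -> None:
        self.x = 100

    def break_it(self) -> None:
        self.x = 0  # invariant x > 9 is checked when this method returns

try:
    TinyInvariantExample().break_it()
except icontract.ViolationError as err:
    print("invariant violated as expected:", err)
# ---------------------------------------------------------------------------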
| 32.24071
| 45
| 0.700516
| 8,506
| 58,130
| 4.728545
| 0.015048
| 0.136744
| 0.596703
| 0.696154
| 0.972129
| 0.972129
| 0.972129
| 0.972129
| 0.972129
| 0.972129
| 0
| 0.030412
| 0.156614
| 58,130
| 1,802
| 46
| 32.258602
| 0.789989
| 0.000361
| 0
| 0.932712
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133245
| false
| 0.066622
| 0.000666
| 0
| 0.200533
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 10
|
fb58b9032e5a12183fda70bdf58c7b7da06ce45e
| 27,479
|
py
|
Python
|
tests/python/relay/test_pass_inline.py
|
XiaoSong9905/tvm
|
48940f697e15d5b50fa1f032003e6c700ae1e423
|
[
"Apache-2.0"
] | 4,640
|
2017-08-17T19:22:15.000Z
|
2019-11-04T15:29:46.000Z
|
tests/python/relay/test_pass_inline.py
|
XiaoSong9905/tvm
|
48940f697e15d5b50fa1f032003e6c700ae1e423
|
[
"Apache-2.0"
] | 3,022
|
2020-11-24T14:02:31.000Z
|
2022-03-31T23:55:31.000Z
|
tests/python/relay/test_pass_inline.py
|
XiaoSong9905/tvm
|
48940f697e15d5b50fa1f032003e6c700ae1e423
|
[
"Apache-2.0"
] | 1,352
|
2017-08-17T19:30:38.000Z
|
2019-11-04T16:09:29.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, missing-docstring, too-many-statements
import pytest
import tvm
from tvm import relay
def get_recursive_count_loop():
mod = tvm.IRModule({})
sum_up = relay.GlobalVar("sum_up")
i = relay.var("i", shape=[], dtype="int32")
sb = relay.ScopeBuilder()
with sb.if_scope(relay.equal(i, relay.const(0, dtype="int32"))):
sb.ret(i)
with sb.else_scope():
one_less = relay.subtract(i, relay.const(1, dtype="int32"))
rec_call = relay.Call(sum_up, [one_less])
sb.ret(relay.add(rec_call, i))
func = relay.Function([i], sb.get(), ret_type=relay.TensorType([], "int32"))
func = func.with_attr("Inline", tvm.tir.IntImm("int32", 1))
mod[sum_up] = func
iarg = relay.var("i", shape=[], dtype="int32")
mod["main"] = relay.Function([iarg], sum_up(iarg))
return mod, sum_up
def test_call_chain_inline_leaf():
"""Test when only leaf call is inlined.
The call graph is like the following:
main
/ \
g1 g2
/
g11(inline)
"""
def get_mod():
mod = tvm.IRModule({})
x11 = relay.var("x11", shape=(3, 5))
g11 = relay.GlobalVar("g11")
fn11 = relay.Function([x11], x11)
fn11 = fn11.with_attr("Inline", tvm.tir.IntImm("int32", 1))
mod[g11] = fn11
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
sb = relay.ScopeBuilder()
sb.ret(x1 + y1 + g11(x1))
fn1 = relay.Function([x1, y1], sb.get())
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = g1(p0, p1)
call_fn2 = g2(p2, p3)
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
def expected():
mod = tvm.IRModule({})
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
sb = relay.ScopeBuilder()
sb.ret(x1 + y1 + x1)
fn1 = relay.Function([x1, y1], sb.get())
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = g1(p0, p1)
call_fn2 = g2(p2, p3)
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
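# Editor's sketch: every test in this file repeats the same three-step
# recipe -- build a relay.Function, tag it with the "Inline" attribute,
# then run relay.transform.Inline() over the module.  A hypothetical
# helper condensing the pattern (mark_inline is not a TVM API, just an
# illustration of the attribute convention used throughout):
def mark_inline(fn):
    """Tag a relay.Function so relay.transform.Inline() will inline it."""
    return fn.with_attr("Inline", tvm.tir.IntImm("int32", 1))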
def test_call_chain_inline_multiple_levels():
"""Test when only leaf call is inlined.
The call graph is like the following:
main
/ \
g1(inline) g2
/
g11(inline)
"""
def get_mod():
mod = tvm.IRModule({})
x11 = relay.var("x11", shape=(3, 5))
g11 = relay.GlobalVar("g11")
fn11 = relay.Function([x11], x11)
fn11 = fn11.with_attr("Inline", tvm.tir.IntImm("int32", 1))
mod[g11] = fn11
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
sb = relay.ScopeBuilder()
sb.ret(x1 + y1 + g11(x1))
fn1 = relay.Function([x1, y1], sb.get())
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = g1(p0, p1)
call_fn2 = g2(p2, p3)
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
def expected():
mod = tvm.IRModule({})
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = p0 + p1 + p0
call_fn2 = g2(p2, p3)
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
def test_call_chain_inline_multiple_levels_extern_compiler():
"""Test when only leaf call is inlined.
The call graph is like the following:
main
/ \
g1(inline) g2
/
g11(inline, external compiler)
"""
def get_mod():
mod = tvm.IRModule({})
x11 = relay.var("x11", shape=(3, 5))
g11 = relay.GlobalVar("g11")
fn11 = relay.Function([x11], x11)
fn11 = fn11.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn11 = fn11.with_attr("Compiler", "a")
mod[g11] = fn11
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
sb = relay.ScopeBuilder()
sb.ret(x1 + y1 + g11(x1))
fn1 = relay.Function([x1, y1], sb.get())
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = g1(p0, p1)
call_fn2 = g2(p2, p3)
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
def expected():
mod = tvm.IRModule({})
x11 = relay.var("x11", shape=(3, 5))
fn11 = relay.Function([x11], x11)
fn11 = fn11.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn11 = fn11.with_attr("Compiler", "a")
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = p0 + p1 + fn11(p0)
call_fn2 = g2(p2, p3)
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
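# Editor's note on expected() above: when a function also carries a
# "Compiler" attribute, the Inline pass does not splice its body into the
# caller.  The GlobalVar reference is instead rewritten into a direct call
# to the still-opaque relay.Function (call_fn1 = p0 + p1 + fn11(p0)),
# which appears intended to leave the body intact for the external codegen.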
def test_recursive_call_with_global():
def get_mod():
mod = tvm.IRModule({})
x = relay.var("x", shape=[], dtype="int32")
fn0 = relay.Function([x], x)
fn0 = fn0.with_attr("Inline", tvm.tir.IntImm("int32", 1))
gx = relay.GlobalVar("gx")
mod[gx] = fn0
sum_up = relay.GlobalVar("sum_up")
i = relay.var("i", shape=[], dtype="int32")
sb = relay.ScopeBuilder()
with sb.if_scope(relay.equal(i, relay.const(0, dtype="int32"))):
sb.ret(i)
with sb.else_scope():
one_less = relay.subtract(i, relay.const(1, dtype="int32"))
global_call = gx(i)
rec_call = relay.Call(sum_up, [one_less]) + global_call
sb.ret(relay.add(rec_call, i))
func = relay.Function([i], sb.get(), ret_type=relay.TensorType([], "int32"))
func = func.with_attr("Inline", tvm.tir.IntImm("int32", 1))
mod[sum_up] = func
iarg = relay.var("i", shape=[], dtype="int32")
mod["main"] = relay.Function([iarg], sum_up(iarg))
return mod
def expected():
mod = tvm.IRModule({})
sum_up = relay.GlobalVar("sum_up")
i = relay.var("i", shape=[], dtype="int32")
sb = relay.ScopeBuilder()
with sb.if_scope(relay.equal(i, relay.const(0, dtype="int32"))):
sb.ret(i)
with sb.else_scope():
one_less = relay.subtract(i, relay.const(1, dtype="int32"))
rec_call = relay.Call(sum_up, [one_less]) + i
sb.ret(relay.add(rec_call, i))
func = relay.Function([i], sb.get(), ret_type=relay.TensorType([], "int32"))
func = func.with_attr("Inline", tvm.tir.IntImm("int32", 1))
mod[sum_up] = func
iarg = relay.var("i", shape=[], dtype="int32")
mod["main"] = relay.Function([iarg], sum_up(iarg))
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
def test_recursive_called():
mod, sum_up = get_recursive_count_loop()
iarg = relay.var("i", shape=[], dtype="int32")
mod["main"] = relay.Function([iarg], sum_up(iarg))
ref_mod = mod
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, ref_mod, map_free_vars=True)
def test_recursive_not_called():
def get_mod():
mod, sum_up = get_recursive_count_loop()
x = relay.var("x", shape=(2, 2))
y = relay.var("y", shape=(2, 2))
x1 = relay.var("x1", shape=(2, 2))
fn1 = relay.Function([x1], x1)
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
mod["main"] = relay.Function([x, y], x + y + g1(x))
return mod
def expected():
mod, sum_up = get_recursive_count_loop()
x = relay.var("x", shape=(2, 2))
y = relay.var("y", shape=(2, 2))
mod["main"] = relay.Function([x, y], x + y + x)
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
ref_mod = expected()
assert tvm.ir.structural_equal(mod, ref_mod, map_free_vars=True)
def test_recursive_not_called_extern_compiler():
def get_mod():
mod, sum_up = get_recursive_count_loop()
x = relay.var("x", shape=(2, 2))
y = relay.var("y", shape=(2, 2))
x1 = relay.var("x1", shape=(2, 2))
fn1 = relay.Function([x1], x1)
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn1 = fn1.with_attr("Compiler", "a")
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
mod["main"] = relay.Function([x, y], x + y + g1(x))
return mod
def expected():
mod, sum_up = get_recursive_count_loop()
x = relay.var("x", shape=(2, 2))
y = relay.var("y", shape=(2, 2))
x1 = relay.var("x1", shape=(2, 2))
fn1 = relay.Function([x1], x1)
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn1 = fn1.with_attr("Compiler", "a")
mod["main"] = relay.Function([x, y], x + y + fn1(x))
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
ref_mod = expected()
assert tvm.ir.structural_equal(mod, ref_mod, map_free_vars=True)
def test_globalvar_as_call_arg():
def get_mod():
mod = tvm.IRModule({})
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
sb = relay.ScopeBuilder()
sb.ret(x1 + y1)
fn1 = relay.Function([x1, y1], sb.get())
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = g1(p0, p1)
call_fn2 = g2(p2, p3)
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
def expected():
mod = tvm.IRModule({})
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = p0 + p1
call_fn2 = p2 - p3
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
def test_globalvar_as_call_arg_extern_compiler():
def get_mod():
mod = tvm.IRModule({})
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
sb = relay.ScopeBuilder()
sb.ret(x1 + y1)
fn1 = relay.Function([x1, y1], sb.get())
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn1 = fn1.with_attr("Compiler", "a")
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn2 = fn2.with_attr("Compiler", "b")
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = g1(p0, p1)
call_fn2 = g2(p2, p3)
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
def expected():
mod = tvm.IRModule({})
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
sb = relay.ScopeBuilder()
sb.ret(x1 + y1)
fn1 = relay.Function([x1, y1], sb.get())
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn1 = fn1.with_attr("Compiler", "a")
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn2 = fn2.with_attr("Compiler", "b")
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = relay.Call(fn1, [p0, p1])
call_fn2 = relay.Call(fn2, [p2, p3])
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
def test_inline_globalvar_without_args():
def get_mod():
mod = tvm.IRModule({})
fn1 = relay.Function([], relay.const(1))
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn2 = relay.Function([], relay.const(2))
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g1 = relay.GlobalVar("g1")
g2 = relay.GlobalVar("g2")
mod[g1] = fn1
mod = relay.transform.InferType()(mod)
mod[g2] = fn2
p = relay.var("p", "bool")
mod["main"] = relay.Function([p], relay.Call(relay.If(p, g1, g2), []))
return relay.transform.InferType()(mod)
def expected():
mod = tvm.IRModule({})
fn1 = relay.Function([], relay.const(1))
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn2 = relay.Function([], relay.const(2))
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
p = relay.var("p", "bool")
mod["main"] = relay.Function([p], relay.Call(relay.If(p, fn1, fn2), []))
return relay.transform.InferType()(mod)
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
def test_inline_globalvar_without_args_extern_compiler():
def get_mod():
mod = tvm.IRModule({})
fn1 = relay.Function([], relay.const(1))
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn1 = fn1.with_attr("Compiler", "a")
fn2 = relay.Function([], relay.const(2))
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn2 = fn2.with_attr("Compiler", "b")
g1 = relay.GlobalVar("g1")
g2 = relay.GlobalVar("g2")
mod[g1] = fn1
mod[g2] = fn2
p = relay.var("p", "bool")
mod["main"] = relay.Function([p], relay.Call(relay.If(p, g1, g2), []))
return mod
def expected():
mod = tvm.IRModule({})
fn1 = relay.Function([], relay.const(1))
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn1 = fn1.with_attr("Compiler", "a")
fn2 = relay.Function([], relay.const(2))
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn2 = fn2.with_attr("Compiler", "b")
p = relay.var("p", "bool")
mod["main"] = relay.Function([p], relay.Call(relay.If(p, fn1, fn2), []))
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
def test_globalvar_called_by_multiple_functions():
"""Test when only leaf call is inlined.
The call graph is like the following:
main g0
/ \ /
g1 g2(inline)
"""
def get_mod():
mod = tvm.IRModule({})
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
sb = relay.ScopeBuilder()
sb.ret(x1 + y1)
fn1 = relay.Function([x1, y1], sb.get())
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
sb1 = relay.ScopeBuilder()
sb1.ret(x2 - y2)
fn2 = relay.Function([x2, y2], sb1.get())
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
x0 = relay.var("x0", shape=(3, 5))
y0 = relay.var("y0", shape=(3, 5))
z0 = relay.var("z0", shape=(3, 5))
fn0 = relay.Function([x0, y0, z0], g2(x0, y0) + z0)
g0 = relay.GlobalVar("g0")
mod[g0] = fn0
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn1 = g1(p0, p1)
call_fn2 = g2(p2, p3)
mod["main"] = relay.Function([p0, p1, p2, p3], call_fn1 * call_fn2)
return mod
def expected():
mod = tvm.IRModule({})
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
sb = relay.ScopeBuilder()
sb.ret(x1 + y1)
fn1 = relay.Function([x1, y1], sb.get())
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
p0 = relay.var("p0", shape=(3, 5))
p1 = relay.var("p1", shape=(3, 5))
p2 = relay.var("p2", shape=(3, 5))
p3 = relay.var("p3", shape=(3, 5))
call_fn2 = p2 - p3
mod["main"] = relay.Function([p0, p1, p2, p3], g1(p0, p1) * call_fn2)
x0 = relay.var("x0", shape=(3, 5))
y0 = relay.var("y0", shape=(3, 5))
z0 = relay.var("z0", shape=(3, 5))
fn0 = relay.Function([x0, y0, z0], x0 - y0 + z0)
g0 = relay.GlobalVar("g0")
mod[g0] = fn0
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
def test_entry_with_inline():
"""Test entry function with inline
The call graph is like the following:
g1(inline) g2(inline)
"""
def get_mod():
mod = tvm.IRModule({})
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
fn1 = relay.Function([x1, y1], x1 + y1)
fn1 = fn1.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
fn2 = relay.Function([x2, y2], x2 - y2)
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, get_mod(), map_free_vars=True)
def test_callee_not_inline():
"""Test entry function with inline
The call graph is like the following:
main
|
g2(inline)
|
g1
"""
def get_mod():
mod = tvm.IRModule({})
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
fn1 = relay.Function([x1, y1], x1 + y1)
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
fn2 = relay.Function([x2, y2], x2 - g1(x2, y2))
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, get_mod(), map_free_vars=True)
def test_callee_not_inline_leaf_inline():
"""Test entry function with inline
The call graph is like the following:
main
|
g2(inline)
|
g1
|
g0(inline)
"""
def get_mod():
mod = tvm.IRModule({})
x0 = relay.var("x0", shape=(3, 5))
y0 = relay.var("y0", shape=(3, 5))
fn0 = relay.Function([x0, y0], x0 * y0)
fn0 = fn0.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g0 = relay.GlobalVar("g0")
mod[g0] = fn0
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
fn1 = relay.Function([x1, y1], x1 + g0(x1, y1))
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
fn2 = relay.Function([x2, y2], x2 - g1(x2, y2))
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
return mod
def expected():
mod = tvm.IRModule({})
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
fn1 = relay.Function([x1, y1], x1 + x1 * y1)
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
fn2 = relay.Function([x2, y2], x2 - g1(x2, y2))
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
def test_callee_not_inline_leaf_inline_extern_compiler():
"""Test entry function with inline
The call graph is like the following:
main
|
g2(inline)
|
g1
|
g0(inline, external compiler)
"""
def get_mod():
mod = tvm.IRModule({})
x0 = relay.var("x0", shape=(3, 5))
y0 = relay.var("y0", shape=(3, 5))
fn0 = relay.Function([x0, y0], x0 * y0)
fn0 = fn0.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn0 = fn0.with_attr("Compiler", "aa")
g0 = relay.GlobalVar("g0")
mod[g0] = fn0
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
fn1 = relay.Function([x1, y1], x1 + g0(x1, y1))
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
fn2 = relay.Function([x2, y2], x2 - g1(x2, y2))
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
return mod
def expected():
mod = tvm.IRModule({})
x0 = relay.var("x0", shape=(3, 5))
y0 = relay.var("y0", shape=(3, 5))
fn0 = relay.Function([x0, y0], x0 * y0)
fn0 = fn0.with_attr("Inline", tvm.tir.IntImm("int32", 1))
fn0 = fn0.with_attr("Compiler", "aa")
x1 = relay.var("x1", shape=(3, 5))
y1 = relay.var("y1", shape=(3, 5))
fn1 = relay.Function([x1, y1], x1 + fn0(x1, y1))
g1 = relay.GlobalVar("g1")
mod[g1] = fn1
x2 = relay.var("x2", shape=(3, 5))
y2 = relay.var("y2", shape=(3, 5))
fn2 = relay.Function([x2, y2], x2 - g1(x2, y2))
fn2 = fn2.with_attr("Inline", tvm.tir.IntImm("int32", 1))
g2 = relay.GlobalVar("g2")
mod[g2] = fn2
return mod
mod = get_mod()
mod = relay.transform.Inline()(mod)
assert tvm.ir.structural_equal(mod, expected(), map_free_vars=True)
if __name__ == "__main__":
pytest.main()
| 33.067389
| 84
| 0.526584
| 3,858
| 27,479
| 3.66563
| 0.04873
| 0.084288
| 0.062367
| 0.04568
| 0.933248
| 0.924834
| 0.920945
| 0.918116
| 0.910762
| 0.9085
| 0
| 0.076478
| 0.294807
| 27,479
| 830
| 85
| 33.107229
| 0.653318
| 0.081153
| 0
| 0.924959
| 0
| 0
| 0.045651
| 0
| 0
| 0
| 0
| 0
| 0.026101
| 1
| 0.073409
| false
| 0
| 0.003263
| 0
| 0.12398
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
|
fb892a79684a152497b7744be235db1f8f1bf922
| 1,528
|
py
|
Python
|
run.py
|
pradeepkr12/kaggle-2014-criteo
|
76a8a9ed586552c2fd492e24e1a70eee97ed7da8
|
[
"Apache-2.0"
] | 1
|
2015-02-13T08:53:28.000Z
|
2015-02-13T08:53:28.000Z
|
run.py
|
pradeepkr12/kaggle-2014-criteo
|
76a8a9ed586552c2fd492e24e1a70eee97ed7da8
|
[
"Apache-2.0"
] | null | null | null |
run.py
|
pradeepkr12/kaggle-2014-criteo
|
76a8a9ed586552c2fd492e24e1a70eee97ed7da8
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import subprocess, sys, os, time
NR_THREAD = 1
start = time.time()
cmd = 'converters/parallelizer-a.py -s {nr_thread} converters/pre-a.py tr.csv tr.gbdt.dense tr.gbdt.sparse'.format(nr_thread=NR_THREAD)
subprocess.call(cmd, shell=True)
cmd = 'converters/parallelizer-a.py -s {nr_thread} converters/pre-a.py te.csv te.gbdt.dense te.gbdt.sparse'.format(nr_thread=NR_THREAD)
subprocess.call(cmd, shell=True)
cmd = './gbdt -t 30 -s {nr_thread} te.gbdt.dense te.gbdt.sparse tr.gbdt.dense tr.gbdt.sparse te.gbdt.out tr.gbdt.out'.format(nr_thread=NR_THREAD)
subprocess.call(cmd, shell=True)
cmd = 'rm -f te.gbdt.dense te.gbdt.sparse tr.gbdt.dense tr.gbdt.sparse'
subprocess.call(cmd, shell=True)
cmd = 'converters/parallelizer-b.py -s {nr_thread} converters/pre-b.py tr.csv tr.gbdt.out tr.fm'.format(nr_thread=NR_THREAD)
subprocess.call(cmd, shell=True)
cmd = 'converters/parallelizer-b.py -s {nr_thread} converters/pre-b.py te.csv te.gbdt.out te.fm'.format(nr_thread=NR_THREAD)
subprocess.call(cmd, shell=True)
cmd = 'rm -f te.gbdt.out tr.gbdt.out'
subprocess.call(cmd, shell=True)
cmd = './fm -k 4 -t 11 -s {nr_thread} te.fm tr.fm'.format(nr_thread=NR_THREAD)
subprocess.call(cmd, shell=True)
cmd = './utils/calibrate.py te.fm.out te.fm.out.cal'
subprocess.call(cmd, shell=True)
cmd = './utils/make_submission.py te.fm.out.cal submission.csv'
subprocess.call(cmd, shell=True)
print('time used = {0:.0f}'.format(time.time()-start))
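# Editor's sketch: each step above calls subprocess.call(cmd, shell=True)
# and ignores the exit code, so a failing converter silently feeds the next
# stage.  A hypothetical fail-fast wrapper (not part of the original
# pipeline) could replace those calls:
import shlex

def run_checked(cmd: str) -> None:
    """Run a command and raise CalledProcessError on a non-zero exit."""
    subprocess.run(shlex.split(cmd), check=True)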
| 38.2
| 146
| 0.740183
| 276
| 1,528
| 4.01087
| 0.177536
| 0.166215
| 0.153568
| 0.198735
| 0.811201
| 0.782294
| 0.70551
| 0.70551
| 0.70551
| 0.70551
| 0
| 0.006503
| 0.094241
| 1,528
| 39
| 147
| 39.179487
| 0.793353
| 0.013743
| 0
| 0.416667
| 0
| 0.25
| 0.488048
| 0.091633
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
|
fba8b9060655e271d8120e0fb361aa9af8ee05bf
| 37
|
py
|
Python
|
cardbot/__init__.py
|
janvanzeghbroeck/cardbot
|
c3b6c2f120acfb08529b4c5a1224cc7fe1960bdb
|
[
"MIT"
] | null | null | null |
cardbot/__init__.py
|
janvanzeghbroeck/cardbot
|
c3b6c2f120acfb08529b4c5a1224cc7fe1960bdb
|
[
"MIT"
] | null | null | null |
cardbot/__init__.py
|
janvanzeghbroeck/cardbot
|
c3b6c2f120acfb08529b4c5a1224cc7fe1960bdb
|
[
"MIT"
] | null | null | null |
def simple_function():
return 2
| 9.25
| 22
| 0.675676
| 5
| 37
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.243243
| 37
| 3
| 23
| 12.333333
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 7
|
fbc19904faa19a6d69e958b32063a957bd018021
| 10,088
|
py
|
Python
|
tests/components/group/test_binary_sensor.py
|
orcema/core
|
ce144bf63145813c76fbbe4f9423341764695057
|
[
"Apache-2.0"
] | null | null | null |
tests/components/group/test_binary_sensor.py
|
orcema/core
|
ce144bf63145813c76fbbe4f9423341764695057
|
[
"Apache-2.0"
] | null | null | null |
tests/components/group/test_binary_sensor.py
|
orcema/core
|
ce144bf63145813c76fbbe4f9423341764695057
|
[
"Apache-2.0"
] | null | null | null |
"""The tests for the Group Binary Sensor platform."""
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.group import DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
async def test_default_state(hass):
"""Test binary_sensor group default state."""
hass.states.async_set("binary_sensor.kitchen", "on")
hass.states.async_set("binary_sensor.bedroom", "on")
await async_setup_component(
hass,
BINARY_SENSOR_DOMAIN,
{
BINARY_SENSOR_DOMAIN: {
"platform": DOMAIN,
"entities": ["binary_sensor.kitchen", "binary_sensor.bedroom"],
"name": "Bedroom Group",
"unique_id": "unique_identifier",
"device_class": "presence",
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.bedroom_group")
assert state is not None
assert state.state == STATE_ON
assert state.attributes.get(ATTR_ENTITY_ID) == [
"binary_sensor.kitchen",
"binary_sensor.bedroom",
]
entity_registry = er.async_get(hass)
entry = entity_registry.async_get("binary_sensor.bedroom_group")
assert entry
assert entry.unique_id == "unique_identifier"
assert entry.original_name == "Bedroom Group"
assert entry.original_device_class == "presence"
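# Editor's note: the async_setup_component -> async_start ->
# async_block_till_done sequence above is the usual Home Assistant test
# bootstrap; the trailing block_till_done calls drain pending tasks so the
# group entity has computed its state before the assertions run.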
async def test_state_reporting_all(hass):
"""Test the state reporting in 'all' mode.
The group state is unavailable if all group members are unavailable.
Otherwise, the group state is unknown if at least one group member is unknown or unavailable.
Otherwise, the group state is off if at least one group member is off.
Otherwise, the group state is on.
"""
await async_setup_component(
hass,
BINARY_SENSOR_DOMAIN,
{
BINARY_SENSOR_DOMAIN: {
"platform": DOMAIN,
"entities": ["binary_sensor.test1", "binary_sensor.test2"],
"name": "Binary Sensor Group",
"device_class": "presence",
"all": "true",
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
# Initial state with no group member in the state machine -> unavailable
assert (
hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNAVAILABLE
)
# All group members unavailable -> unavailable
hass.states.async_set("binary_sensor.test1", STATE_UNAVAILABLE)
hass.states.async_set("binary_sensor.test2", STATE_UNAVAILABLE)
await hass.async_block_till_done()
assert (
hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNAVAILABLE
)
# At least one member unknown or unavailable -> group unknown
hass.states.async_set("binary_sensor.test1", STATE_ON)
hass.states.async_set("binary_sensor.test2", STATE_UNAVAILABLE)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNKNOWN
hass.states.async_set("binary_sensor.test1", STATE_ON)
hass.states.async_set("binary_sensor.test2", STATE_UNKNOWN)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNKNOWN
hass.states.async_set("binary_sensor.test1", STATE_UNKNOWN)
hass.states.async_set("binary_sensor.test2", STATE_UNKNOWN)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNKNOWN
hass.states.async_set("binary_sensor.test1", STATE_OFF)
hass.states.async_set("binary_sensor.test2", STATE_UNAVAILABLE)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNKNOWN
hass.states.async_set("binary_sensor.test1", STATE_OFF)
hass.states.async_set("binary_sensor.test2", STATE_UNKNOWN)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNKNOWN
hass.states.async_set("binary_sensor.test1", STATE_UNKNOWN)
hass.states.async_set("binary_sensor.test2", STATE_UNAVAILABLE)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNKNOWN
# At least one member off -> group off
hass.states.async_set("binary_sensor.test1", STATE_ON)
hass.states.async_set("binary_sensor.test2", STATE_OFF)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_OFF
hass.states.async_set("binary_sensor.test1", STATE_OFF)
hass.states.async_set("binary_sensor.test2", STATE_OFF)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_OFF
# Otherwise -> on
hass.states.async_set("binary_sensor.test1", STATE_ON)
hass.states.async_set("binary_sensor.test2", STATE_ON)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_ON
# All group members removed from the state machine -> unavailable
hass.states.async_remove("binary_sensor.test1")
hass.states.async_remove("binary_sensor.test2")
await hass.async_block_till_done()
assert (
hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNAVAILABLE
)
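# Editor's sketch: the 'all'-mode rules from the docstring above, restated
# as a plain function.  This is a hypothetical model for readability, not
# the Home Assistant implementation.
def group_state_all(states):
    """Combine member states according to the documented 'all' rules."""
    if all(s == "unavailable" for s in states):
        return "unavailable"
    if any(s in ("unknown", "unavailable") for s in states):
        return "unknown"
    if any(s == "off" for s in states):
        return "off"
    return "on"

assert group_state_all(["on", "unavailable"]) == "unknown"
assert group_state_all(["on", "off"]) == "off"
assert group_state_all(["on", "on"]) == "on"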
async def test_state_reporting_any(hass):
"""Test the state reporting in 'any' mode.
The group state is unavailable if all group members are unavailable.
Otherwise, the group state is unknown if all group members are unknown.
Otherwise, the group state is on if at least one group member is on.
Otherwise, the group state is off.
"""
await async_setup_component(
hass,
BINARY_SENSOR_DOMAIN,
{
BINARY_SENSOR_DOMAIN: {
"platform": DOMAIN,
"entities": ["binary_sensor.test1", "binary_sensor.test2"],
"name": "Binary Sensor Group",
"device_class": "presence",
"all": "false",
"unique_id": "unique_identifier",
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
entity_registry = er.async_get(hass)
entry = entity_registry.async_get("binary_sensor.binary_sensor_group")
assert entry
assert entry.unique_id == "unique_identifier"
# Initial state with no group member in the state machine -> unavailable
assert (
hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNAVAILABLE
)
# All group members unavailable -> unavailable
hass.states.async_set("binary_sensor.test1", STATE_UNAVAILABLE)
hass.states.async_set("binary_sensor.test2", STATE_UNAVAILABLE)
await hass.async_block_till_done()
assert (
hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNAVAILABLE
)
# All group members unknown -> unknown
hass.states.async_set("binary_sensor.test1", STATE_UNKNOWN)
hass.states.async_set("binary_sensor.test2", STATE_UNKNOWN)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNKNOWN
# Group members unknown or unavailable -> unknown
hass.states.async_set("binary_sensor.test1", STATE_UNKNOWN)
hass.states.async_set("binary_sensor.test2", STATE_UNAVAILABLE)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNKNOWN
# At least one member on -> group on
hass.states.async_set("binary_sensor.test1", STATE_ON)
hass.states.async_set("binary_sensor.test2", STATE_UNAVAILABLE)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_ON
hass.states.async_set("binary_sensor.test1", STATE_ON)
hass.states.async_set("binary_sensor.test2", STATE_OFF)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_ON
hass.states.async_set("binary_sensor.test1", STATE_ON)
hass.states.async_set("binary_sensor.test2", STATE_ON)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_ON
hass.states.async_set("binary_sensor.test1", STATE_ON)
hass.states.async_set("binary_sensor.test2", STATE_UNKNOWN)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_ON
# Otherwise -> off
hass.states.async_set("binary_sensor.test1", STATE_OFF)
hass.states.async_set("binary_sensor.test2", STATE_OFF)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_OFF
hass.states.async_set("binary_sensor.test1", STATE_UNKNOWN)
hass.states.async_set("binary_sensor.test2", STATE_OFF)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_OFF
hass.states.async_set("binary_sensor.test1", STATE_UNAVAILABLE)
hass.states.async_set("binary_sensor.test2", STATE_OFF)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.binary_sensor_group").state == STATE_OFF
# All group members removed from the state machine -> unavailable
hass.states.async_remove("binary_sensor.test1")
hass.states.async_remove("binary_sensor.test2")
await hass.async_block_till_done()
assert (
hass.states.get("binary_sensor.binary_sensor_group").state == STATE_UNAVAILABLE
)
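# The assertions in this file pin down the reduction rule for both group
# modes. Below is a minimal, hypothetical sketch of that rule as a pure
# function over plain state strings -- inferred from the asserts above, not
# the actual homeassistant group implementation.
def _sketch_group_state(states, mode_all):
    """Reduce member states to a group state; mode_all=True is 'all' mode."""
    if not states or all(s == "unavailable" for s in states):
        return "unavailable"
    if mode_all:
        # 'all' mode: any unknown/unavailable member makes the group unknown.
        if any(s in ("unknown", "unavailable") for s in states):
            return "unknown"
        return "off" if any(s == "off" for s in states) else "on"
    # 'any' mode: a single 'on' member wins; all-indeterminate stays unknown.
    if all(s in ("unknown", "unavailable") for s in states):
        return "unknown"
    return "on" if any(s == "on" for s in states) else "off"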
| 40.842105
| 97
| 0.718279
| 1,327
| 10,088
| 5.163527
| 0.060286
| 0.206655
| 0.100701
| 0.110333
| 0.88383
| 0.872154
| 0.826182
| 0.810858
| 0.810858
| 0.795534
| 0
| 0.005799
| 0.17952
| 10,088
| 246
| 98
| 41.00813
| 0.822037
| 0.065226
| 0
| 0.733696
| 0
| 0
| 0.253899
| 0.115252
| 0
| 0
| 0
| 0
| 0.179348
| 1
| 0
| false
| 0
| 0.027174
| 0
| 0.027174
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
83ca168e930838bbefd913fb2ede5d9e631c5e56
| 3,975
|
py
|
Python
|
q_learn_snake.py
|
dsjohns2/Snake-AI
|
7e18abcea4abcf25089eaec5fdf140acd558ed6b
|
[
"MIT"
] | null | null | null |
q_learn_snake.py
|
dsjohns2/Snake-AI
|
7e18abcea4abcf25089eaec5fdf140acd558ed6b
|
[
"MIT"
] | null | null | null |
q_learn_snake.py
|
dsjohns2/Snake-AI
|
7e18abcea4abcf25089eaec5fdf140acd558ed6b
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import random
import time
import numpy as np
dimensions = (5, 5)
q_table = np.zeros((dimensions[0], dimensions[1], dimensions[0], dimensions[1], 4))
for episode in range(0, 100000):
print(episode)
snake_location = [(random.randint(0, dimensions[0]-1), random.randint(0, dimensions[1]-1))]
free_space = []
for i in range(dimensions[0]):
for j in range(dimensions[1]):
free_space.append((i, j))
free_space.remove(snake_location[0])
alive = True
eaten = True
score = 0
while(alive):
# Spawn ball if needed
if(eaten):
ball_location = free_space[random.randint(0, len(free_space)-1)]
free_space.remove(ball_location)
eaten = False
# Move Snake
initial_state = snake_location[0]
direction = random.randint(0, 3)
if(direction == 0):
snake_location = [(snake_location[0][0]-1, snake_location[0][1])] + snake_location
elif(direction == 1):
snake_location = [(snake_location[0][0], snake_location[0][1]+1)] + snake_location
elif(direction == 2):
snake_location = [(snake_location[0][0]+1, snake_location[0][1])] + snake_location
else:
snake_location = [(snake_location[0][0], snake_location[0][1]-1)] + snake_location
free_space.append(snake_location[-1])
del snake_location[-1]
new_state = snake_location[0]
# Check if died or eaten
if(snake_location[0] in free_space):
free_space.remove(snake_location[0])
reward = -1  # small step penalty for an ordinary move
elif(snake_location[0] == ball_location):
eaten = True
reward = 10
score += 100
else:
reward = -100
print("Dead")
print()
print("Final Score: " + str(score))
alive = False
lr = .5  # learning rate
dv = .5  # discount factor
action = direction
if(reward == -100):
q_table[initial_state[0]][initial_state[1]][ball_location[0]][ball_location[1]][action] = (1 - lr) * q_table[initial_state[0]][initial_state[1]][ball_location[0]][ball_location[1]][action] + lr * (reward)
else:
q_table[initial_state[0]][initial_state[1]][ball_location[0]][ball_location[1]][action] = (1 - lr) * q_table[initial_state[0]][initial_state[1]][ball_location[0]][ball_location[1]][action] + lr * (reward + dv * np.amax(q_table[new_state[0]][new_state[1]][ball_location[0]][ball_location[1]]))
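# --- Playback phase --------------------------------------------------------
# Training is finished; everything below replays a single game greedily,
# picking the argmax action from the learned Q-table and rendering the board
# as ASCII between moves.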
snake_location = [(random.randint(0, dimensions[0]-1), random.randint(0, dimensions[1]-1))]
free_space = []
for i in range(dimensions[0]):
for j in range(dimensions[1]):
free_space.append((i, j))
free_space.remove(snake_location[0])
alive = True
eaten = True
score = 0
while(alive):
# Spawn ball if needed
if(eaten):
ball_location = free_space[random.randint(0, len(free_space)-1)]
free_space.remove(ball_location)
eaten = False
# Print game
for i in range(dimensions[1]+2):
print("-", end='')
print()
for i in range(dimensions[0]):
print("|", end='')
for j in range(dimensions[1]):
space = (i, j)
if(ball_location == space):
print("0", end='')
elif(space in free_space):
print(" ", end='')
else:
print("X", end='')
print("|", end='')
print()
for i in range(dimensions[1]+2):
print("-", end='')
print()
print(score)
# Move Snake
direction = np.argmax(q_table[snake_location[0][0]][snake_location[0][1]][ball_location[0]][ball_location[1]])
if(direction == 0):
snake_location = [(snake_location[0][0]-1, snake_location[0][1])] + snake_location
elif(direction == 1):
snake_location = [(snake_location[0][0], snake_location[0][1]+1)] + snake_location
elif(direction == 2):
snake_location = [(snake_location[0][0]+1, snake_location[0][1])] + snake_location
else:
snake_location = [(snake_location[0][0], snake_location[0][1]-1)] + snake_location
free_space.append(snake_location[-1])
del snake_location[-1]
# Check if died or eaten
if(snake_location[0] in free_space):
free_space.remove(snake_location[0])
elif(snake_location[0] == ball_location):
eaten = True
score += 100
else:
print("Dead")
print()
print("Final Score: " + str(score))
alive = False
time.sleep(.5)
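# For reference, the inlined update above is the standard tabular Q-learning
# rule, Q(s, a) <- (1 - lr) * Q(s, a) + lr * (r + dv * max_a' Q(s', a')),
# with the bootstrap term dropped on terminal (death) transitions. A
# hypothetical standalone helper with the same semantics (illustrative only;
# the script above manipulates q_table directly):
def _sketch_q_update(q, state, action, reward, next_state, lr=0.5, dv=0.5,
                     terminal=False):
    # state/next_state are (head_row, head_col, ball_row, ball_col) tuples.
    target = reward if terminal else reward + dv * np.max(q[next_state])
    q[state + (action,)] = (1 - lr) * q[state + (action,)] + lr * target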
| 29.887218
| 295
| 0.675472
| 605
| 3,975
| 4.252893
| 0.117355
| 0.252623
| 0.152351
| 0.052468
| 0.802565
| 0.802565
| 0.793626
| 0.783521
| 0.715896
| 0.715896
| 0
| 0.044701
| 0.150189
| 3,975
| 132
| 296
| 30.113636
| 0.716992
| 0.030189
| 0
| 0.736364
| 0
| 0
| 0.010658
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.036364
| 0
| 0.036364
| 0.172727
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
83cdc2022e2fbc18724618522c8781f4ef435b56
| 2,244
|
py
|
Python
|
mingla/test_bot.py
|
svt/mingla
|
a3433b29a9db8ce13eb401d36ce073cf2464d330
|
[
"MIT"
] | 2
|
2021-12-30T10:14:23.000Z
|
2022-01-08T13:26:39.000Z
|
mingla/test_bot.py
|
svt/mingla
|
a3433b29a9db8ce13eb401d36ce073cf2464d330
|
[
"MIT"
] | null | null | null |
mingla/test_bot.py
|
svt/mingla
|
a3433b29a9db8ce13eb401d36ce073cf2464d330
|
[
"MIT"
] | 4
|
2021-01-15T08:05:27.000Z
|
2022-03-11T13:22:53.000Z
|
from . import bot
def test_split_users_tree_users_three_rooms_size():
a = list(bot.split_users([1, 2, 3], 3))
b = [[1, 2, 3]]
assert a == b, "Split 3 users, room size 3"
def test_split_users_five_users_three_rooms_size():
a = list(bot.split_users([1, 2, 3, 4, 5], 3))
b = [[1, 2, 3], [4, 5]]
assert a == b, "Split 5 users, room size 3"
def test_split_users_twelve_users_three_rooms_size():
a = list(bot.split_users([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], 3))
b = [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]]
assert a == b, "Split 12 users, room size 3"
def test_split_users_twelve_users_two_rooms_size():
a = list(bot.split_users([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], 2))
b = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12]]
assert a == b, "Split 12 users, room size 2"
def test_split_users_two_users_three_rooms_size():
a = list(bot.split_users([1, 2], 3))
b = [[1, 2]]
assert a == b, "Split 2 users, room size 3"
def test_split_users_two_users_two_rooms_size():
a = list(bot.split_users([1, 2], 2))
b = [[1, 2]]
assert a == b, "Split 2 users, room size 2"
def test_split_users_tree_users_two_rooms_size():
a = list(bot.split_users([1, 2, 3], 2))
b = [[1, 2, 3]]
assert a == b, "Split 2 users, room size 2"
def test_split_users_four_users_tree_rooms_size():
a = list(bot.split_users([1, 2, 3, 4], 3))
b = [[1, 2, 3, 4]]
assert a == b, "Split 4 users, room size 3"
def test_split_users_five_users_tree_rooms_size():
a = list(bot.split_users([1, 2, 3, 4, 5], 3))
b = [[1, 2, 3], [4, 5]]
assert a == b, "Split 5 users, room size 3"
def test_split_users_six_users_tree_rooms_size():
a = list(bot.split_users([1, 2, 3, 4, 5, 6], 3))
b = [[1, 2, 3], [4, 5, 6]]
assert a == b, "Split 6 users, room size 3"
def test_split_users_seven_users_tree_rooms_size():
a = list(bot.split_users([1, 2, 3, 4, 5, 6, 7], 3))
b = [[1, 2, 3], [4, 5, 6, 7]]
assert a == b, "Split 7 users, room size 3"
def test_split_users_eight_users_tree_rooms_size():
a = list(bot.split_users([1, 2, 3, 4, 5, 6, 7, 8], 3))
b = [[1, 2, 3], [4, 5, 6], [7, 8]]
assert a == b, "Split 8 users, room size 3"
| 30.324324
| 73
| 0.584225
| 441
| 2,244
| 2.755102
| 0.079365
| 0.197531
| 0.051852
| 0.052675
| 0.916049
| 0.91358
| 0.883128
| 0.883128
| 0.778601
| 0.771193
| 0
| 0.108671
| 0.229055
| 2,244
| 73
| 74
| 30.739726
| 0.593642
| 0
| 0
| 0.285714
| 0
| 0
| 0.139929
| 0
| 0
| 0
| 0
| 0
| 0.244898
| 1
| 0.244898
| false
| 0
| 0.020408
| 0
| 0.265306
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7bcf1ae6037b948f0a7c1643df6405f17059bb6
| 1,201
|
py
|
Python
|
mayan/apps/ocr/icons.py
|
sophiawa/Mayan-EDMS
|
42f20576d0c690b645a60bf53c5169cda4264231
|
[
"Apache-2.0"
] | 1
|
2021-02-24T15:03:23.000Z
|
2021-02-24T15:03:23.000Z
|
mayan/apps/ocr/icons.py
|
sophiawa/Mayan-EDMS
|
42f20576d0c690b645a60bf53c5169cda4264231
|
[
"Apache-2.0"
] | 10
|
2021-03-19T23:48:12.000Z
|
2022-03-12T00:41:49.000Z
|
mayan/apps/ocr/icons.py
|
sophiawa/Mayan-EDMS
|
42f20576d0c690b645a60bf53c5169cda4264231
|
[
"Apache-2.0"
] | 1
|
2021-04-30T09:44:14.000Z
|
2021-04-30T09:44:14.000Z
|
from mayan.apps.appearance.classes import Icon
icon_document_ocr_content = Icon(driver_name='fontawesome', symbol='font')
icon_document_ocr_content_delete = Icon(
driver_name='fontawesome-dual', primary_symbol='font',
secondary_symbol='times'
)
icon_document_page_ocr_content = Icon(driver_name='fontawesome', symbol='font')
icon_document_multiple_submit = Icon(driver_name='fontawesome', symbol='font')
icon_document_ocr_download = Icon(
driver_name='fontawesome-dual', primary_symbol='font',
secondary_symbol='arrow-down'
)
icon_document_ocr_errors_list = Icon(
driver_name='fontawesome-dual', primary_symbol='font',
secondary_symbol='exclamation'
)
icon_document_type_ocr_settings = Icon(
driver_name='fontawesome', symbol='font'
)
icon_document_type_submit = Icon(
driver_name='fontawesome-dual', primary_symbol='font',
secondary_symbol='arrow-right'
)
# Note: this dual-symbol definition supersedes the single-symbol
# icon_document_multiple_submit assignment earlier in this module.
icon_document_multiple_submit = Icon(
driver_name='fontawesome-dual', primary_symbol='font',
secondary_symbol='arrow-right'
)
icon_entry_list = Icon(
driver_name='fontawesome-dual', primary_symbol='font',
secondary_symbol='exclamation'
)
icon_document_submit = icon_document_multiple_submit
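# A rough stand-in for the Icon class used above, inferred only from the
# keyword arguments in this module (the real
# mayan.apps.appearance.classes.Icon does considerably more); it exists so
# the definitions above can be read in isolation:
class _SketchIcon:
    def __init__(self, driver_name, symbol=None, primary_symbol=None,
                 secondary_symbol=None):
        # 'fontawesome' icons take a single symbol; 'fontawesome-dual' icons
        # compose a primary symbol with a secondary overlay symbol.
        self.driver_name = driver_name
        self.symbol = symbol
        self.primary_symbol = primary_symbol
        self.secondary_symbol = secondary_symbol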
| 35.323529
| 79
| 0.786012
| 150
| 1,201
| 5.893333
| 0.22
| 0.149321
| 0.158371
| 0.282805
| 0.802036
| 0.802036
| 0.802036
| 0.802036
| 0.714932
| 0.654977
| 0
| 0
| 0.099917
| 1,201
| 33
| 80
| 36.393939
| 0.817761
| 0
| 0
| 0.3125
| 0
| 0
| 0.199001
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03125
| 0
| 0.03125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
790f567ff4c5cff1ff2d0aa74f348563fadc1069
| 8,765
|
py
|
Python
|
test/cnnl/op_test/test_type.py
|
Cambricon/catch
|
2625da389f25a67066d20fb6b0c38250ef98f8ab
|
[
"BSD-2-Clause"
] | 20
|
2022-03-01T11:40:51.000Z
|
2022-03-30T08:17:47.000Z
|
test/cnnl/op_test/test_type.py
|
Cambricon/catch
|
2625da389f25a67066d20fb6b0c38250ef98f8ab
|
[
"BSD-2-Clause"
] | null | null | null |
test/cnnl/op_test/test_type.py
|
Cambricon/catch
|
2625da389f25a67066d20fb6b0c38250ef98f8ab
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import print_function
import sys
import logging
import os
os.environ['ENABLE_CNNL_TRYCATCH'] = 'OFF' # pylint: disable=C0413
from itertools import product
import unittest
import torch
import torch_mlu.core.mlu_model as ct
cur_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(cur_dir + "/../../")
from common_utils import testinfo, TestCase # pylint: disable=C0413,C0411
logging.basicConfig(level=logging.DEBUG)
class TestTypeOp(TestCase):
# @unittest.skip("not test")
@testinfo()
def test_type_param_empty(self):
shape_list = [(512, 1024, 2, 2, 4), (2, 3, 4),
(254, 254, 112, 1, 1, 3), (1000,), ()]
dtype_list = [torch.half, torch.float,
torch.uint8, torch.int8, torch.short,
torch.int, torch.long, torch.bool]
for shape, src_type in product(shape_list, dtype_list):
if src_type in [torch.half, torch.float]:
x = torch.randn(shape, dtype=src_type)
elif src_type == torch.uint8:
x = torch.randint(0, 255, shape).to(src_type)
else:
x = torch.randint(-128, 128, shape).to(src_type)
out_cpu_type = x.type()
out_mlu_type = x.to(ct.mlu_device()).type()
l_tmp = out_cpu_type.split('.')
l_tmp.insert(1, 'mlu')
self.assertEqual('.'.join(l_tmp), out_mlu_type)
# @unittest.skip("not test")
@testinfo()
def test_type_param_empty_channels_last(self):
shape_list = [(512, 1024, 2, 2), (2, 3, 4, 5),
(254, 254, 112, 1), (2, 3, 24, 30), (1, 1, 1, 30)]
dtype_list = [torch.half, torch.float,
torch.uint8, torch.int8, torch.short,
torch.int, torch.long, torch.bool]
for shape, src_type in product(shape_list, dtype_list):
if src_type in [torch.half, torch.float]:
x = torch.randn(shape, dtype=src_type).to(memory_format=torch.channels_last)
elif src_type == torch.uint8:
x = torch.randint(0, 255, shape).to(src_type).to(
memory_format=torch.channels_last)
else:
x = torch.randint(-128, 128, shape).to(src_type).to(
memory_format=torch.channels_last)
out_cpu_type = x.type()
out_mlu_type = x.to(ct.mlu_device()).type()
l_tmp = out_cpu_type.split('.')
l_tmp.insert(1, 'mlu')
self.assertEqual('.'.join(l_tmp), out_mlu_type)
# @unittest.skip("not test")
@testinfo()
def test_type_param_empty_not_dense(self):
shape_list = [(16, 32, 2, 30), (2, 3, 4, 32),
(24, 26, 112, 64), (2, 3, 24, 30), (1, 1, 1, 30)]
dtype_list = [torch.half, torch.float,
torch.uint8, torch.int8, torch.short,
torch.int, torch.long, torch.bool]
for shape, src_type in product(shape_list, dtype_list):
if src_type in [torch.half, torch.float]:
x = torch.randn(shape, dtype=src_type)[:, :, :, :15]
elif src_type == torch.uint8:
x = torch.randint(0, 255, shape).to(src_type)[:, :, :, :15]
else:
x = torch.randint(-128, 128, shape).to(src_type)[:, :, :, :15]
out_cpu_type = x.type()
out_mlu_type = x.to(ct.mlu_device()).type()
l_tmp = out_cpu_type.split('.')
l_tmp.insert(1, 'mlu')
self.assertEqual('.'.join(l_tmp), out_mlu_type)
# @unittest.skip("not test")
@testinfo()
def test_type_param_dtype(self):
shape_list = [(512, 1024, 2, 2, 4), (2, 3, 4),
(254, 254, 112, 1, 1, 3), (1000,), ()]
cast_map = {torch.float: {torch.half, torch.int, torch.short, torch.int8, torch.bool},
torch.half: {torch.float, torch.int, torch.short, torch.int8, torch.bool},
torch.long: {torch.float, torch.half, torch.short, torch.int8},
torch.int: {torch.float, torch.half, torch.short, torch.int8},
torch.short: {torch.float, torch.half, torch.int},
torch.int8: {torch.float, torch.half, torch.int},
torch.uint8: {torch.float, torch.half},
torch.bool: {torch.float, torch.half, torch.int},
}
for shape, src_type in product(shape_list, cast_map.keys()):
for dst_type in cast_map[src_type]:
if src_type in [torch.half, torch.float]:
x = torch.randn(shape, dtype=src_type)
elif src_type == torch.uint8:
x = torch.randint(0, 255, shape).to(src_type)
else:
x = torch.randint(-128, 128, shape).to(src_type)
for is_async in [False, True]:
out_cpu = x.type(dst_type, non_blocking=is_async)
out_mlu = x.to(ct.mlu_device()).type(dst_type, non_blocking=is_async)
self.assertEqual(out_mlu.dtype, dst_type)
self.assertEqual(out_cpu, out_mlu.cpu())
# @unittest.skip("not test")
@testinfo()
def test_type_param_dtype_channels_last(self):
shape_list = [(512, 1024, 2, 2), (2, 3, 4, 16),
(254, 254, 112, 1), (2, 3, 24, 30), (1, 1, 1, 30)]
cast_map = {torch.float: {torch.half, torch.int, torch.short, torch.int8, torch.bool},
torch.half: {torch.float, torch.int, torch.short, torch.int8, torch.bool},
torch.long: {torch.float, torch.half, torch.short, torch.int8},
torch.int: {torch.float, torch.half, torch.short, torch.int8},
torch.short: {torch.float, torch.half, torch.int},
torch.int8: {torch.float, torch.half, torch.int},
torch.uint8: {torch.float, torch.half},
torch.bool: {torch.float, torch.half, torch.int},
}
for shape, src_type in product(shape_list, cast_map.keys()):
for dst_type in cast_map[src_type]:
if src_type in [torch.half, torch.float]:
x = torch.randn(shape, dtype=src_type).to(memory_format=torch.channels_last)
elif src_type == torch.uint8:
x = torch.randint(0, 255, shape).to(src_type).to(
memory_format=torch.channels_last)
else:
x = torch.randint(-128, 128, shape).to(src_type).to(
memory_format=torch.channels_last)
for is_async in [False, True]:
out_cpu = x.type(dst_type, non_blocking=is_async)
out_mlu = x.to(ct.mlu_device()).type(dst_type, non_blocking=is_async)
self.assertEqual(out_mlu.dtype, dst_type)
self.assertEqual(out_cpu, out_mlu.cpu())
# @unittest.skip("not test")
@testinfo()
def test_type_param_dtype_not_dense(self):
shape_list = [(16, 32, 2, 30), (2, 3, 4, 32),
(24, 26, 112, 64), (2, 3, 24, 30), (1, 1, 1, 30)]
cast_map = {torch.float: {torch.half, torch.int, torch.short, torch.int8, torch.bool},
torch.half: {torch.float, torch.int, torch.short, torch.int8, torch.bool},
torch.long: {torch.float, torch.half, torch.short, torch.int8},
torch.int: {torch.float, torch.half, torch.short, torch.int8},
torch.short: {torch.float, torch.half, torch.int},
torch.int8: {torch.float, torch.half, torch.int},
torch.uint8: {torch.float, torch.half},
torch.bool: {torch.float, torch.half, torch.int},
}
for shape, src_type in product(shape_list, cast_map.keys()):
for dst_type in cast_map[src_type]:
if src_type in [torch.half, torch.float]:
x = torch.randn(shape, dtype=src_type)[:, :, :, :15]
elif src_type == torch.uint8:
x = torch.randint(0, 255, shape).to(src_type)[:, :, :, :15]
else:
x = torch.randint(-128, 128, shape).to(src_type)[:, :, :, :15]
for is_async in [False, True]:
out_cpu = x.type(dst_type, non_blocking=is_async)
out_mlu = x.to(ct.mlu_device()).type(dst_type, non_blocking=is_async)
self.assertEqual(out_mlu.dtype, dst_type)
self.assertEqual(out_cpu, out_mlu.cpu())
if __name__ == '__main__':
unittest.main()
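# All of the tests above rely on the same string surgery: an MLU tensor's
# .type() should differ from the CPU one only by an inserted 'mlu' segment.
# A torch-free sketch of that expectation (hypothetical helper name):
def _sketch_expected_mlu_type(cpu_type):
    """E.g. 'torch.FloatTensor' -> 'torch.mlu.FloatTensor'."""
    parts = cpu_type.split('.')
    parts.insert(1, 'mlu')
    return '.'.join(parts)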
| 51.25731
| 98
| 0.539875
| 1,145
| 8,765
| 3.950218
| 0.102183
| 0.060358
| 0.102145
| 0.088216
| 0.914658
| 0.914658
| 0.914658
| 0.914658
| 0.914658
| 0.914658
| 0
| 0.051791
| 0.321506
| 8,765
| 170
| 99
| 51.558824
| 0.708761
| 0.024073
| 0
| 0.825806
| 0
| 0
| 0.006202
| 0
| 0
| 0
| 0
| 0
| 0.058065
| 1
| 0.03871
| false
| 0
| 0.058065
| 0
| 0.103226
| 0.006452
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f72386a8a5199feebb6ae6ddf5ad300495566d00
| 7,304
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowIpRoute/cli/equal/golden_output92_expected.py
|
miwamoto0203/genieparser
|
d0595046f0f804aa4143c13e20a738b41a3a8c25
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowIpRoute/cli/equal/golden_output92_expected.py
|
miwamoto0203/genieparser
|
d0595046f0f804aa4143c13e20a738b41a3a8c25
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowIpRoute/cli/equal/golden_output92_expected.py
|
miwamoto0203/genieparser
|
d0595046f0f804aa4143c13e20a738b41a3a8c25
|
[
"Apache-2.0"
] | null | null | null |
expected_output = {
"vrf": {
"VRF1": {
"address_family": {
"ipv4": {
"routes": {
"10.0.0.0/24": {
"route": "10.0.0.0/24",
"active": True,
"route_preference": 110,
"metric": 1,
"source_protocol_codes": "O",
"source_protocol": "ospf",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.81.1.2",
"updated": "01:02:20",
"outgoing_interface": "GigabitEthernet0/0/2.100",
}
}
},
},
"10.0.1.0/24": {
"route": "10.0.1.0/24",
"active": True,
"route_preference": 110,
"metric": 1,
"source_protocol_codes": "O",
"source_protocol": "ospf",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.81.1.2",
"updated": "01:02:20",
"outgoing_interface": "GigabitEthernet0/0/2.100",
}
}
},
},
"10.0.2.0/24": {
"route": "10.0.2.0/24",
"active": True,
"route_preference": 110,
"metric": 1,
"source_protocol_codes": "O IA",
"source_protocol": "ospf",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.81.1.2",
"updated": "01:02:20",
"outgoing_interface": "GigabitEthernet0/0/2.100",
}
}
},
},
"10.145.0.0/24": {
"route": "10.145.0.0/24",
"active": True,
"route_preference": 200,
"metric": 1,
"source_protocol_codes": "B",
"source_protocol": "bgp",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "192.168.51.1",
"updated": "01:01:10",
}
}
},
},
"10.145.1.0/24": {
"route": "10.145.1.0/24",
"active": True,
"route_preference": 200,
"metric": 1,
"source_protocol_codes": "B",
"source_protocol": "bgp",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "192.168.51.1",
"updated": "01:01:10",
}
}
},
},
"10.145.2.0/24": {
"route": "10.145.2.0/24",
"active": True,
"route_preference": 200,
"metric": 1,
"source_protocol_codes": "B",
"source_protocol": "bgp",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "192.168.51.1",
"updated": "01:01:10",
}
}
},
},
"10.81.1.0/24": {
"route": "10.81.1.0/24",
"active": True,
"source_protocol_codes": "C",
"source_protocol": "connected",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet0/0/2.100": {
"outgoing_interface": "GigabitEthernet0/0/2.100"
}
}
},
},
"10.81.1.1/32": {
"route": "10.81.1.1/32",
"active": True,
"source_protocol_codes": "L",
"source_protocol": "local",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet0/0/2.100": {
"outgoing_interface": "GigabitEthernet0/0/2.100"
}
}
},
},
"192.168.4.0/24": {
"route": "192.168.4.0/24",
"active": True,
"route_preference": 200,
"metric": 0,
"source_protocol_codes": "B",
"source_protocol": "bgp",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "192.168.51.1",
"updated": "01:01:10",
}
}
},
},
}
}
}
}
}
}
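# A small, hypothetical helper showing how a parsed structure shaped like
# expected_output is typically consumed -- walking vrf/address_family/routes
# to collect next hops per prefix (illustrative only; not part of
# genieparser):
def _sketch_next_hops(parsed, vrf="VRF1", af="ipv4"):
    hops = {}
    routes = parsed["vrf"][vrf]["address_family"][af]["routes"]
    for prefix, route in routes.items():
        nh = route.get("next_hop", {})
        if "next_hop_list" in nh:
            hops[prefix] = [e["next_hop"] for e in nh["next_hop_list"].values()]
        else:
            # Connected/local routes carry only outgoing interfaces.
            hops[prefix] = list(nh.get("outgoing_interface", {}))
    return hops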
| 45.36646
| 89
| 0.214951
| 396
| 7,304
| 3.780303
| 0.128788
| 0.107548
| 0.114228
| 0.069472
| 0.932532
| 0.804275
| 0.804275
| 0.798931
| 0.774215
| 0.774215
| 0
| 0.143229
| 0.684556
| 7,304
| 160
| 90
| 45.65
| 0.50651
| 0
| 0
| 0.55
| 0
| 0
| 0.21988
| 0.048877
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f76afc2d384e6ee7aae727e40053556027942ae3
| 13,693
|
py
|
Python
|
management_tool/tests/test_views.py
|
casol/MS-User-Management-App
|
fa61dbbbac655217987f7a44488ef72e181e7d81
|
[
"MIT"
] | null | null | null |
management_tool/tests/test_views.py
|
casol/MS-User-Management-App
|
fa61dbbbac655217987f7a44488ef72e181e7d81
|
[
"MIT"
] | 5
|
2021-03-19T09:06:58.000Z
|
2022-02-10T11:31:05.000Z
|
management_tool/tests/test_views.py
|
casol/MS-User-Management-App
|
fa61dbbbac655217987f7a44488ef72e181e7d81
|
[
"MIT"
] | null | null | null |
import datetime
import csv
import io
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.contrib.messages import get_messages
from users.forms import CustomUserCreationForm, CustomUserChangeForm
User = get_user_model()
class SignupViewTest(TestCase):
"""Test sign up view."""
def test_get_view_url_exists_at_desired_location(self):
response = self.client.get('/signup/')
self.assertEqual(response.status_code, 200)
def test_get_view_url_accessible_by_name(self):
response = self.client.get(reverse('signup'))
self.assertEqual(response.status_code, 200)
def test_get_view_uses_correct_template(self):
response = self.client.get(reverse('signup'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'management_tool/signup.html')
def test_get_view_populates_form_into_context(self):
response = self.client.get(reverse('signup'))
self.assertIsInstance(response.context['user_form'],
CustomUserCreationForm)
self.assertTrue(isinstance(response.context['user_form'],
CustomUserCreationForm))
def test_signup_form_validation_for_blank_items(self):
form_data = {'username': '',
'email': '',
'birth_date': '',
'password': '',
'password2': ''}
form = CustomUserCreationForm(data=form_data)
self.assertFalse(form.is_valid())
for field, error in form.errors.items():
self.assertEqual(error,
["This field is required."])
def test_signup_form_is_valid(self):
form_data = {'username': 'TestUser',
'email': 'test3@test31.com',
'birth_date': datetime.datetime.now(),
'password1': 'Pw4Newuser',
'password2': 'Pw4Newuser'}
form = CustomUserCreationForm(data=form_data)
self.assertTrue(form.is_valid())
def test_signup_form_success(self):
response = self.client.post('/signup/',
{
'username': 'TestUser',
'email': 'test3@test31.com',
'birth_date': datetime.datetime.now(),
'password1': 'Pw4Newuser',
'password2': 'Pw4Newuser'
})
self.assertEqual(response.status_code, 200)
class HomeViewTest(TestCase):
"""Test home view."""
def test_get_view_url_exists_at_desired_location(self):
response = self.client.get('/home/')
self.assertEqual(response.status_code, 200)
def test_get_view_url_accessible_by_name(self):
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
def test_get_view_uses_correct_template(self):
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'management_tool/home.html')
class UserListViewTest(TestCase):
"""Test user list view."""
def setUp(self):
"""Setup user for @login_required views."""
self.user = User.objects.create_user('john', 'lennon@thebeatles.com',
'johnpassword')
self.client.login(username='john', password='johnpassword')
def test_get_view_url_exists_at_desired_location(self):
response = self.client.get('/users/')
self.assertEqual(response.status_code, 200)
def test_get_view_url_accessible_by_name(self):
response = self.client.get(reverse('user_list'))
self.assertEqual(response.status_code, 200)
def test_get_view_uses_correct_template(self):
response = self.client.get(reverse('user_list'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'management_tool/user_list.html')
def test_get_view_for_anonymous_user_user(self):
self.client.logout()
response = self.client.get(reverse('user_list'))
self.assertEqual(response.status_code, 302)
def test_get_view_redirects_for_anonymous_user(self):
self.client.logout()
response = self.client.get(reverse('user_list'))
self.assertRedirects(response, '/login/?next=/users/')
def test_get_user_list(self):
response = self.client.get(reverse('user_list'))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['user_list']), 1)
self.assertEqual(response.context['user_list'][0].username, 'john')
class UserDetailsViewTest(TestCase):
"""Test user details view."""
def setUp(self):
"""Setup user for @login_required views."""
self.user = User.objects.create_user('john', 'lennon@thebeatles.com',
'johnpassword')
self.client.login(username='john', password='johnpassword')
def test_get_view_url_exists_at_desired_location(self):
response = self.client.get('/user/john/')
self.assertEqual(response.status_code, 200)
def test_get_view_url_accessible_by_name(self):
response = self.client.get(reverse('user_details',
kwargs={'username': self.user.username}))
self.assertEqual(response.status_code, 200)
def test_get_view_uses_correct_template(self):
response = self.client.get(reverse('user_details',
kwargs={'username': self.user.username}))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'management_tool/user_details.html')
def test_get_view_for_anonymous_user_user(self):
self.client.logout()
response = self.client.get(reverse('user_details',
kwargs={'username': self.user.username}))
self.assertEqual(response.status_code, 302)
def test_get_view_redirects_for_anonymous_user(self):
self.client.logout()
response = self.client.get(reverse('user_details',
kwargs={'username': self.user.username}))
self.assertRedirects(response, '/login/?next=/user/john/')
def test_get_user_details(self):
response = self.client.get(reverse('user_details',
kwargs={'username': self.user.username}))
self.assertEqual(response.status_code, 200)
self.assertTrue(response.context['user_details'])
self.assertEqual(response.context['user_details'].username, 'john')
def test_get_user_details_get_absolute_url(self):
response = self.client.get(self.user.get_absolute_url())
self.assertEqual(response.status_code, 200)
self.assertTrue(response.context['user_details'])
self.assertEqual(response.context['user_details'].username, 'john')
class UserEditViewTest(TestCase):
"""Test user edit view."""
def setUp(self):
"""Setup user for @login_required views."""
self.user = User.objects.create_user('john', 'lennon@thebeatles.com',
'johnpassword')
self.client.login(username='john', password='johnpassword')
def test_get_view_url_exists_at_desired_location(self):
response = self.client.get('/edit/')
self.assertEqual(response.status_code, 200)
def test_get_view_url_accessible_by_name(self):
response = self.client.get(reverse('user_edit'))
self.assertEqual(response.status_code, 200)
def test_get_view_uses_correct_template(self):
response = self.client.get(reverse('user_edit'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'management_tool/user_edit.html')
def test_get_view_for_anonymous_user_user(self):
self.client.logout()
response = self.client.get(reverse('user_edit'))
self.assertEqual(response.status_code, 302)
def test_get_view_redirects_for_anonymous_user(self):
self.client.logout()
response = self.client.get(reverse('user_edit'))
self.assertRedirects(response, '/login/?next=/edit/')
def test_get_view_populates_form_into_context(self):
response = self.client.get(reverse('user_edit'))
self.assertIsInstance(response.context['user_form'],
CustomUserChangeForm)
self.assertTrue(isinstance(response.context['user_form'],
CustomUserChangeForm))
def test_edit_form_validation_for_blank_items(self):
form_data = {'username': '',
'birth_date': '',
'random_number': ''}
form = CustomUserChangeForm(data=form_data)
self.assertFalse(form.is_valid())
for field, error in form.errors.items():
self.assertEqual(error,
["This field is required."])
def test_edit_form_error_messages_for_blank_items(self):
response = self.client.post(reverse('user_edit'))
messages = [m.message for m in get_messages(response.wsgi_request)]
self.assertIn('Please correct the error below.', messages)
def test_edit_form_is_valid(self):
form_data = {'username': 'TestUser',
'birth_date': datetime.datetime.now(),
'random_number': '4'}
form = CustomUserChangeForm(data=form_data)
self.assertTrue(form.is_valid())
class UserDeleteViewTest(TestCase):
"""Test user edit view."""
def setUp(self):
"""Setup user for @login_required views."""
self.user = User.objects.create_user('john', 'lennon@thebeatles.com',
'johnpassword')
self.client.login(username='john', password='johnpassword')
def test_get_view_url_exists_at_desired_location(self):
response = self.client.get('/delete/')
self.assertEqual(response.status_code, 200)
def test_get_view_url_accessible_by_name(self):
response = self.client.get(reverse('user_delete'))
self.assertEqual(response.status_code, 200)
def test_get_view_uses_correct_template(self):
response = self.client.get(reverse('user_delete'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'management_tool/user_delete.html')
def test_get_view_for_anonymous_user_user(self):
self.client.logout()
response = self.client.get(reverse('user_delete'))
self.assertEqual(response.status_code, 302)
def test_get_view_redirects_for_anonymous_user(self):
self.client.logout()
response = self.client.get(reverse('user_delete'))
self.assertRedirects(response, '/home/?next=/delete/')
def test_get_view_populates_form_into_context(self):
response = self.client.get(reverse('user_delete'))
self.assertTrue(response.context['user_delete'])
def test_post_view_deletes_user(self):
response = self.client.post(reverse('user_delete'))
self.assertEqual(response.status_code, 302)
self.assertFalse(self.client.login(username='john',
password='johnpassword'))
def test_post_view_redirects_for_anonymous_user(self):
self.client.logout()
response = self.client.post(reverse('user_delete'))
self.assertRedirects(response, '/home/?next=/delete/')
def test_delete_form_successful_messages(self):
response = self.client.post(reverse('user_delete'))
messages = [m.message for m in get_messages(response.wsgi_request)]
self.assertIn('User successfully deleted!', messages)
class ExportUserCSVViewTest(TestCase):
"""Test user list export view."""
def setUp(self):
self.user = User.objects.create_user(username='john',
email='lennon@thebeatles.com',
birth_date=datetime.datetime.now(),
password='johnpassword')
self.client.login(username='john', password='johnpassword')
def test_get_view_url_exists_at_desired_location(self):
response = self.client.get('/download/')
self.assertEqual(response.status_code, 200)
def test_get_view_url_accessible_by_name(self):
response = self.client.get(reverse('export_user_csv'))
self.assertEqual(response.status_code, 200)
def test_get_view_for_anonymous_user_user(self):
self.client.logout()
response = self.client.get(reverse('export_user_csv'))
self.assertEqual(response.status_code, 302)
def test_get_view_redirects_for_anonymous_user(self):
self.client.logout()
response = self.client.get(reverse('export_user_csv'))
self.assertRedirects(response, '/login/?next=/download/')
def test_csv_export(self):
response = self.client.get('/download/')
self.assertEqual(response.status_code, 200)
content = response.content.decode('utf-8')
csv_reader = csv.reader(io.StringIO(content))
body = list(csv_reader)
self.assertIn('john', body[1])
self.assertEqual(body.pop(0), ['Username', 'Birthday', 'Eligible',
'Random Number', 'BizzFuzz'])
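# For context, test_csv_export expects a header row followed by user rows. A
# hypothetical sketch of a matching export view using only standard Django
# and csv APIs (the project's real view may differ, and the field names
# beyond username/birth_date are assumptions):
def _sketch_export_user_csv(request):
    from django.http import HttpResponse
    response = HttpResponse(content_type="text/csv")
    response["Content-Disposition"] = 'attachment; filename="users.csv"'
    writer = csv.writer(response)
    writer.writerow(["Username", "Birthday", "Eligible",
                     "Random Number", "BizzFuzz"])
    for user in User.objects.all():
        writer.writerow([user.username, user.birth_date,
                         getattr(user, "eligible", ""),
                         getattr(user, "random_number", ""),
                         getattr(user, "bizz_fuzz", "")])
    return response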
| 41.746951
| 80
| 0.638282
| 1,511
| 13,693
| 5.532098
| 0.096625
| 0.070583
| 0.090441
| 0.092954
| 0.848307
| 0.805479
| 0.767197
| 0.751525
| 0.725326
| 0.697093
| 0
| 0.010763
| 0.246841
| 13,693
| 327
| 81
| 41.874618
| 0.799767
| 0.021982
| 0
| 0.678715
| 0
| 0
| 0.118401
| 0.02467
| 0
| 0
| 0
| 0
| 0.261044
| 1
| 0.204819
| false
| 0.068273
| 0.032129
| 0
| 0.26506
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
f798962a0d922e6f7be6deff6dc4880b92fdcfcf
| 17,824
|
py
|
Python
|
src/conductor/client/http/api/admin_resource_api.py
|
conductor-sdk/conductor-python
|
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
|
[
"Apache-2.0"
] | 3
|
2022-03-10T18:24:46.000Z
|
2022-03-22T20:49:30.000Z
|
src/conductor/client/http/api/admin_resource_api.py
|
conductor-sdk/conductor-python
|
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
|
[
"Apache-2.0"
] | 6
|
2022-03-08T17:48:28.000Z
|
2022-03-30T00:39:22.000Z
|
src/conductor/client/http/api/admin_resource_api.py
|
conductor-sdk/conductor-python
|
b3e4e0ae196f9963316a829fe42d9e7e01a390e2
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from conductor.client.http.api_client import ApiClient
class AdminResourceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_all_config(self, **kwargs): # noqa: E501
"""Get all the configuration parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_config(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_config_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_config_with_http_info(**kwargs) # noqa: E501
return data
def get_all_config_with_http_info(self, **kwargs): # noqa: E501
"""Get all the configuration parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_config_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_config" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/admin/config', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, object)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_event_queues(self, **kwargs): # noqa: E501
"""Get registered queues # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_event_queues(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool verbose:
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_event_queues_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_event_queues_with_http_info(**kwargs) # noqa: E501
return data
def get_event_queues_with_http_info(self, **kwargs): # noqa: E501
"""Get registered queues # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_event_queues_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool verbose:
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['verbose'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_event_queues" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'verbose' in params:
query_params.append(('verbose', params['verbose'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/admin/queues', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, object)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def requeue_sweep(self, workflow_id, **kwargs): # noqa: E501
"""Queue up all the running workflows for sweep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.requeue_sweep(workflow_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str workflow_id: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.requeue_sweep_with_http_info(workflow_id, **kwargs) # noqa: E501
else:
(data) = self.requeue_sweep_with_http_info(workflow_id, **kwargs) # noqa: E501
return data
def requeue_sweep_with_http_info(self, workflow_id, **kwargs): # noqa: E501
"""Queue up all the running workflows for sweep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.requeue_sweep_with_http_info(workflow_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str workflow_id: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workflow_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method requeue_sweep" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workflow_id' is set
if ('workflow_id' not in params or
params['workflow_id'] is None):
raise ValueError("Missing the required parameter `workflow_id` when calling `requeue_sweep`") # noqa: E501
collection_formats = {}
path_params = {}
if 'workflow_id' in params:
path_params['workflowId'] = params['workflow_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/admin/sweep/requeue/{workflowId}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def verify_and_repair_workflow_consistency(self, workflow_id, **kwargs): # noqa: E501
"""Verify and repair workflow consistency # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.verify_and_repair_workflow_consistency(workflow_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str workflow_id: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.verify_and_repair_workflow_consistency_with_http_info(workflow_id, **kwargs) # noqa: E501
else:
(data) = self.verify_and_repair_workflow_consistency_with_http_info(workflow_id, **kwargs) # noqa: E501
return data
def verify_and_repair_workflow_consistency_with_http_info(self, workflow_id, **kwargs): # noqa: E501
"""Verify and repair workflow consistency # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.verify_and_repair_workflow_consistency_with_http_info(workflow_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str workflow_id: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workflow_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method verify_and_repair_workflow_consistency" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workflow_id' is set
if ('workflow_id' not in params or
params['workflow_id'] is None):
raise ValueError("Missing the required parameter `workflow_id` when calling `verify_and_repair_workflow_consistency`") # noqa: E501
collection_formats = {}
path_params = {}
if 'workflow_id' in params:
path_params['workflowId'] = params['workflow_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/admin/consistency/verifyAndRepair/{workflowId}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def view(self, tasktype, **kwargs): # noqa: E501
"""Get the list of pending tasks for a given task type # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.view(tasktype, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str tasktype: (required)
:param int start:
:param int count:
:return: list[Task]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.view_with_http_info(tasktype, **kwargs) # noqa: E501
else:
(data) = self.view_with_http_info(tasktype, **kwargs) # noqa: E501
return data
def view_with_http_info(self, tasktype, **kwargs): # noqa: E501
"""Get the list of pending tasks for a given task type # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.view_with_http_info(tasktype, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str tasktype: (required)
:param int start:
:param int count:
:return: list[Task]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['tasktype', 'start', 'count'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method view" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'tasktype' is set
if ('tasktype' not in params or
params['tasktype'] is None):
raise ValueError("Missing the required parameter `tasktype` when calling `view`") # noqa: E501
collection_formats = {}
path_params = {}
if 'tasktype' in params:
path_params['tasktype'] = params['tasktype'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/admin/task/{tasktype}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Task]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
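# Typical usage, following the docstrings above (a sketch; server address and
# client configuration are assumptions, not part of this module):
if __name__ == '__main__':
    api = AdminResourceApi(ApiClient())
    config = api.get_all_config()                  # synchronous call
    thread = api.get_all_config(async_req=True)    # asynchronous call
    config_async = thread.get()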
| 36.902692
| 144
| 0.599192
| 2,027
| 17,824
| 5.00296
| 0.079921
| 0.046544
| 0.027611
| 0.035499
| 0.922986
| 0.907997
| 0.900602
| 0.8936
| 0.883148
| 0.872892
| 0
| 0.014778
| 0.309022
| 17,824
| 482
| 145
| 36.979253
| 0.808623
| 0.303411
| 0
| 0.769811
| 0
| 0
| 0.168616
| 0.045558
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041509
| false
| 0
| 0.015094
| 0
| 0.116981
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3b74eb140b3fc8744eaec533cde831dab19733e
| 5,288
|
py
|
Python
|
tests/base/test_endpoints_login_expiration.py
|
rapydo/http-api
|
ef0a299173195145303069534d45d446ea4da93a
|
[
"MIT"
] | 8
|
2018-07-04T09:54:46.000Z
|
2022-03-17T08:21:06.000Z
|
tests/base/test_endpoints_login_expiration.py
|
rapydo/http-api
|
ef0a299173195145303069534d45d446ea4da93a
|
[
"MIT"
] | 19
|
2018-04-18T07:24:55.000Z
|
2022-03-04T01:03:15.000Z
|
tests/base/test_endpoints_login_expiration.py
|
rapydo/http-api
|
ef0a299173195145303069534d45d446ea4da93a
|
[
"MIT"
] | 7
|
2018-07-03T12:17:50.000Z
|
2021-05-05T04:33:32.000Z
|
import time
from datetime import datetime, timedelta
import pytz
from restapi.env import Env
from restapi.tests import API_URI, AUTH_URI, BaseTests, FlaskClient
from restapi.utilities.logs import Events, log
class TestApp2(BaseTests):
def test_01_login_expiration(self, client: FlaskClient) -> None:
if not Env.get_bool("MAIN_LOGIN_ENABLE") or not Env.get_bool("AUTH_ENABLE"):
log.warning("Skipping login expiration tests")
return
# Let's create a new user with an expiration time of N seconds
expiration_time = 10
expiration = datetime.now(pytz.utc) + timedelta(seconds=expiration_time)
uuid, data = self.create_user(client, data={"expiration": expiration})
# The user is valid
valid_headers, _ = self.do_login(client, data["email"], data["password"])
assert valid_headers is not None
# But after N seconds the login will be refused
time.sleep(expiration_time)
invalid_headers, error = self.do_login(
client,
data["email"],
data["password"],
status_code=403,
)
assert invalid_headers is None
assert error == "Sorry, this account is expired"
events = self.get_last_events(1)
assert events[0].event == Events.refused_login.value
assert events[0].payload["username"] == data["email"]
assert events[0].payload["motivation"] == "account expired"
assert events[0].url == "/auth/login"
# This token was valid before the expiration, but should no longer be valid
# due to the short TTL set when it was emitted (capped to the expiration time)
r = client.get(f"{AUTH_URI}/status", headers=valid_headers)
assert r.status_code == 401
if Env.get_bool("ALLOW_PASSWORD_RESET"):
reset_data = {"reset_email": data["email"]}
r = client.post(f"{AUTH_URI}/reset", data=reset_data)
assert r.status_code == 403
assert self.get_content(r) == "Sorry, this account is expired"
events = self.get_last_events(1)
assert events[0].event == Events.refused_login.value
assert events[0].payload["username"] == data["email"]
assert events[0].payload["motivation"] == "account expired"
assert events[0].url == "/auth/reset"
# Let's extend the account validity for another N seconds
admin_headers, _ = self.do_login(client, None, None)
expiration = datetime.now(pytz.utc) + timedelta(seconds=expiration_time)
r = client.put(
f"{API_URI}/admin/users/{uuid}",
data={"expiration": expiration},
headers=admin_headers,
)
assert r.status_code == 204
# The user is valid again
valid_headers, _ = self.do_login(client, data["email"], data["password"])
assert valid_headers is not None
# But after N seconds the login will be refused again
time.sleep(expiration_time)
invalid_headers, error = self.do_login(
client,
data["email"],
data["password"],
status_code=403,
)
assert invalid_headers is None
assert error == "Sorry, this account is expired"
events = self.get_last_events(1)
assert events[0].event == Events.refused_login.value
assert events[0].payload["username"] == data["email"]
assert events[0].payload["motivation"] == "account expired"
assert events[0].url == "/auth/login"
# Test reduction of account validity
# Let's extend it for another N seconds
admin_headers, _ = self.do_login(client, None, None)
expiration = datetime.now(pytz.utc) + timedelta(seconds=expiration_time)
r = client.put(
f"{API_URI}/admin/users/{uuid}",
data={"expiration": expiration},
headers=admin_headers,
)
assert r.status_code == 204
# The user is valid again
valid_headers, _ = self.do_login(client, data["email"], data["password"])
assert valid_headers is not None
# Let's set an already expired date
expiration = datetime.now(pytz.utc) - timedelta(seconds=expiration_time)
r = client.put(
f"{API_URI}/admin/users/{uuid}",
data={"expiration": expiration},
headers=admin_headers,
)
assert r.status_code == 204
# User is no longer valid
invalid_headers, error = self.do_login(
client,
data["email"],
data["password"],
status_code=403,
)
assert invalid_headers is None
assert error == "Sorry, this account is expired"
events = self.get_last_events(1)
assert events[0].event == Events.refused_login.value
assert events[0].payload["username"] == data["email"]
assert events[0].payload["motivation"] == "account expired"
assert events[0].url == "/auth/login"
# This token was valid and its original TTL extended beyond now,
# but when the user's expiration was reduced the token was invalidated
r = client.get(f"{AUTH_URI}/status", headers=valid_headers)
assert r.status_code == 401
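# --- A minimal sketch (not part of the suite above) of how a login token's
# --- TTL can be capped to the account expiration, which is the behaviour the
# --- comments above rely on. `DEFAULT_TOKEN_TTL` and `capped_token_ttl` are
# --- hypothetical names used only for illustration.
from datetime import datetime, timedelta
import pytz

DEFAULT_TOKEN_TTL = timedelta(hours=1)

def capped_token_ttl(expiration: datetime) -> timedelta:
    # A token must never outlive the account it was issued for
    remaining = expiration - datetime.now(pytz.utc)
    return min(DEFAULT_TOKEN_TTL, max(remaining, timedelta(0)))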
38.043165 | 84 | 0.615545 | 652 | 5,288 | 4.863497 | 0.191718 | 0.060549 | 0.065594 | 0.042889 | 0.713655 | 0.713655 | 0.713655 | 0.713655 | 0.713655 | 0.695364 | 0 | 0.013623 | 0.278177 | 5,288 | 138 | 85 | 38.318841 | 0.817134 | 0.124622 | 0 | 0.734694 | 0 | 0 | 0.143724 | 0.018209 | 0 | 0 | 0 | 0 | 0.326531 | 1 | 0.010204 | false | 0.071429 | 0.061224 | 0 | 0.091837 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7
e3e3dded94decf67a578fa322e911fa31e7c15f6 | 23,975 | py | Python | tests/dhcpv6/kea_only/host_reservation/test_host_reservation_address_conflicts_pgsql.py | shawnmullaney/forge | aaaef0a0645f73d24666aab6a400f3604e753aac | ["0BSD"] | null | null | null | tests/dhcpv6/kea_only/host_reservation/test_host_reservation_address_conflicts_pgsql.py | shawnmullaney/forge | aaaef0a0645f73d24666aab6a400f3604e753aac | ["0BSD"] | null | null | null | tests/dhcpv6/kea_only/host_reservation/test_host_reservation_address_conflicts_pgsql.py | shawnmullaney/forge | aaaef0a0645f73d24666aab6a400f3604e753aac | ["0BSD"] | null | null | null
"""Host Reservation DHCPv6"""
# pylint: disable=invalid-name,line-too-long
import pytest
import misc
import srv_msg
import srv_control
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.pgsql
def test_v6_host_reservation_duplicate_reservation_duid():
misc.test_setup()
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::10')
srv_control.enable_db_backend_reservation('PostgreSQL')
srv_control.new_db_backend_reservation('PostgreSQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '1', 'PostgreSQL', '1')
srv_control.ipv6_address_db_backend_reservation('3000::1', '$(EMPTY)', 'PostgreSQL', '1')
srv_control.new_db_backend_reservation('PostgreSQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '1', 'PostgreSQL', '2')
srv_control.ipv6_address_db_backend_reservation('3000::2', '$(EMPTY)', 'PostgreSQL', '2')
srv_control.upload_db_reservation('PostgreSQL')
# The upload should fail!  # TODO: add a step verifying the failed upload
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'started')
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.pgsql
def test_v6_host_reservation_duplicate_reservation_address():
misc.test_setup()
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::10')
srv_control.enable_db_backend_reservation('PostgreSQL')
srv_control.new_db_backend_reservation('PostgreSQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '1', 'PostgreSQL', '1')
srv_control.ipv6_address_db_backend_reservation('3000::1', '$(EMPTY)', 'PostgreSQL', '1')
srv_control.new_db_backend_reservation('PostgreSQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:11')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '1', 'PostgreSQL', '1')
srv_control.ipv6_address_db_backend_reservation('3000::1', '$(EMPTY)', 'PostgreSQL', '1')
srv_control.upload_db_reservation('PostgreSQL')
# The upload should fail!  # TODO: add a step verifying the failed upload
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'started')
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.pgsql
def test_v6_host_reservation_pgsql_conflicts_two_entries_for_one_host_different_subnets():
misc.test_setup()
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::10')
srv_control.enable_db_backend_reservation('PostgreSQL')
srv_control.new_db_backend_reservation('PostgreSQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '1', 'PostgreSQL', '1')
srv_control.ipv6_address_db_backend_reservation('3000::1', '$(EMPTY)', 'PostgreSQL', '1')
srv_control.new_db_backend_reservation('PostgreSQL', 'hw-address', 'f6:f5:f4:f3:f2:01')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '2', 'PostgreSQL', '2')
srv_control.ipv6_address_db_backend_reservation('3000::3', '$(EMPTY)', 'PostgreSQL', '2')
srv_control.upload_db_reservation('PostgreSQL')
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'ia_id', '666')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
misc.test_procedure()
srv_msg.client_copy_option('server-id')
srv_msg.client_copy_option('IA_NA')
srv_msg.client_sets_value('Client', 'ia_id', '666')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'addr', '3000::1')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'ia_id', '667')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
misc.test_procedure()
srv_msg.client_copy_option('IA_NA')
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'ia_id', '667')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
srv_msg.response_check_suboption_content('Response', '5', '3', 'NOT ', 'addr', '3000::3')
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.pgsql
def test_v6_host_reservation_pgsql_conflicts_reconfigure_server_with_reservation_of_used_address():
misc.test_setup()
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::2')
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
# bigger prefix pool + reservation
misc.test_setup()
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::10')
srv_control.enable_db_backend_reservation('PostgreSQL')
srv_control.new_db_backend_reservation('PostgreSQL',
'hw-address',
'00:03:00:01:f6:f5:f4:f3:f2:01')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '1', 'PostgreSQL', '1')
srv_control.ipv6_address_db_backend_reservation('3000::1', '$(EMPTY)', 'PostgreSQL', '1')
srv_control.upload_db_reservation('PostgreSQL')
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'reconfigured')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('server-id')
srv_msg.client_copy_option('IA_NA')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
srv_msg.response_check_suboption_content('Response', '5', '3', 'NOT ', 'addr', '3000::1')
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.pgsql
def test_v6_host_reservation_pgsql_conflicts_reconfigure_server_with_reservation_of_used_address_2():
misc.test_setup()
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::2')
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('IA_NA')
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('IA_NA')
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '13')
srv_msg.response_check_suboption_content('Response', '13', '3', None, 'statuscode', '2')
# bigger prefix pool + reservation
misc.test_setup()
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::10')
srv_control.enable_db_backend_reservation('PostgreSQL')
srv_control.new_db_backend_reservation('PostgreSQL',
'hw-address',
'00:03:00:01:f6:f5:f4:f3:f2:01')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '1', 'PostgreSQL', '1')
srv_control.ipv6_address_db_backend_reservation('3000::1', '$(EMPTY)', 'PostgreSQL', '1')
srv_control.upload_db_reservation('PostgreSQL')
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'reconfigured')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('IA_NA')
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
srv_msg.response_check_suboption_content('Response', '5', '3', 'NOT ', 'addr', '3000::1')
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.pgsql
def test_v6_host_reservation_pgsql_conflicts_reconfigure_server_with_reservation_of_used_address_renew_before_expire():
misc.test_setup()
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::2')
# Use PostgreSQL reservation system.
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('IA_NA')
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:11')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('IA_NA')
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
# SAVE VALUES
srv_msg.client_save_option('IA_NA')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '13')
srv_msg.response_check_suboption_content('Response', '13', '3', None, 'statuscode', '2')
# bigger prefix pool + reservation
misc.test_setup()
srv_control.set_time('renew-timer', '105')
srv_control.set_time('rebind-timer', '106')
srv_control.set_time('valid-lifetime', '107')
srv_control.set_time('preferred-lifetime', '108')
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::3')
srv_control.enable_db_backend_reservation('PostgreSQL')
srv_control.new_db_backend_reservation('PostgreSQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '1', 'PostgreSQL', '1')
srv_control.ipv6_address_db_backend_reservation('3000::2', '$(EMPTY)', 'PostgreSQL', '1')
srv_control.upload_db_reservation('PostgreSQL')
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'reconfigured')
misc.test_procedure()
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_add_saved_option('DONT ')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('RENEW')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'validlft', '0')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'addr', '3000::2')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'validlft', '107')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'addr', '3000::3')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('server-id')
srv_msg.client_copy_option('IA_NA')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'addr', '3000::2')
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.pgsql
def test_v6_host_reservation_pgsql_conflicts_reconfigure_server_with_reservation_of_used_address_renew_after_expire():
misc.test_setup()
srv_control.set_time('renew-timer', '5')
srv_control.set_time('rebind-timer', '6')
srv_control.set_time('preferred-lifetime', '7')
srv_control.set_time('valid-lifetime', '8')
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::2')
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'started')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_sets_value('Client', 'ia_id', '11')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_sets_value('Client', 'ia_id', '22')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'ia_id', '11')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_sets_value('Client', 'ia_id', '22')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
# SAVE VALUES
srv_msg.client_save_option('IA_NA')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '13')
srv_msg.response_check_suboption_content('Response', '13', '3', None, 'statuscode', '2')
# bigger prefix pool + reservation
srv_msg.forge_sleep('5', 'seconds')
misc.test_setup()
srv_control.set_time('renew-timer', '5')
srv_control.set_time('rebind-timer', '6')
srv_control.set_time('preferred-lifetime', '7')
srv_control.set_time('valid-lifetime', '8')
srv_control.config_srv_subnet('3000::/30', '3000::1-3000::3')
srv_control.enable_db_backend_reservation('PostgreSQL')
srv_control.new_db_backend_reservation('PostgreSQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_control.update_db_backend_reservation('dhcp6_subnet_id', '1', 'PostgreSQL', '1')
srv_control.ipv6_address_db_backend_reservation('3000::2', '$(EMPTY)', 'PostgreSQL', '1')
srv_control.upload_db_reservation('PostgreSQL')
srv_control.build_and_send_config_files('SSH', 'config-file')
srv_control.start_srv('DHCP', 'reconfigured')
misc.test_procedure()
srv_msg.client_copy_option('server-id')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:66:55:44:33:22:22')
srv_msg.client_sets_value('Client', 'IA_Address', '3000::1')
srv_msg.client_does_include('Client', None, 'IA_Address')
srv_msg.client_sets_value('Client', 'IA_Address', '3000::2')
srv_msg.client_does_include('Client', None, 'IA_Address')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('RENEW')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'validlft', '0')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'addr', '3000::2')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'validlft', '8')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'addr', '3000::1')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'addr', '3000::3')
misc.test_procedure()
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_does_include('Client', None, 'IA-NA')
srv_msg.client_send_msg('SOLICIT')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
misc.test_procedure()
srv_msg.client_copy_option('server-id')
srv_msg.client_copy_option('IA_NA')
srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
srv_msg.client_does_include('Client', None, 'client-id')
srv_msg.client_send_msg('REQUEST')
misc.pass_criteria()
srv_msg.send_wait_for_message('MUST', None, 'REPLY')
srv_msg.response_check_include_option('Response', None, '3')
srv_msg.response_check_option_content('Response', '3', None, 'sub-option', '5')
srv_msg.response_check_suboption_content('Response', '5', '3', None, 'addr', '3000::2')
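# A hypothetical helper (not part of the original file) distilling the
# Solicit/Advertise/Request/Reply exchange the tests above repeat; it assumes
# the misc/srv_msg calls behave exactly as used in those tests.
def _sarr_exchange(duid, ia_id=None):
    misc.test_procedure()
    if ia_id is not None:
        srv_msg.client_sets_value('Client', 'ia_id', ia_id)
    srv_msg.client_sets_value('Client', 'DUID', duid)
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_does_include('Client', None, 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'ADVERTISE')
    misc.test_procedure()
    srv_msg.client_copy_option('IA_NA')
    srv_msg.client_copy_option('server-id')
    srv_msg.client_sets_value('Client', 'DUID', duid)
    srv_msg.client_does_include('Client', None, 'client-id')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', None, 'REPLY')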
43.590909 | 119 | 0.713952 | 3,608 | 23,975 | 4.38969 | 0.042406 | 0.086375 | 0.115924 | 0.053542 | 0.984026 | 0.984026 | 0.977459 | 0.977459 | 0.977459 | 0.971208 | 0 | 0.055935 | 0.121585 | 23,975 | 549 | 120 | 43.67031 | 0.696106 | 0.015224 | 0 | 0.935185 | 0 | 0 | 0.230916 | 0.049167 | 0 | 0 | 0 | 0.001821 | 0 | 1 | 0.016204 | true | 0.071759 | 0.009259 | 0 | 0.025463 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 9
e3efd5731487108d24b6eb82905448b5d8e53a13 | 5,808 | py | Python | src/tests/lambda_authorizer_tests.py | u93/aws-iot-certificate-vending-machine | f0ef5115f5627e53f5ad763b808b2ce597919114 | ["MIT"] | 2 | 2021-03-12T04:11:49.000Z | 2021-08-10T05:41:56.000Z | src/tests/lambda_authorizer_tests.py | u93/aws-iot-certificate-vending-machine | f0ef5115f5627e53f5ad763b808b2ce597919114 | ["MIT"] | 2 | 2021-02-06T18:26:59.000Z | 2021-06-02T03:59:21.000Z | src/tests/lambda_authorizer_tests.py | u93/aws-iot-certificate-vending-machine | f0ef5115f5627e53f5ad763b808b2ce597919114 | ["MIT"] | null | null | null
import json
import os
os.environ["APP_CONFIG_PATH"] = "/multa-cvm/dev/config-parameters"
from handlers.utils import Logger
from applications.aws_lambda.basic.lambda_authorizer import lambda_handler as auth_handler
# Import Project Logger
project_logger = Logger()
logger = project_logger.get_logger()
valid_device_token = "DeviceToken NjWO2tVh6fVAeNuLwRsPi-c6N7SP5-DT"
invalid_device_token = "DeviceToken 123"
invalid_token_prefix = "Token 123"
invalid_token_composition = "DeviceToken-123"
def test_valid_device_token():
"""Tests correct payload received by Authorizer Lambda"""
token = "DeviceToken NjWO2tVh6fVAeNuLwRsPi-c6N7SP5-DT"
event = {
"type": "TOKEN",
"methodArn": "arn:aws:execute-api:us-east-1:112646120612:n8il2c2eic/prod/POST/register",
"authorizationToken": token,
}
assert isinstance(event, dict)
assert event["type"] == "TOKEN"
assert isinstance(event["methodArn"], str)
assert isinstance(event["authorizationToken"], str)
result = auth_handler(event=event, context={})
assert isinstance(result, dict)
assert isinstance(result["principalId"], str)
assert result["principalId"] == "".join(token.split(" "))
assert isinstance(result["policyDocument"], dict)
assert isinstance(result["policyDocument"]["Version"], str)
assert result["policyDocument"]["Version"] == "2012-10-17"
assert isinstance(result["policyDocument"]["Statement"], list)
for element in result["policyDocument"]["Statement"]:
assert element["Action"] == "execute-api:Invoke"
assert element["Effect"] == "Allow"
assert isinstance(element["Resource"], list)
for sub_element in element["Resource"]:
assert isinstance(sub_element, str)
assert "POST" in sub_element
assert "register" in sub_element
def test_invalid_device_token():
"""Tests invalid device token value received by Authorizer Lambda"""
token = "DeviceToken 123"
event = {
"type": "TOKEN",
"methodArn": "arn:aws:execute-api:us-east-1:112646120612:n8il2c2eic/prod/POST/register",
"authorizationToken": token,
}
assert isinstance(event, dict)
assert event["type"] == "TOKEN"
assert isinstance(event["methodArn"], str)
assert isinstance(event["authorizationToken"], str)
result = auth_handler(event=event, context={})
assert isinstance(result, dict)
assert isinstance(result["principalId"], str)
assert result["principalId"] == "".join(token.split(" "))
assert isinstance(result["policyDocument"], dict)
assert isinstance(result["policyDocument"]["Version"], str)
assert result["policyDocument"]["Version"] == "2012-10-17"
assert isinstance(result["policyDocument"]["Statement"], list)
for element in result["policyDocument"]["Statement"]:
assert element["Action"] == "execute-api:Invoke"
assert element["Effect"] == "Deny"
assert isinstance(element["Resource"], list)
for sub_element in element["Resource"]:
assert isinstance(sub_element, str)
assert "*" in sub_element
assert "*" in sub_element
def test_invalid_token_prefix():
"""Tests invalid token prefix received by Authorizer Lambda"""
token = "Token 123"
event = {
"type": "TOKEN",
"methodArn": "arn:aws:execute-api:us-east-1:112646120612:n8il2c2eic/prod/POST/register",
"authorizationToken": token,
}
assert isinstance(event, dict)
assert event["type"] == "TOKEN"
assert isinstance(event["methodArn"], str)
assert isinstance(event["authorizationToken"], str)
result = auth_handler(event=event, context={})
assert isinstance(result, dict)
assert isinstance(result["principalId"], str)
assert result["principalId"] == "".join(token.split(" "))
assert isinstance(result["policyDocument"], dict)
assert isinstance(result["policyDocument"]["Version"], str)
assert result["policyDocument"]["Version"] == "2012-10-17"
assert isinstance(result["policyDocument"]["Statement"], list)
for element in result["policyDocument"]["Statement"]:
assert element["Action"] == "execute-api:Invoke"
assert element["Effect"] == "Deny"
assert isinstance(element["Resource"], list)
for sub_element in element["Resource"]:
assert isinstance(sub_element, str)
assert "*" in sub_element
assert "*" in sub_element
def test_invalid_token_composition():
"""Tests invalid token composition received by Authorizer Lambda"""
token = "DeviceToken-123"
event = {
"type": "TOKEN",
"methodArn": "arn:aws:execute-api:us-east-1:112646120612:n8il2c2eic/prod/POST/register",
"authorizationToken": token,
}
assert isinstance(event, dict)
assert event["type"] == "TOKEN"
assert isinstance(event["methodArn"], str)
assert isinstance(event["authorizationToken"], str)
result = auth_handler(event=event, context={})
assert isinstance(result, dict)
assert isinstance(result["principalId"], str)
assert result["principalId"] == "".join(token)
assert isinstance(result["policyDocument"], dict)
assert isinstance(result["policyDocument"]["Version"], str)
assert result["policyDocument"]["Version"] == "2012-10-17"
assert isinstance(result["policyDocument"]["Statement"], list)
for element in result["policyDocument"]["Statement"]:
assert element["Action"] == "execute-api:Invoke"
assert element["Effect"] == "Deny"
assert isinstance(element["Resource"], list)
for sub_element in element["Resource"]:
assert isinstance(sub_element, str)
assert "*" in sub_element
assert "*" in sub_element
36.759494 | 96 | 0.673382 | 616 | 5,808 | 6.269481 | 0.137987 | 0.165717 | 0.113931 | 0.111859 | 0.861471 | 0.833765 | 0.816157 | 0.816157 | 0.816157 | 0.816157 | 0 | 0.026299 | 0.188189 | 5,808 | 157 | 97 | 36.993631 | 0.792789 | 0.044077 | 0 | 0.773109 | 0 | 0.033613 | 0.279696 | 0.069427 | 0 | 0 | 0 | 0 | 0.571429 | 1 | 0.033613 | false | 0 | 0.033613 | 0 | 0.067227 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8
5418756cd55a6eaf628c409c05052a3b3447c584 | 158 | py | Python | anime/__init__.py | SodaCookie/pygame-animations | 44546b5254a659a38bed744e81008d999a328fb2 | ["MIT"] | 14 | 2015-07-02T03:27:23.000Z | 2017-10-13T09:27:21.000Z | anime/__init__.py | SodaCookie/pygame-animations | 44546b5254a659a38bed744e81008d999a328fb2 | ["MIT"] | 2 | 2015-07-30T04:07:37.000Z | 2015-07-30T04:15:43.000Z | anime/__init__.py | SodaCookie/pygame-animations | 44546b5254a659a38bed744e81008d999a328fb2 | ["MIT"] | 7 | 2015-09-09T15:04:45.000Z | 2019-11-21T04:52:09.000Z
from anime.core.anime import Anime, AnimeBase
from anime.core.episode import Episode
import anime.core.filter as filter
import anime.core.renderer as renderer
39.5 | 45 | 0.841772 | 25 | 158 | 5.32 | 0.36 | 0.270677 | 0.195489 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.101266 | 158 | 4 | 46 | 39.5 | 0.93662 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7
5861ece416fbef06a2c69d3486435ca4929ba641 | 4,881 | py | Python | logical/converter/qiskit/qasm/_qasmparser.py | malcolmregan/GateCircuit-to-AnnealerEmbedding | 33a1a4ea2ebd707ade0677e0df468d5120a861db | ["Apache-2.0"] | null | null | null | logical/converter/qiskit/qasm/_qasmparser.py | malcolmregan/GateCircuit-to-AnnealerEmbedding | 33a1a4ea2ebd707ade0677e0df468d5120a861db | ["Apache-2.0"] | 1 | 2019-04-09T02:22:38.000Z | 2019-04-09T02:22:38.000Z | logical/converter/qiskit/qasm/_qasmparser.py | malcolmregan/GateCircuit-to-AnnealerEmbedding | 33a1a4ea2ebd707ade0677e0df468d5120a861db | ["Apache-2.0"] | null | null | null
import os
import shutil
import tempfile
import ply.yacc as yacc
import sympy
from . import _node as node
from ._qasmerror import QasmError
from ._qasmlexer import QasmLexer
class QasmParser(object):
pass
def __init__(self, filename):
pass
def __enter__(self):
pass
def __exit__(self, *args):
pass
def update_symtab(self, obj):
pass
def verify_declared_bit(self, obj):
pass
def verify_bit_list(self, obj):
pass
def verify_exp_list(self, obj):
pass
def verify_as_gate(self, obj, bitlist, arglist=None):
pass
def verify_reg(self, obj, object_type):
pass
def verify_reg_list(self, obj, object_type):
pass
def id_tuple_list(self, id_node):
pass
def verify_distinct(self, list_of_nodes):
pass
def pop_scope(self):
pass
def push_scope(self):
pass
def p_main(self, program):
pass
def p_program_0(self, program):
pass
def p_program_1(self, program):
pass
def p_statement(self, program):
pass
def p_format(self, program):
pass
def p_format_0(self, program):
pass
def p_id(self, program):
pass
def p_id_e(self, program):
pass
def p_indexed_id(self, program):
pass
def p_primary(self, program):
pass
def p_id_list_0(self, program):
pass
def p_id_list_1(self, program):
pass
def p_gate_id_list_0(self, program):
pass
def p_gate_id_list_1(self, program):
pass
def p_bit_list_0(self, program):
pass
def p_bit_list_1(self, program):
pass
def p_primary_list_0(self, program):
pass
def p_primary_list_1(self, program):
pass
def p_decl(self, program):
pass
def p_qreg_decl(self, program):
pass
def p_qreg_decl_e(self, program):
pass
def p_creg_decl(self, program):
pass
def p_creg_decl_e(self, program):
pass
def p_gate_decl_0(self, program):
pass
def p_gate_decl_1(self, program):
pass
def p_gate_decl_2(self, program):
pass
def p_gate_scope(self, program):
pass
def p_gate_body_0(self, program):
pass
def p_gate_body_1(self, program):
pass
def p_gate_op_list_0(self, program):
pass
def p_gate_op_list_1(self, program):
pass
def p_unitary_op_0(self, program):
pass
def p_unitary_op_1(self, program):
pass
def p_unitary_op_2(self, program):
pass
def p_unitary_op_3(self, program):
pass
def p_unitary_op_4(self, program):
pass
def p_gate_op_0(self, program):
pass
def p_gate_op_0e1(self, p):
pass
def p_gate_op_0e2(self, program):
pass
def p_gate_op_1(self, program):
pass
def p_gate_op_1e1(self, program):
pass
def p_gate_op_1e2(self, program):
pass
def p_gate_op_2(self, program):
pass
def p_gate_op_2e(self, program):
pass
def p_gate_op_3(self, program):
pass
def p_gate_op_4(self, program):
pass
def p_gate_op_4e0(self, program):
pass
def p_gate_op_4e1(self, program):
pass
def p_gate_op_5(self, program):
pass
def p_gate_op_5e(self, program):
pass
def p_opaque_0(self, program):
pass
def p_opaque_1(self, program):
pass
def p_opaque_2(self, program):
pass
def p_opaque_1e(self, program):
pass
def p_measure(self, program):
pass
def p_measure_e(self, program):
pass
def p_barrier(self, program):
pass
def p_reset(self, program):
pass
def p_if(self, program):
pass
def p_quantum_op(self, program):
pass
def p_unary_0(self, program):
pass
def p_unary_1(self, program):
pass
def p_unary_2(self, program):
pass
def p_unary_3(self, program):
pass
def p_unary_4(self, program):
pass
def p_unary_6(self, program):
pass
def p_expression_1(self, program):
pass
def p_expression_0(self, program):
pass
def p_expression_2(self, program):
pass
def p_exp_list_0(self, program):
pass
def p_exp_list_1(self, program):
pass
def p_ignore(self, program):
pass
def p_error(self, program):
pass
def find_column(self, input_, token):
pass
def get_tokens(self):
pass
def parse_debug(self, val):
pass
def parse(self, data):
pass
def print_tree(self):
pass
def run(self, data):
pass
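# The p_* bodies above are stubbed out in this dump. As a reference for the
# shape such ply.yacc rules take, here is a tiny self-contained grammar (a
# toy adder, not the real OPENQASM grammar):
import ply.lex as toy_lex
import ply.yacc as toy_yacc

tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
t_ignore = ' '

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

def p_expr_plus(p):
    """expr : expr PLUS NUMBER"""
    p[0] = p[1] + p[3]

def p_expr_number(p):
    """expr : NUMBER"""
    p[0] = p[1]

def p_error(p):
    pass

toy_lexer = toy_lex.lex()
toy_parser = toy_yacc.yacc(debug=False, write_tables=False)
print(toy_parser.parse('1 + 2 + 3', lexer=toy_lexer))  # -> 6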
15.495238 | 57 | 0.58615 | 698 | 4,881 | 3.786533 | 0.140401 | 0.246311 | 0.220961 | 0.490352 | 0.751419 | 0.702232 | 0.436625 | 0.14983 | 0.021188 | 0 | 0 | 0.01789 | 0.335792 | 4,881 | 314 | 58 | 15.544586 | 0.797347 | 0 | 0 | 0.479592 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.47449 | false | 0.479592 | 0.040816 | 0 | 0.520408 | 0.005102 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 7
5887726d0394981c27294743851dea0ab39de568 | 5,051 | py | Python | nmfamv2/stan_process/stan_model_constants.py | gmarupilla/NMRFAMv2 | 2e9e7e1f43dbf1d8bcdbcff044bb686db3af09e0 | ["MIT"] | null | null | null | nmfamv2/stan_process/stan_model_constants.py | gmarupilla/NMRFAMv2 | 2e9e7e1f43dbf1d8bcdbcff044bb686db3af09e0 | ["MIT"] | null | null | null | nmfamv2/stan_process/stan_model_constants.py | gmarupilla/NMRFAMv2 | 2e9e7e1f43dbf1d8bcdbcff044bb686db3af09e0 | ["MIT"] | null | null | null
STAN_MODEL_ADJUSTING = """
data
{
int is_it_final;
int LENGTH;
int final_size;
vector [final_size] scales_mat_est;
matrix [final_size , final_size] temps_square_dots;
vector [final_size] temps_mixture_dots ;
vector [final_size] zeros ;
vector [final_size] sigma ;
vector [final_size] scales_mean ;
vector [final_size] scales_sigma ;
}
parameters
{
vector <lower = 0.001 , upper = 1000000000 > [final_size] scales;
}
model
{
scales ~ normal(scales_mean , scales_sigma);
zeros ~ normal( (temps_mixture_dots - (temps_square_dots * scales)), sigma);
}
"""
# INITIALESTIMATE
STAN_MODEL_BASIC = """
data
{
int is_it_final;
int LENGTH; //2048
int final_size; //5
vector [final_size] scales_mat_est ; // Grant's estimations for the scale values
matrix [final_size , final_size] temps_square_dots; // dot product of each template with itself
//row_vector [final_size] temps_mixture_dots ; // dot product of each template with the mixture
vector [final_size] temps_mixture_dots ; // dot product of each template with the mixture
//row_vector [final_size] zeros; // a list of zeros with the same size as the number of the metabolites and scales
vector [final_size] zeros; // a list of zeros with the same size as the number of the metabolites and scales
vector [final_size] sigma ; //list of small numbers
//matrix [final_size,final_size] scales_mean ; // diagonal scales_mat_est rest of it zero
//matrix [final_size,final_size] scales_sigma; // diagonal scales_mat_est * 2 rest of it zero
//row_vector [final_size] scales_mean ;
vector [final_size] scales_mean ;
//row_vector [final_size] scales_sigma ;
vector [final_size] scales_sigma ;
}
parameters
{
//row_vector <lower = 0.001 , upper = 100000 > [final_size] scales; // scale values
vector <lower = 0.001 , upper = 1000000 > [final_size] scales; // scale values
}
model
{
scales ~ normal(scales_mean ,2 * scales_sigma);
//correct one
zeros ~ normal ( (temps_mixture_dots - (temps_square_dots * scales)) , sigma );
}
"""
# REESTIMATE
STAN_MODEL_BAD_ESTIMATES = """
data
{
int is_it_final;
int LENGTH; //2048
int final_size; //5
vector [final_size] scales_mat_est ; // Grant's estimations for the scale values
matrix [final_size , final_size] temps_square_dots; // dot product of each template with itself
//row_vector [final_size] temps_mixture_dots ; // dot product of each template with the mixture
vector [final_size] temps_mixture_dots ; // dot product of each template with the mixture
//row_vector [final_size] zeros; // a list of zeros with the same size as the number of the metabolites and scales
vector [final_size] zeros; // a list of zeros with the same size as the number of the metabolites and scales
vector [final_size] sigma ; //list of small numbers
//matrix [final_size,final_size] scales_mean ; // diagonal scales_mat_est rest of it zero
//matrix [final_size,final_size] scales_sigma; // diagonal scales_mat_est * 2 rest of it zero
//row_vector [final_size] scales_mean ;
vector [final_size] scales_mean ;
//row_vector [final_size] scales_sigma ;
vector [final_size] scales_sigma ;
}
parameters
{
//row_vector <lower = 0.001 , upper = 100000 > [final_size] scales; // scale values
vector <lower = 0.001 , upper = 1000000 > [final_size] scales; // scale values
}
model
{
scales ~ normal(10 * scales_mean ,4 * scales_sigma);
//correct one
zeros ~ normal ( (temps_mixture_dots - (temps_square_dots * scales)) , sigma );
}
"""
# ALTERNATIVEESTIMATES
STAN_MODEL_OK_ESTIMATES = """
data
{
int is_it_final;
int LENGTH; //2048
int final_size; //5
vector [final_size] scales_mat_est ; // Grant's estimations for the scale values
matrix [final_size , final_size] temps_square_dots; // dot product of each template with itself
//row_vector [final_size] temps_mixture_dots ; // dot product of each template with the mixture
vector [final_size] temps_mixture_dots ; // dot product of each template with the mixture
//row_vector [final_size] zeros; // a list of zeros with the same size as the number of the metabolites and scales
vector [final_size] zeros; // a list of zeros with the same size as the number of the metabolites and scales
vector [final_size] sigma ; //list of small numbers
//matrix [final_size,final_size] scales_mean ; // diagonal scales_mat_est rest of it zero
//matrix [final_size,final_size] scales_sigma; // diagonal scales_mat_est * 2 rest of it zero
//row_vector [final_size] scales_mean ;
vector [final_size] scales_mean ;
//row_vector [final_size] scales_sigma ;
vector [final_size] scales_sigma ;
}
parameters
{
//row_vector <lower = 0.001 , upper = 100000 > [final_size] scales; // scale values
vector <lower = 0.001 , upper = 1000000 > [final_size] scales; // scale values
}
model
{
scales ~ normal(4 * scales_mean ,2 * scales_sigma);
//correct one
zeros ~ normal ( (temps_mixture_dots - (temps_square_dots * scales)) , sigma );
}
"""
27.752747 | 116 | 0.719857 | 739 | 5,051 | 4.668471 | 0.097429 | 0.174783 | 0.156522 | 0.109565 | 0.942899 | 0.927826 | 0.906667 | 0.89942 | 0.888116 | 0.874783 | 0 | 0.024544 | 0.18531 | 5,051 | 181 | 117 | 27.906077 | 0.813852 | 0.009305 | 0 | 0.666667 | 0 | 0.075 | 0.9744 | 0.0276 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
589f2620152161ed65369d654996234c07756517 | 1,205 | py | Python | unitorch/cli/models/roberta/__init__.py | fuliucansheng/UniTorch | 47038321593ce4e7eabda555bd58c0cf89482146 | ["MIT"] | 2 | 2022-02-05T08:52:00.000Z | 2022-03-27T07:01:34.000Z | unitorch/cli/models/roberta/__init__.py | Lixin-Qian/unitorch | 47038321593ce4e7eabda555bd58c0cf89482146 | ["MIT"] | null | null | null | unitorch/cli/models/roberta/__init__.py | Lixin-Qian/unitorch | 47038321593ce4e7eabda555bd58c0cf89482146 | ["MIT"] | 1 | 2022-03-27T07:01:13.000Z | 2022-03-27T07:01:13.000Z
# Copyright (c) FULIUCANSHENG.
# Licensed under the MIT License.
# pretrained infos
pretrained_roberta_infos = {
"default-roberta": {
"config": "https://huggingface.co/roberta-base/resolve/main/config.json",
"vocab": "https://huggingface.co/roberta-base/resolve/main/vocab.json",
"merge": "https://huggingface.co/roberta-base/resolve/main/merges.txt",
},
"roberta-base": {
"config": "https://huggingface.co/roberta-base/resolve/main/config.json",
"vocab": "https://huggingface.co/roberta-base/resolve/main/vocab.json",
"merge": "https://huggingface.co/roberta-base/resolve/main/merges.txt",
"weight": "https://huggingface.co/roberta-base/resolve/main/pytorch_model.bin",
},
"roberta-large": {
"config": "https://huggingface.co/roberta-large/resolve/main/config.json",
"vocab": "https://huggingface.co/roberta-large/resolve/main/vocab.json",
"merge": "https://huggingface.co/roberta-large/resolve/main/merges.txt",
"weight": "https://huggingface.co/roberta-large/resolve/main/pytorch_model.bin",
},
}
import unitorch.cli.models.roberta.modeling
import unitorch.cli.models.roberta.processing
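# A minimal sketch (not part of unitorch) showing how the info dict above
# could be consumed to fetch a registered model's config over HTTP;
# `fetch_roberta_config` is a hypothetical helper for illustration.
import json
from urllib.request import urlopen

def fetch_roberta_config(name: str) -> dict:
    # Resolve the registered config URL and parse the downloaded JSON
    url = pretrained_roberta_infos[name]["config"]
    with urlopen(url) as resp:
        return json.loads(resp.read().decode("utf-8"))

# e.g. fetch_roberta_config("roberta-base")["num_hidden_layers"] -> 12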
43.035714 | 88 | 0.682988 | 143 | 1,205 | 5.727273 | 0.244755 | 0.214896 | 0.241758 | 0.335775 | 0.827839 | 0.710623 | 0.710623 | 0.623932 | 0.623932 | 0.400488 | 0 | 0 | 0.138589 | 1,205 | 27 | 89 | 44.62963 | 0.789017 | 0.0639 | 0 | 0.285714 | 0 | 0 | 0.685053 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.095238 | 0 | 0.095238 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
546e7482c7a658eba4451977ec15e21d5f3195dd | 46,329 | py | Python | q2_types/per_sample_sequences/tests/test_transformer.py | misialq/q2-types | 058ee0e40e38edaa02b1aad034df37456aeb4ddf | ["BSD-3-Clause"] | null | null | null | q2_types/per_sample_sequences/tests/test_transformer.py | misialq/q2-types | 058ee0e40e38edaa02b1aad034df37456aeb4ddf | ["BSD-3-Clause"] | null | null | null | q2_types/per_sample_sequences/tests/test_transformer.py | misialq/q2-types | 058ee0e40e38edaa02b1aad034df37456aeb4ddf | ["BSD-3-Clause"] | null | null | null
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2020, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import unittest
import os
import shutil
import io
import string
import skbio
import yaml
import pandas as pd
from q2_types.per_sample_sequences import (
SingleLanePerSampleSingleEndFastqDirFmt,
SingleLanePerSamplePairedEndFastqDirFmt,
CasavaOneEightSingleLanePerSampleDirFmt,
CasavaOneEightLanelessPerSampleDirFmt,
SingleEndFastqManifestPhred33,
SingleEndFastqManifestPhred64,
PairedEndFastqManifestPhred33,
PairedEndFastqManifestPhred64,
FastqManifestFormat,
SingleEndFastqManifestPhred33V2,
SingleEndFastqManifestPhred64V2,
PairedEndFastqManifestPhred33V2,
PairedEndFastqManifestPhred64V2,
QIIME1DemuxDirFmt,
FastqGzFormat)
from q2_types.per_sample_sequences._transformer import (
_validate_header,
_validate_single_end_fastq_manifest_directions,
_validate_paired_end_fastq_manifest_directions,
_parse_and_validate_manifest
)
from qiime2.plugin.testing import TestPluginBase
class TestTransformers(TestPluginBase):
package = "q2_types.per_sample_sequences.tests"
def test_slpspefdf_to_slpssefdf(self):
filenames = ('paired_end_data/MANIFEST', 'metadata.yml',
'Human-Kneecap_S1_L001_R1_001.fastq.gz',
'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz')
input, obs = self.transform_format(
SingleLanePerSamplePairedEndFastqDirFmt,
SingleLanePerSampleSingleEndFastqDirFmt, filenames=filenames
)
expected = skbio.io.read(
'%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(input),
format='fastq', constructor=skbio.DNA
)
obs = skbio.io.read(
'%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs),
format='fastq', constructor=skbio.DNA
)
for act, exp in zip(obs, expected):
self.assertEqual(act, exp)
def test_slpssefdf_to_qiime1demuxdf(self):
filenames = ('single-end-two-sample-data1/MANIFEST',
'metadata.yml',
'Human-Kneecap_S1_L001_R1_001.fastq.gz',
'Human-Armpit_S2_L001_R1_001.fastq.gz')
input, observed = self.transform_format(
SingleLanePerSampleSingleEndFastqDirFmt,
QIIME1DemuxDirFmt, filenames=filenames
)
expected1 = list(skbio.io.read(
'%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(input),
format='fastq', constructor=skbio.DNA
))
expected2 = list(skbio.io.read(
'%s/Human-Armpit_S2_L001_R1_001.fastq.gz' % str(input),
format='fastq', constructor=skbio.DNA
))
expected = \
list(zip(expected1, ['Human-Kneecap'] * len(expected1))) + \
list(zip(expected2, ['Human-Armpit'] * len(expected2)))
observed = skbio.io.read(
'%s/seqs.fna' % str(observed),
format='fasta', constructor=skbio.DNA
)
observed = list(observed)
self.assertEqual(len(observed), len(expected))
for i, obs in enumerate(observed):
# identifiers are as expected
self.assertEqual(obs.metadata['id'],
'%s_%d' % (expected[i][1], i))
# sequences are as expected
self.assertEqual(str(obs), str(expected[i][0]))
def test_slpssefdf_to_qiime1demuxdf_bad_sample_ids(self):
filenames = ('single-end-two-sample-data2/MANIFEST',
'metadata.yml',
'Human-Kneecap_S1_L001_R1_001.fastq.gz',
'Human-Armpit_S2_L001_R1_001.fastq.gz')
with self.assertRaisesRegex(ValueError,
expected_regex='space.*Human-K'):
self.transform_format(
SingleLanePerSampleSingleEndFastqDirFmt,
QIIME1DemuxDirFmt, filenames=filenames)
filenames = ('single-end-two-sample-data3/MANIFEST',
'metadata.yml',
'Human-Kneecap_S1_L001_R1_001.fastq.gz',
'Human-Armpit_S2_L001_R1_001.fastq.gz')
with self.assertRaisesRegex(ValueError,
expected_regex='space.*Human-A'):
self.transform_format(
SingleLanePerSampleSingleEndFastqDirFmt,
QIIME1DemuxDirFmt, filenames=filenames)
def test_casava_one_eight_laneless_per_sample_dirfmt_to_slpspefd(self):
filenames = ('Human-Kneecap_S1_R1_001.fastq.gz',
'Human-Armpit_S2_R1_001.fastq.gz')
input, dirfmt = self.transform_format(
CasavaOneEightLanelessPerSampleDirFmt,
SingleLanePerSamplePairedEndFastqDirFmt, filenames=filenames
)
expected_filepaths = ['Human-Kneecap_S1_L001_R1_001.fastq.gz',
'Human-Armpit_S2_L001_R1_001.fastq.gz']
for path, view in dirfmt.sequences.iter_views(FastqGzFormat):
self.assertIn(path.name, expected_filepaths)
df = dirfmt.manifest.view(pd.DataFrame)
for name in df['forward']:
self.assertTrue((dirfmt.path / name).exists())
def test_casava_one_eight_single_lane_per_sample_dirfmt_to_slpssefdf(self):
filenames = ('Human-Kneecap_S1_L001_R1_001.fastq.gz',)
input, obs = self.transform_format(
CasavaOneEightSingleLanePerSampleDirFmt,
SingleLanePerSampleSingleEndFastqDirFmt, filenames=filenames
)
input = skbio.io.read(
'%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(input),
format='fastq', constructor=skbio.DNA
)
obs = skbio.io.read(
'%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs),
format='fastq', constructor=skbio.DNA
)
for act, exp in zip(obs, input):
self.assertEqual(act, exp)
def test_casava_one_eight_single_lane_per_sample_dirfmt_to_slpspefdf(self):
filenames = ('Human-Kneecap_S1_L001_R1_001.fastq.gz',)
input, obs = self.transform_format(
CasavaOneEightSingleLanePerSampleDirFmt,
SingleLanePerSamplePairedEndFastqDirFmt, filenames=filenames
)
input = skbio.io.read(
'%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(input),
format='fastq', constructor=skbio.DNA
)
obs = skbio.io.read(
'%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs),
format='fastq', constructor=skbio.DNA
)
for act, exp in zip(obs, input):
self.assertEqual(act, exp)
def test_miseq_demux_dirfmt_to_slpssefdf(self):
input, obs = self.transform_format(
CasavaOneEightLanelessPerSampleDirFmt,
SingleLanePerSampleSingleEndFastqDirFmt,
filenames=('Human-Kneecap_S1_R1_001.fastq.gz',),
)
input = skbio.io.read(
'%s/Human-Kneecap_S1_R1_001.fastq.gz' % str(input),
format='fastq', constructor=skbio.DNA
)
obs = skbio.io.read(
'%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs),
format='fastq', constructor=skbio.DNA
)
for act, exp in zip(obs, input):
self.assertEqual(act, exp)
def test_miseq_demux_dirfmt_to_slpspefdf(self):
input, obs = self.transform_format(
CasavaOneEightLanelessPerSampleDirFmt,
SingleLanePerSamplePairedEndFastqDirFmt,
filenames=('Human-Kneecap_S1_R1_001.fastq.gz',),
)
input = skbio.io.read(
'%s/Human-Kneecap_S1_R1_001.fastq.gz' % str(input),
format='fastq', constructor=skbio.DNA
)
obs = skbio.io.read(
'%s/Human-Kneecap_S1_L001_R1_001.fastq.gz' % str(obs),
format='fastq', constructor=skbio.DNA
)
for act, exp in zip(obs, input):
self.assertEqual(act, exp)
def test_fastqmanifest_single(self):
_, dirfmt = self.transform_format(
CasavaOneEightSingleLanePerSampleDirFmt,
SingleLanePerSampleSingleEndFastqDirFmt,
filenames=('Human-Kneecap_S1_L001_R1_001.fastq.gz',
'Human-Armpit_S2_L001_R1_001.fastq.gz'),
)
df = dirfmt.manifest.view(pd.DataFrame)
self.assertEqual(set(df.index), {'Human-Kneecap', 'Human-Armpit'})
self.assertEqual(set(df.columns), {'forward'})
self.assertTrue(os.path.exists(df['forward'].loc['Human-Kneecap']))
self.assertTrue(os.path.exists(df['forward'].loc['Human-Armpit']))
def test_fastqmanifest_paired(self):
_, dirfmt = self.transform_format(
CasavaOneEightSingleLanePerSampleDirFmt,
SingleLanePerSamplePairedEndFastqDirFmt,
filenames=(
'Human-Kneecap_S1_L001_R1_001.fastq.gz',
'paired_end_data/Human-Kneecap_S1_L001_R2_001.fastq.gz'),
)
df = dirfmt.manifest.view(pd.DataFrame)
self.assertEqual(set(df.index), {'Human-Kneecap'})
self.assertEqual(set(df.columns), {'forward', 'reverse'})
self.assertTrue(os.path.exists(df['forward'].loc['Human-Kneecap']))
self.assertTrue(os.path.exists(df['reverse'].loc['Human-Kneecap']))
class TestFastqManifestTransformers(TestPluginBase):
package = "q2_types.per_sample_sequences.tests"
def test_single_end_fastq_manifest_phred33_to_slpssefdf(self):
format_ = SingleEndFastqManifestPhred33
transformer = self.get_transformer(
format_,
SingleLanePerSampleSingleEndFastqDirFmt)
shutil.copy(
self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz'),
os.path.join(self.temp_dir.name,
'Human-Kneecap_S1_L001_R1_001.fastq.gz'))
shutil.copy(
self.get_data_path('Human-Armpit.fastq.gz'),
os.path.join(self.temp_dir.name, 'Human-Armpit.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/Human-Kneecap_S1_L001_R1_001.fastq.gz,"
"forward\n" % self.temp_dir.name)
fh.write("sampleXYZ,%s/Human-Armpit.fastq.gz,forward\n"
% self.temp_dir.name)
obs = transformer(format_(manifest_fp, 'r'))
fastq_pairs = [('Human-Kneecap_S1_L001_R1_001.fastq.gz',
'sampleABC_0_L001_R1_001.fastq.gz'),
('Human-Armpit.fastq.gz',
'sampleXYZ_1_L001_R1_001.fastq.gz')]
for input_fastq, obs_fastq in fastq_pairs:
obs_fh = skbio.io.read(
os.path.join(str(obs), obs_fastq), compression='gzip',
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
exp_fh = skbio.io.read(
self.get_data_path(input_fastq),
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
for o, e in zip(obs_fh, exp_fh):
self.assertEqual(o, e)
obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)))
exp_metadata = yaml.load("{'phred-offset': 33}")
self.assertEqual(obs_metadata, exp_metadata)
obs_manifest = open('%s/MANIFEST' % (str(obs))).read()
exp_manifest = ("sample-id,filename,direction\n"
"sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n"
"sampleXYZ,sampleXYZ_1_L001_R1_001.fastq.gz,forward\n")
self.assertEqual(obs_manifest, exp_manifest)
def test_single_end_fastq_manifest_phred33_to_slpssefdf_uncompressed(self):
format_ = SingleEndFastqManifestPhred33
transformer = self.get_transformer(
format_,
SingleLanePerSampleSingleEndFastqDirFmt)
shutil.copy(
self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq'),
os.path.join(self.temp_dir.name,
'Human-Kneecap_S1_L001_R1_001.fastq'))
shutil.copy(
self.get_data_path('Human-Armpit.fastq'),
os.path.join(self.temp_dir.name, 'Human-Armpit.fastq'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/Human-Kneecap_S1_L001_R1_001.fastq,"
"forward\n" % self.temp_dir.name)
fh.write("sampleXYZ,%s/Human-Armpit.fastq,forward\n"
% self.temp_dir.name)
obs = transformer(format_(manifest_fp, 'r'))
fastq_pairs = [('Human-Kneecap_S1_L001_R1_001.fastq',
'sampleABC_0_L001_R1_001.fastq.gz'),
('Human-Armpit.fastq',
'sampleXYZ_1_L001_R1_001.fastq.gz')]
for input_fastq, obs_fastq in fastq_pairs:
obs_fh = skbio.io.read(
os.path.join(str(obs), obs_fastq), compression='gzip',
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
exp_fh = skbio.io.read(
self.get_data_path(input_fastq),
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
for o, e in zip(obs_fh, exp_fh):
self.assertEqual(o, e)
obs_metadata = yaml.load(open('%s/metadata.yml' % str(obs)))
exp_metadata = yaml.load("{'phred-offset': 33}")
self.assertEqual(obs_metadata, exp_metadata)
obs_manifest = open('%s/MANIFEST' % (str(obs))).read()
exp_manifest = ("sample-id,filename,direction\n"
"sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n"
"sampleXYZ,sampleXYZ_1_L001_R1_001.fastq.gz,forward\n")
self.assertEqual(obs_manifest, exp_manifest)
def test_single_end_fastq_manifest_phred64_to_slpssefdf(self):
format_ = SingleEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSampleSingleEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz'))
shutil.copy(
self.get_data_path('s2-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq.gz,"
"forward\n" % self.temp_dir.name)
fh.write("sampleXYZ,%s/s2-phred64.fastq.gz,forward\n" %
self.temp_dir.name)
obs = transformer(format_(manifest_fp, 'r'))
fastq_pairs = [('s1-phred64.fastq.gz',
'sampleABC_0_L001_R1_001.fastq.gz'),
('s2-phred64.fastq.gz',
'sampleXYZ_1_L001_R1_001.fastq.gz')]
for input_fastq, obs_fastq in fastq_pairs:
obs_fh = skbio.io.read(
os.path.join(str(obs), obs_fastq), compression='gzip',
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
exp_fh = skbio.io.read(
self.get_data_path(input_fastq),
format='fastq', constructor=skbio.DNA, variant='illumina1.3'
)
for o, e in zip(obs_fh, exp_fh):
self.assertEqual(o, e)
obs_metadata = yaml.safe_load(open('%s/metadata.yml' % str(obs)))
exp_metadata = yaml.safe_load("{'phred-offset': 33}")
self.assertEqual(obs_metadata, exp_metadata)
obs_manifest = open('%s/MANIFEST' % (str(obs))).read()
exp_manifest = ("sample-id,filename,direction\n"
"sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n"
"sampleXYZ,sampleXYZ_1_L001_R1_001.fastq.gz,forward\n")
self.assertEqual(obs_manifest, exp_manifest)
def test_single_end_fastq_manifest_phred64_to_slpssefdf_uncompressed(self):
format_ = SingleEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSampleSingleEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq'))
shutil.copy(
self.get_data_path('s2-phred64.fastq'),
os.path.join(self.temp_dir.name, 's2-phred64.fastq'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq,"
"forward\n" % self.temp_dir.name)
fh.write("sampleXYZ,%s/s2-phred64.fastq,forward\n" %
self.temp_dir.name)
obs = transformer(format_(manifest_fp, 'r'))
fastq_pairs = [('s1-phred64.fastq',
'sampleABC_0_L001_R1_001.fastq.gz'),
('s2-phred64.fastq',
'sampleXYZ_1_L001_R1_001.fastq.gz')]
for input_fastq, obs_fastq in fastq_pairs:
obs_fh = skbio.io.read(
os.path.join(str(obs), obs_fastq), compression='gzip',
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
exp_fh = skbio.io.read(
self.get_data_path(input_fastq),
format='fastq', constructor=skbio.DNA, variant='illumina1.3'
)
for o, e in zip(obs_fh, exp_fh):
self.assertEqual(o, e)
obs_metadata = yaml.safe_load(open('%s/metadata.yml' % str(obs)))
exp_metadata = yaml.safe_load("{'phred-offset': 33}")
self.assertEqual(obs_metadata, exp_metadata)
obs_manifest = open('%s/MANIFEST' % (str(obs))).read()
exp_manifest = ("sample-id,filename,direction\n"
"sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n"
"sampleXYZ,sampleXYZ_1_L001_R1_001.fastq.gz,forward\n")
self.assertEqual(obs_manifest, exp_manifest)
def test_paired_end_fastq_manifest_phred33_to_slpspefdf(self):
format_ = PairedEndFastqManifestPhred33
transformer = self.get_transformer(
format_,
SingleLanePerSamplePairedEndFastqDirFmt)
shutil.copy(
self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq.gz'),
os.path.join(self.temp_dir.name,
'Human-Kneecap_S1_L001_R1_001.fastq.gz'))
shutil.copy(
self.get_data_path('Human-Armpit.fastq.gz'),
os.path.join(self.temp_dir.name, 'Human-Armpit.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/Human-Kneecap_S1_L001_R1_001.fastq.gz,"
"forward\n" % self.temp_dir.name)
fh.write("sampleABC,%s/Human-Armpit.fastq.gz,reverse\n"
% self.temp_dir.name)
obs = transformer(format_(manifest_fp, 'r'))
fastq_pairs = [('Human-Kneecap_S1_L001_R1_001.fastq.gz',
'sampleABC_0_L001_R1_001.fastq.gz'),
('Human-Armpit.fastq.gz',
'sampleABC_1_L001_R2_001.fastq.gz')]
for input_fastq, obs_fastq in fastq_pairs:
obs_fh = skbio.io.read(
os.path.join(str(obs), obs_fastq), compression='gzip',
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
exp_fh = skbio.io.read(
self.get_data_path(input_fastq),
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
for o, e in zip(obs_fh, exp_fh):
self.assertEqual(o, e)
obs_metadata = yaml.safe_load(open('%s/metadata.yml' % str(obs)))
exp_metadata = yaml.safe_load("{'phred-offset': 33}")
self.assertEqual(obs_metadata, exp_metadata)
obs_manifest = open('%s/MANIFEST' % (str(obs))).read()
exp_manifest = ("sample-id,filename,direction\n"
"sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n"
"sampleABC,sampleABC_1_L001_R2_001.fastq.gz,reverse\n")
self.assertEqual(obs_manifest, exp_manifest)
def test_paired_end_fastq_manifest_phred33_to_slpspefdf_uncompressed(self):
format_ = PairedEndFastqManifestPhred33
transformer = self.get_transformer(
format_,
SingleLanePerSamplePairedEndFastqDirFmt)
shutil.copy(
self.get_data_path('Human-Kneecap_S1_L001_R1_001.fastq'),
os.path.join(self.temp_dir.name,
'Human-Kneecap_S1_L001_R1_001.fastq'))
shutil.copy(
self.get_data_path('Human-Armpit.fastq'),
os.path.join(self.temp_dir.name, 'Human-Armpit.fastq'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/Human-Kneecap_S1_L001_R1_001.fastq,"
"forward\n" % self.temp_dir.name)
fh.write("sampleABC,%s/Human-Armpit.fastq,reverse\n"
% self.temp_dir.name)
obs = transformer(format_(manifest_fp, 'r'))
fastq_pairs = [('Human-Kneecap_S1_L001_R1_001.fastq',
'sampleABC_0_L001_R1_001.fastq.gz'),
('Human-Armpit.fastq',
'sampleABC_1_L001_R2_001.fastq.gz')]
for input_fastq, obs_fastq in fastq_pairs:
obs_fh = skbio.io.read(
os.path.join(str(obs), obs_fastq), compression='gzip',
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
exp_fh = skbio.io.read(
self.get_data_path(input_fastq),
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
for o, e in zip(obs_fh, exp_fh):
self.assertEqual(o, e)
obs_metadata = yaml.safe_load(open('%s/metadata.yml' % str(obs)))
exp_metadata = yaml.safe_load("{'phred-offset': 33}")
self.assertEqual(obs_metadata, exp_metadata)
obs_manifest = open('%s/MANIFEST' % (str(obs))).read()
exp_manifest = ("sample-id,filename,direction\n"
"sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n"
"sampleABC,sampleABC_1_L001_R2_001.fastq.gz,reverse\n")
self.assertEqual(obs_manifest, exp_manifest)
def test_paired_end_fastq_manifest_phred64_to_slpspefdf(self):
format_ = PairedEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSamplePairedEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz'))
shutil.copy(
self.get_data_path('s2-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq.gz,"
"forward\n" % self.temp_dir.name)
fh.write("sampleABC,%s/s2-phred64.fastq.gz,reverse\n" %
self.temp_dir.name)
obs = transformer(format_(manifest_fp, 'r'))
fastq_pairs = [('s1-phred64.fastq.gz',
'sampleABC_0_L001_R1_001.fastq.gz'),
('s2-phred64.fastq.gz',
'sampleABC_1_L001_R2_001.fastq.gz')]
for input_fastq, obs_fastq in fastq_pairs:
obs_fh = skbio.io.read(
os.path.join(str(obs), obs_fastq), compression='gzip',
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
exp_fh = skbio.io.read(
self.get_data_path(input_fastq),
format='fastq', constructor=skbio.DNA, variant='illumina1.3'
)
for o, e in zip(obs_fh, exp_fh):
self.assertEqual(o, e)
obs_metadata = yaml.safe_load(open('%s/metadata.yml' % str(obs)))
exp_metadata = yaml.safe_load("{'phred-offset': 33}")
self.assertEqual(obs_metadata, exp_metadata)
obs_manifest = open('%s/MANIFEST' % (str(obs))).read()
exp_manifest = ("sample-id,filename,direction\n"
"sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n"
"sampleABC,sampleABC_1_L001_R2_001.fastq.gz,reverse\n")
self.assertEqual(obs_manifest, exp_manifest)
def test_paired_end_fastq_manifest_phred64_to_slpspefdf_uncompressed(self):
format_ = PairedEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSamplePairedEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq'))
shutil.copy(
self.get_data_path('s2-phred64.fastq'),
os.path.join(self.temp_dir.name, 's2-phred64.fastq'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq,"
"forward\n" % self.temp_dir.name)
fh.write("sampleABC,%s/s2-phred64.fastq,reverse\n" %
self.temp_dir.name)
obs = transformer(format_(manifest_fp, 'r'))
fastq_pairs = [('s1-phred64.fastq',
'sampleABC_0_L001_R1_001.fastq.gz'),
('s2-phred64.fastq',
'sampleABC_1_L001_R2_001.fastq.gz')]
for input_fastq, obs_fastq in fastq_pairs:
obs_fh = skbio.io.read(
os.path.join(str(obs), obs_fastq), compression='gzip',
format='fastq', constructor=skbio.DNA, variant='illumina1.8'
)
exp_fh = skbio.io.read(
self.get_data_path(input_fastq),
format='fastq', constructor=skbio.DNA, variant='illumina1.3'
)
for o, e in zip(obs_fh, exp_fh):
self.assertEqual(o, e)
obs_metadata = yaml.safe_load(open('%s/metadata.yml' % str(obs)))
exp_metadata = yaml.safe_load("{'phred-offset': 33}")
self.assertEqual(obs_metadata, exp_metadata)
obs_manifest = open('%s/MANIFEST' % (str(obs))).read()
exp_manifest = ("sample-id,filename,direction\n"
"sampleABC,sampleABC_0_L001_R1_001.fastq.gz,forward\n"
"sampleABC,sampleABC_1_L001_R2_001.fastq.gz,reverse\n")
self.assertEqual(obs_manifest, exp_manifest)
def test_single_end_fastq_manifest_missing_fastq(self):
format_ = SingleEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSampleSingleEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq.gz,"
"forward\n" % self.temp_dir.name)
fh.write("sampleXYZ,%s/s2-phred64.fastq.gz,forward\n" %
self.temp_dir.name)
with self.assertRaisesRegex(FileNotFoundError,
"s2-phred64.fastq.gz"):
transformer(format_(manifest_fp, 'r'))
def test_single_end_fastq_manifest_invalid_direction(self):
format_ = SingleEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSampleSingleEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz'))
shutil.copy(
self.get_data_path('s2-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq.gz,"
"middle-out\n" % self.temp_dir.name)
fh.write("sampleXYZ,%s/s2-phred64.fastq.gz,forward\n" %
self.temp_dir.name)
with self.assertRaisesRegex(ValueError, 'middle-out'):
transformer(format_(manifest_fp, 'r'))
def test_single_end_fastq_manifest_too_many_directions(self):
format_ = SingleEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSampleSingleEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz'))
shutil.copy(
self.get_data_path('s2-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq.gz,"
"forward\n" % self.temp_dir.name)
fh.write("sampleXYZ,%s/s2-phred64.fastq.gz,reverse\n" %
self.temp_dir.name)
with self.assertRaisesRegex(ValueError, "only forward or reverse"):
transformer(format_(manifest_fp, 'r'))
def test_paired_end_fastq_manifest_missing_fastq(self):
format_ = PairedEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSamplePairedEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq.gz,"
"forward\n" % self.temp_dir.name)
fh.write("sampleABC,%s/s2-phred64.fastq.gz,reverse\n" %
self.temp_dir.name)
with self.assertRaisesRegex(FileNotFoundError,
"s2-phred64.fastq.gz"):
transformer(format_(manifest_fp, 'r'))
def test_paired_end_fastq_manifest_invalid_direction(self):
format_ = PairedEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSamplePairedEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz'))
shutil.copy(
self.get_data_path('s2-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's2-phred64.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq.gz,"
"middle-out\n" % self.temp_dir.name)
fh.write("sampleABC,%s/s2-phred64.fastq.gz,reverse\n" %
self.temp_dir.name)
with self.assertRaisesRegex(ValueError, 'middle-out'):
transformer(format_(manifest_fp, 'r'))
def test_paired_end_fastq_manifest_missing_directions(self):
format_ = PairedEndFastqManifestPhred64
transformer = self.get_transformer(
format_,
SingleLanePerSamplePairedEndFastqDirFmt)
shutil.copy(
self.get_data_path('s1-phred64.fastq.gz'),
os.path.join(self.temp_dir.name, 's1-phred64.fastq.gz'))
manifest_fp = os.path.join(self.temp_dir.name, 'manifest')
with open(manifest_fp, 'w') as fh:
fh.write("sample-id,absolute-filepath,direction\n")
fh.write("sampleABC,%s/s1-phred64.fastq.gz,"
"forward\n" % self.temp_dir.name)
with self.assertRaisesRegex(ValueError,
"one time each for each sample"):
transformer(format_(manifest_fp, 'r'))
def test_parse_and_validate_manifest_invalid(self):
manifest = io.StringIO(
'sample-id,absolute-filepath\n'
'abc,/hello/world,forward\n')
with self.assertRaisesRegex(
ValueError, "Expected.*absolute-filepath.*found "
"'sample-id,absolute-filepath'.$"):
_parse_and_validate_manifest(manifest, single_end=True,
absolute=True)
manifest = io.StringIO(
'sample-id,absolute-filepath,direction\n'
'abc,/hello/world\n'
'abc,/hello/world,forward\n')
with self.assertRaisesRegex(ValueError, 'Empty cells'):
_parse_and_validate_manifest(manifest, single_end=True,
absolute=True)
manifest = io.StringIO(
'sample-id,absolute-filepath,direction\n'
'abc,/hello/world,forward\n'
'xyz,/hello/world,forward,extra-field')
with self.assertRaisesRegex(ValueError, 'issue parsing the manifest'):
_parse_and_validate_manifest(manifest, single_end=True,
absolute=True)
manifest = io.StringIO(
'sample-id,absolute-filepath,direction\n'
'abc,world,forward\n'
'xyz,world,forward')
with self.assertRaisesRegex(ValueError,
'absolute but found relative path'):
_parse_and_validate_manifest(manifest, single_end=True,
absolute=True)
manifest = io.StringIO(
'sample-id,absolute-filepath,direction\n'
'abc,world,forward\n'
'abc,world,reverse')
with self.assertRaisesRegex(ValueError,
'absolute but found relative path'):
_parse_and_validate_manifest(manifest, single_end=False,
absolute=True)
manifest = io.StringIO(
'sample-id,filename,direction\n'
'abc,/snap/crackle/pop/world,forward\n'
'xyz,/snap/crackle/pop/world,forward')
with self.assertRaisesRegex(ValueError,
'relative but found absolute path'):
_parse_and_validate_manifest(manifest, single_end=True,
absolute=False)
manifest = io.StringIO(
'sample-id,filename,direction\n'
'abc,/snap/crackle/pop/world,forward\n'
'abc,/snap/crackle/pop/world,reverse')
with self.assertRaisesRegex(ValueError,
'relative but found absolute path'):
_parse_and_validate_manifest(manifest, single_end=False,
absolute=False)
def test_parse_and_validate_manifest_expand_vars(self):
expected_fp = os.path.join(self.temp_dir.name, 'manifest.txt')
# touch the file - the validator will fail if it doesn't exist
open(expected_fp, 'w').close()
os.environ['TESTENVGWAR'] = self.temp_dir.name
manifest = io.StringIO(
'sample-id,absolute-filepath,direction\n'
'abc,$TESTENVGWAR/manifest.txt,forward')
manifest = _parse_and_validate_manifest(manifest, single_end=True,
absolute=True)
del os.environ['TESTENVGWAR']
self.assertEqual(manifest.iloc[0]['absolute-filepath'], expected_fp)
def test_validate_header_valid(self):
columns = ['sample-id', 'absolute-filepath', 'direction']
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['xyz', '/hello/world', 'forward']],
columns=columns)
# should not raise an error
_validate_header(manifest, expected_header=columns)
def test_validate_header_invalid(self):
columns = ['sample-id', 'absolute-filepath', 'direction']
manifest = pd.DataFrame(
[['abc', '/hello/world'],
['xyz', '/hello/world']],
columns=['xyz', 'absolute-filepath'])
with self.assertRaisesRegex(ValueError, 'Expected manifest.*absolute'
'-filepath.*but'):
_validate_header(manifest, expected_header=columns)
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['xyz', '/hello/world', 'forward']],
columns=['xyz', 'absolute-filepath', 'direction'])
with self.assertRaisesRegex(ValueError, 'sample-id.*xyz'):
_validate_header(manifest, expected_header=columns)
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['xyz', '/hello/world', 'forward']],
columns=['sample-id', 'xyz', 'direction'])
with self.assertRaisesRegex(ValueError, 'absolute-filepath.*xyz'):
_validate_header(manifest, expected_header=columns)
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['xyz', '/hello/world', 'forward']],
columns=['sample-id', 'absolute-filepath', 'xyz'])
with self.assertRaisesRegex(ValueError, 'direction.*xyz'):
_validate_header(manifest, expected_header=columns)
def test_validate_single_end_fastq_manifest_directions(self):
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['xyz', '/hello/world', 'forward']],
columns=['sample-id', 'absolute-filepath', 'direction'])
_validate_single_end_fastq_manifest_directions(manifest)
manifest = pd.DataFrame(
[['abc', '/hello/world', 'reverse'],
['xyz', '/hello/world', 'reverse']],
columns=['sample-id', 'absolute-filepath', 'direction'])
_validate_single_end_fastq_manifest_directions(manifest)
def test_validate_single_end_fastq_manifest_directions_invalid(self):
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['xyz', '/hello/world', 'reverse']],
columns=['sample-id', 'absolute-filepath', 'direction'])
with self.assertRaisesRegex(ValueError, 'can contain only'):
_validate_single_end_fastq_manifest_directions(manifest)
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['abc', '/hello/world2', 'forward']],
columns=['sample-id', 'absolute-filepath', 'direction'])
with self.assertRaisesRegex(ValueError, 'more than once'):
_validate_single_end_fastq_manifest_directions(manifest)
def test_validate_paired_end_fastq_manifest_directions(self):
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['abc', '/hello/world', 'reverse'],
['xyz', '/hello/world2', 'forward'],
['xyz', '/hello/world2', 'reverse']],
columns=['sample-id', 'absolute-filepath', 'direction'])
_validate_paired_end_fastq_manifest_directions(manifest)
def test_validate_paired_end_fastq_manifest_directions_invalid(self):
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['abc', '/hello/world', 'reverse'],
['xyz', '/hello/world2', 'reverse']],
columns=['sample-id', 'absolute-filepath', 'direction'])
with self.assertRaisesRegex(ValueError, 'reverse but not.*xyz'):
_validate_paired_end_fastq_manifest_directions(manifest)
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['abc', '/hello/world', 'reverse'],
['xyz', '/hello/world2', 'forward']],
columns=['sample-id', 'absolute-filepath', 'direction'])
with self.assertRaisesRegex(ValueError, 'forward but not.*xyz'):
_validate_paired_end_fastq_manifest_directions(manifest)
manifest = pd.DataFrame(
[['abc', '/hello/world', 'forward'],
['abc', '/hello/world', 'reverse'],
['abc', '/hello/world2', 'forward']],
columns=['sample-id', 'absolute-filepath', 'direction'])
with self.assertRaisesRegex(ValueError, 'forward read record: abc'):
_validate_paired_end_fastq_manifest_directions(manifest)
manifest = pd.DataFrame(
[['xyz', '/hello/world', 'forward'],
['xyz', '/hello/world', 'reverse'],
['xyz', '/hello/world2', 'reverse']],
columns=['sample-id', 'absolute-filepath', 'direction'])
with self.assertRaisesRegex(ValueError, 'reverse read record: xyz'):
_validate_paired_end_fastq_manifest_directions(manifest)
# NOTE: we are really only interested in the manifest, since these transformers
# primarily transform the V2 TSV manifests to the (older) CSV manifests. The
# only things asserted here are facts about the manifest and not the actual
# data assets themselves. (An illustrative sketch of the rewrite follows the
# class below.)
class TestFastqManifestV2Transformers(TestPluginBase):
package = "q2_types.per_sample_sequences.tests"
def setUp(self):
super().setUp()
self.se_formats = [SingleEndFastqManifestPhred33V2,
SingleEndFastqManifestPhred64V2]
self.pe_formats = [PairedEndFastqManifestPhred33V2,
PairedEndFastqManifestPhred64V2]
self.exp_se_manifest = (
"sample-id,filename,direction\n"
"Human-Kneecap,Human-Kneecap_0_L001_R1_001.fastq.gz,forward\n"
"Peanut-Eyeball,Peanut-Eyeball_1_L001_R1_001.fastq.gz,forward\n")
self.exp_pe_manifest = (
"sample-id,filename,direction\n"
"Human-Kneecap,Human-Kneecap_0_L001_R1_001.fastq.gz,forward\n"
"Peanut-Eyeball,Peanut-Eyeball_1_L001_R1_001.fastq.gz,forward\n"
"Human-Kneecap,Human-Kneecap_2_L001_R2_001.fastq.gz,reverse\n"
"Peanut-Eyeball,Peanut-Eyeball_3_L001_R2_001.fastq.gz,reverse\n")
def template_manifest(self, filepath, ctx):
with open(filepath) as fh:
tmpl = string.Template(fh.read())
basename = os.path.basename(filepath)
file_ = os.path.join(self.temp_dir.name, basename)
with open(file_, 'w') as fh:
fh.write(tmpl.substitute(**ctx))
return file_
def apply_transformation(self, from_fmt, to_fmt, datafile_fp, manifest_fp):
transformer = self.get_transformer(from_fmt, to_fmt)
fp = self.get_data_path(datafile_fp)
manifest = self.template_manifest(
self.get_data_path(manifest_fp),
{k: fp for k in ['s1', 's2', 's1f', 's1r', 's2f', 's2r']})
return transformer(from_fmt(manifest, 'r'))
def test_single_end_fastq_manifest_phred33_to_slpssefdf(self):
obs = self.apply_transformation(
SingleEndFastqManifestPhred33V2,
SingleLanePerSampleSingleEndFastqDirFmt,
'Human-Kneecap_S1_L001_R1_001.fastq.gz',
'absolute_manifests_v2/single-MANIFEST')
with obs.manifest.view(FastqManifestFormat).open() as obs_manifest:
self.assertEqual(obs_manifest.read(), self.exp_se_manifest)
def test_single_end_fastq_manifest_phred64_to_slpssefdf(self):
obs = self.apply_transformation(
SingleEndFastqManifestPhred64V2,
SingleLanePerSampleSingleEndFastqDirFmt,
's1-phred64.fastq.gz',
'absolute_manifests_v2/single-MANIFEST')
with obs.manifest.view(FastqManifestFormat).open() as obs_manifest:
self.assertEqual(obs_manifest.read(), self.exp_se_manifest)
def test_paired_end_fastq_manifest_phred33_to_slpspefdf(self):
obs = self.apply_transformation(
PairedEndFastqManifestPhred33V2,
SingleLanePerSamplePairedEndFastqDirFmt,
'Human-Kneecap_S1_L001_R1_001.fastq.gz',
'absolute_manifests_v2/paired-MANIFEST')
with obs.manifest.view(FastqManifestFormat).open() as obs_manifest:
self.assertEqual(obs_manifest.read(), self.exp_pe_manifest)
def test_paired_end_fastq_manifest_phred64_to_slpspefdf(self):
obs = self.apply_transformation(
PairedEndFastqManifestPhred64V2,
SingleLanePerSamplePairedEndFastqDirFmt,
's1-phred64.fastq.gz',
'absolute_manifests_v2/paired-MANIFEST')
with obs.manifest.view(FastqManifestFormat).open() as obs_manifest:
self.assertEqual(obs_manifest.read(), self.exp_pe_manifest)
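# Illustrative sketch only (not part of the test suite): per the NOTE above,
# these transformers rewrite a V2 TSV manifest into the older CSV MANIFEST.
# Assuming the single-end case, the rewrite amounts to renaming each file to
# the 'sampleID_<index>_L001_R1_001.fastq.gz' scheme asserted in the expected
# manifests, roughly:
def _sketch_v2_to_legacy_manifest(v2_rows):
    """v2_rows: iterable of (sample_id, absolute_filepath) pairs from a TSV."""
    lines = ['sample-id,filename,direction']
    for i, (sample_id, _) in enumerate(v2_rows):
        lines.append('%s,%s_%d_L001_R1_001.fastq.gz,forward'
                     % (sample_id, sample_id, i))
    return '\n'.join(lines) + '\n'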
if __name__ == '__main__':
unittest.main()
| 43.624294
| 79
| 0.604891
| 5,126
| 46,329
| 5.237027
| 0.05911
| 0.035984
| 0.029801
| 0.036506
| 0.869324
| 0.852151
| 0.817694
| 0.786292
| 0.780853
| 0.771168
| 0
| 0.032013
| 0.273824
| 46,329
| 1,061
| 80
| 43.66541
| 0.765925
| 0.015692
| 0
| 0.754802
| 0
| 0
| 0.231124
| 0.142851
| 0
| 0
| 0
| 0
| 0.081356
| 1
| 0.044068
| false
| 0
| 0.012429
| 0
| 0.065537
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5470bd086a5e2b8708b6d1d4032642ab834b50c0
| 183
|
py
|
Python
|
listings/listing2-1.py
|
eivl/MissionPython
|
dc4e06f2eaac9d53f5091ae6f921e39db986d101
|
[
"Apache-2.0"
] | 4
|
2018-09-07T15:35:24.000Z
|
2019-03-27T09:48:12.000Z
|
listings/listing2-1.py
|
eivl/MissionPython
|
dc4e06f2eaac9d53f5091ae6f921e39db986d101
|
[
"Apache-2.0"
] | 371
|
2020-03-04T21:51:56.000Z
|
2022-03-31T20:59:11.000Z
|
listings/listing2-1.py
|
eivl/MissionPython
|
dc4e06f2eaac9d53f5091ae6f921e39db986d101
|
[
"Apache-2.0"
] | 3
|
2019-06-18T19:57:17.000Z
|
2020-11-06T03:55:08.000Z
|
room_map = [ [ 1, 0, 0, 0, 0],
             [ 0, 0, 0, 2, 0],
             [ 0, 0, 0, 0, 0],
             [ 0, 3, 0, 0, 0],
             [ 0, 0, 0, 0, 4]
             ]
print(room_map)
| 22.875
| 30
| 0.251366
| 30
| 183
| 1.466667
| 0.266667
| 0.818182
| 1.022727
| 1.090909
| 0.477273
| 0.477273
| 0.477273
| 0
| 0
| 0
| 0
| 0.294118
| 0.535519
| 183
| 7
| 31
| 26.142857
| 0.223529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
54a5fd1db479f1371648b0f22baf2b6b5cf5994f
| 236
|
py
|
Python
|
cli/enums.py
|
3lpsy/exutils
|
ce189a229b60aeda33374e438fca47338877fd98
|
[
"MIT"
] | 5
|
2020-01-07T22:46:41.000Z
|
2020-03-09T16:00:24.000Z
|
cli/enums.py
|
3lpsy/exutils
|
ce189a229b60aeda33374e438fca47338877fd98
|
[
"MIT"
] | null | null | null |
cli/enums.py
|
3lpsy/exutils
|
ce189a229b60aeda33374e438fca47338877fd98
|
[
"MIT"
] | null | null | null |
SHELLCODE_HELP = "shellcode in \\xAA\\xBB format (can also pass: a python import path via 'py:somefile.someimporttarget', shellcode in \\AA format in a file via 'txt:/path/to/file', and binary data in a file via 'bin:/path/to/binary')"
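# Hedged sketch (not from the exutils source; 'load_shellcode' is a
# hypothetical helper): one way to dispatch on the 'py:'/'txt:'/'bin:'
# prefixes that the help text above describes.
import importlib

def load_shellcode(spec):
    if spec.startswith('py:'):
        # python import path, e.g. 'somefile.someimporttarget'
        module_path, _, attr = spec[3:].rpartition('.')
        return getattr(importlib.import_module(module_path), attr)
    if spec.startswith('txt:'):
        # file holding shellcode written in \AA text form
        with open(spec[4:]) as fh:
            text = fh.read().strip()
        return bytes(int(byte, 16) for byte in text.split('\\') if byte)
    if spec.startswith('bin:'):
        # file holding raw binary data
        with open(spec[4:], 'rb') as fh:
            return fh.read()
    # default: a literal \xAA\xBB-style string
    return bytes(int(byte, 16) for byte in spec.split('\\x') if byte)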
| 118
| 235
| 0.737288
| 41
| 236
| 4.219512
| 0.609756
| 0.127168
| 0.080925
| 0.115607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 236
| 1
| 236
| 236
| 0.848039
| 0
| 0
| 0
| 0
| 1
| 0.915254
| 0.224576
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
49c79e3325d61136f84853b3b052f3c0204416e8
| 118
|
py
|
Python
|
src/ScaleHDALSPAC/seq_qc/__init__.py
|
helloabunai/ScaleHD-ALSPAC
|
02c11bdd39ecc670d258c4301bbd4fa78bb90e9a
|
[
"MIT"
] | null | null | null |
src/ScaleHDALSPAC/seq_qc/__init__.py
|
helloabunai/ScaleHD-ALSPAC
|
02c11bdd39ecc670d258c4301bbd4fa78bb90e9a
|
[
"MIT"
] | null | null | null |
src/ScaleHDALSPAC/seq_qc/__init__.py
|
helloabunai/ScaleHD-ALSPAC
|
02c11bdd39ecc670d258c4301bbd4fa78bb90e9a
|
[
"MIT"
] | null | null | null |
# __all__ lists exported symbol names, not a module filename; imports are
# made explicitly relative for Python 3 compatibility
__all__ = ['SeqQC', 'BatchadaptWrapper']
from .__quality_control import SeqQC
from .__quality_control import BatchadaptWrapper
| 39.333333
| 47
| 0.864407
| 14
| 118
| 6.357143
| 0.571429
| 0.47191
| 0.404494
| 0.539326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 118
| 3
| 47
| 39.333333
| 0.824074
| 0
| 0
| 0
| 0
| 0
| 0.168067
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3fa690c8943f5771ab73bcbcbe910c4f25e7a0ae
| 78,817
|
py
|
Python
|
TweakApi/apis/product_size_material_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/product_size_material_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/product_size_material_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
tweak-api
Tweak API to integrate with all the Tweak services. You can find out more about Tweak at <a href='https://www.tweak.com'>https://www.tweak.com</a>, #tweak.
OpenAPI spec version: 1.0.8-beta.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ProductSizeMaterialApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def product_size_materials_change_stream_get(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_change_stream_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_change_stream_get_with_http_info(**kwargs)
else:
(data) = self.product_size_materials_change_stream_get_with_http_info(**kwargs)
return data
def product_size_materials_change_stream_get_with_http_info(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_change_stream_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['options']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_change_stream_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizeMaterials/change-stream'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'options' in params:
query_params['options'] = params['options']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='file',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_change_stream_post(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_change_stream_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_change_stream_post_with_http_info(**kwargs)
else:
(data) = self.product_size_materials_change_stream_post_with_http_info(**kwargs)
return data
def product_size_materials_change_stream_post_with_http_info(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_change_stream_post_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['options']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_change_stream_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizeMaterials/change-stream'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'options' in params:
form_params.append(('options', params['options']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='file',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_count_get(self, **kwargs):
"""
Count instances of the model matched by where from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_count_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_count_get_with_http_info(**kwargs)
else:
(data) = self.product_size_materials_count_get_with_http_info(**kwargs)
return data
def product_size_materials_count_get_with_http_info(self, **kwargs):
"""
Count instances of the model matched by where from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_count_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['where']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_count_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizeMaterials/count'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'where' in params:
query_params['where'] = params['where']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_find_one_get(self, **kwargs):
"""
Find first instance of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_find_one_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_find_one_get_with_http_info(**kwargs)
else:
(data) = self.product_size_materials_find_one_get_with_http_info(**kwargs)
return data
def product_size_materials_find_one_get_with_http_info(self, **kwargs):
"""
Find first instance of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_find_one_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_find_one_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizeMaterials/findOne'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_get(self, **kwargs):
"""
Find all instances of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: list[ProductSizeMaterial]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_get_with_http_info(**kwargs)
else:
(data) = self.product_size_materials_get_with_http_info(**kwargs)
return data
def product_size_materials_get_with_http_info(self, **kwargs):
"""
Find all instances of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: list[ProductSizeMaterial]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizeMaterials'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ProductSizeMaterial]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_id_delete(self, id, **kwargs):
"""
Delete a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_delete(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_delete_with_http_info(id, **kwargs)
else:
(data) = self.product_size_materials_id_delete_with_http_info(id, **kwargs)
return data
def product_size_materials_id_delete_with_http_info(self, id, **kwargs):
"""
Delete a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_delete_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_delete`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_id_exists_get(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_exists_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_exists_get_with_http_info(id, **kwargs)
else:
(data) = self.product_size_materials_id_exists_get_with_http_info(id, **kwargs)
return data
def product_size_materials_id_exists_get_with_http_info(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_exists_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_exists_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_exists_get`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}/exists'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_id_get(self, id, **kwargs):
"""
Find a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_get_with_http_info(id, **kwargs)
else:
(data) = self.product_size_materials_id_get_with_http_info(id, **kwargs)
return data
def product_size_materials_id_get_with_http_info(self, id, **kwargs):
"""
Find a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_get`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_id_head(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_head(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_head_with_http_info(id, **kwargs)
else:
(data) = self.product_size_materials_id_head_with_http_info(id, **kwargs)
return data
def product_size_materials_id_head_with_http_info(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_head_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_head" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_head`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'HEAD',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
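# Existence-check sketch for the HEAD endpoint above (editor's illustration;
# `api` is assumed to be a configured instance of this API class). Note the
# call returns a deserialized InlineResponse2002 object, not a bare boolean:
#
#     result = api.product_size_materials_id_head("some-id")
#     print(result)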
def product_size_materials_id_material_get(self, id, **kwargs):
"""
Fetches belongsTo relation material.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_material_get(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param bool refresh:
:return: ProductMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_material_get_with_http_info(id, **kwargs)
else:
data = self.product_size_materials_id_material_get_with_http_info(id, **kwargs)
return data
def product_size_materials_id_material_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation material.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_material_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param bool refresh:
:return: ProductMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_material_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_material_get`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}/material'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_id_patch(self, id, **kwargs):
"""
Patch attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_patch(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param ProductSizeMaterial data: An object of model property name/value pairs
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_patch_with_http_info(id, **kwargs)
else:
data = self.product_size_materials_id_patch_with_http_info(id, **kwargs)
return data
def product_size_materials_id_patch_with_http_info(self, id, **kwargs):
"""
Patch attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_patch_with_http_info(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param ProductSizeMaterial data: An object of model property name/value pairs
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_patch`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
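# Partial-update sketch for the PATCH endpoint above (editor's illustration;
# `api` and the `name` property are assumptions, not part of this module).
# PATCH only persists the properties present in `data`, leaving the other
# attributes of the stored instance untouched:
#
#     patched = api.product_size_materials_id_patch(
#         "some-id", data=ProductSizeMaterial(name="matte"))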
def product_size_materials_id_pdf_color_profile_get(self, id, **kwargs):
"""
Fetches belongsTo relation pdfColorProfile.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_pdf_color_profile_get(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param bool refresh:
:return: ProductPdfColorProfile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_pdf_color_profile_get_with_http_info(id, **kwargs)
else:
data = self.product_size_materials_id_pdf_color_profile_get_with_http_info(id, **kwargs)
return data
def product_size_materials_id_pdf_color_profile_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation pdfColorProfile.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_pdf_color_profile_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param bool refresh:
:return: ProductPdfColorProfile
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_pdf_color_profile_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_pdf_color_profile_get`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}/pdfColorProfile'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductPdfColorProfile',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_id_put(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_put(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: Model id (required)
:param ProductSizeMaterial data: Model instance data
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_put_with_http_info(id, **kwargs)
else:
data = self.product_size_materials_id_put_with_http_info(id, **kwargs)
return data
def product_size_materials_id_put_with_http_info(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_put_with_http_info(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: Model id (required)
:param ProductSizeMaterial data: Model instance data
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_put`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_id_replace_post(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_replace_post(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: Model id (required)
:param ProductSizeMaterial data: Model instance data
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_replace_post_with_http_info(id, **kwargs)
else:
data = self.product_size_materials_id_replace_post_with_http_info(id, **kwargs)
return data
def product_size_materials_id_replace_post_with_http_info(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_replace_post_with_http_info(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: Model id (required)
:param ProductSizeMaterial data: Model instance data
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_replace_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_replace_post`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}/replace'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
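# Replace-semantics sketch (editor's illustration; `api` and `full_instance`
# are assumptions). Unlike PATCH above, PUT and this POST /replace endpoint
# substitute the entire stored instance with `data`, so properties omitted
# from `full_instance` fall back to their defaults:
#
#     replaced = api.product_size_materials_id_replace_post(
#         "some-id", data=full_instance)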
def product_size_materials_id_size_get(self, id, **kwargs):
"""
Fetches belongsTo relation size.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_size_get(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param bool refresh:
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_size_get_with_http_info(id, **kwargs)
else:
data = self.product_size_materials_id_size_get_with_http_info(id, **kwargs)
return data
def product_size_materials_id_size_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation size.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_size_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param bool refresh:
:return: ProductSize
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_size_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_size_get`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}/size'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSize',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def product_size_materials_id_team_get(self, id, **kwargs):
"""
Fetches belongsTo relation team.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_team_get(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param bool refresh:
:return: Team
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_id_team_get_with_http_info(id, **kwargs)
else:
data = self.product_size_materials_id_team_get_with_http_info(id, **kwargs)
return data
def product_size_materials_id_team_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation team.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_id_team_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param str id: ProductSizeMaterial id (required)
:param bool refresh:
:return: Team
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_id_team_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `product_size_materials_id_team_get`")
collection_formats = {}
resource_path = '/ProductSizeMaterials/{id}/team'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Team',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
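# Asynchronous-call sketch (editor's illustration; `api` is assumed to be a
# configured instance of this API class). Passing `callback` makes the method
# return a request handle immediately and deliver the deserialized result to
# the callback instead:
#
#     def on_team(team):
#         print(team)
#
#     handle = api.product_size_materials_id_team_get("some-id", callback=on_team)
#     # `handle` can be waited on; its exact type depends on the generated ApiClient.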
def product_size_materials_post(self, **kwargs):
"""
Create a new instance of the model and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_post(callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param ProductSizeMaterial data: Model instance data
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.product_size_materials_post_with_http_info(**kwargs)
else:
data = self.product_size_materials_post_with_http_info(**kwargs)
return data
def product_size_materials_post_with_http_info(self, **kwargs):
"""
Create a new instance of the model and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.product_size_materials_post_with_http_info(callback=callback_function)
:param callback function: The callback function for an asynchronous request. (optional)
:param ProductSizeMaterial data: Model instance data
:return: ProductSizeMaterial
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method product_size_materials_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ProductSizeMaterials'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductSizeMaterial',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
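# Create sketch for the POST endpoint above (editor's illustration; `api` and
# the constructor arguments are assumptions). Required model properties are
# validated server-side, so an underspecified instance may be rejected:
#
#     created = api.product_size_materials_post(data=ProductSizeMaterial())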
# --- next file: tests/test_global_muscle_tracking.py (Sina-Mehdiz/Bioptim @ 49c13c089db8200f503d0209f7f8685607d9ccaa, MIT license, Python, 24,500 bytes) ---
"""
Test for file IO
"""
import pytest
import numpy as np
import biorbd
from bioptim import OdeSolver
from .utils import TestUtils
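# Editor's note: every test below is parametrized over three ODE solvers; a
# subset can be selected by test name from the command line, e.g.:
#
#     pytest tests/test_global_muscle_tracking.py -k "activations" -v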
@pytest.mark.parametrize("ode_solver", [OdeSolver.RK4, OdeSolver.RK8, OdeSolver.IRK])
def test_muscle_activations_and_states_tracking(ode_solver):
# Load muscle_activations_tracker
bioptim_folder = TestUtils.bioptim_folder()
tracker = TestUtils.load_module(bioptim_folder + "/examples/muscle_driven_ocp/muscle_activations_tracker.py")
ode_solver = ode_solver()
# Define the problem
model_path = bioptim_folder + "/examples/muscle_driven_ocp/arm26.bioMod"
biorbd_model = biorbd.Model(model_path)
final_time = 2
n_shooting = 9
use_residual_torque = True
# Generate random data to fit
np.random.seed(42)
t, markers_ref, x_ref, muscle_activations_ref = tracker.generate_data(
biorbd_model, final_time, n_shooting, use_residual_torque=use_residual_torque
)
biorbd_model = biorbd.Model(model_path)  # reload the model so it does not share free variables with the data generated above
ocp = tracker.prepare_ocp(
biorbd_model,
final_time,
n_shooting,
markers_ref,
muscle_activations_ref,
x_ref[: biorbd_model.nbQ(), :],
use_residual_torque=use_residual_torque,
kin_data_to_track="q",
ode_solver=ode_solver,
)
sol = ocp.solve()
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
if isinstance(ode_solver, OdeSolver.RK8):
np.testing.assert_almost_equal(f[0, 0], 6.340821289366818e-06)
else:
np.testing.assert_almost_equal(f[0, 0], 6.518854595660012e-06)
# Check constraints
g = np.array(sol.constraints)
np.testing.assert_equal(g.shape, (36, 1))
np.testing.assert_almost_equal(g, np.zeros((36, 1)), decimal=6)
# Check some of the results
q, qdot, tau, mus = sol.states["q"], sol.states["qdot"], sol.controls["tau"], sol.controls["muscles"]
if isinstance(ode_solver, OdeSolver.IRK):
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-9.11292790e-06, -9.98708184e-06]))
np.testing.assert_almost_equal(q[:, -1], np.array([-0.49388008, -1.4492482]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([-1.58428412e-04, -6.69634564e-05]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.87809776, -2.64745571]))
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([-6.89985946e-07, 8.85124432e-06]))
np.testing.assert_almost_equal(tau[:, -1], np.array([-7.38474471e-06, -5.12994471e-07]))
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.37442763, 0.95074155, 0.73202163, 0.59858471, 0.15595214, 0.15596623])
)
np.testing.assert_almost_equal(
mus[:, -1], np.array([0.54685822, 0.18481451, 0.96949193, 0.77512584, 0.93948978, 0.89483523])
)
elif isinstance(ode_solver, OdeSolver.RK8):
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.20296925e-05, -1.42883927e-05]))
np.testing.assert_almost_equal(q[:, -1], np.array([-0.49387969, -1.44924798]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([-6.75664553e-05, -1.59537195e-04]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.87809983, -2.64745432]))
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([-1.56121402e-06, 1.32347911e-05]))
np.testing.assert_almost_equal(tau[:, -1], np.array([-7.48770006e-06, -5.90970158e-07]))
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.37438764, 0.95075245, 0.73203411, 0.59854825, 0.15591868, 0.15595168])
)
np.testing.assert_almost_equal(
mus[:, -1], np.array([0.5468589, 0.18481491, 0.96949149, 0.77512487, 0.93948887, 0.89483671])
)
else:
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.1123547e-05, -1.2705707e-05]))
np.testing.assert_almost_equal(q[:, -1], np.array([-0.4938793, -1.4492479]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([-9.0402027e-05, -1.3433204e-04]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.8780898, -2.6474401]))
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([-1.1482641e-06, 1.1539847e-05]))
np.testing.assert_almost_equal(tau[:, -1], np.array([-7.6255276e-06, -5.1947040e-07]))
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.3744008, 0.9507489, 0.7320295, 0.5985624, 0.1559316, 0.1559573])
)
np.testing.assert_almost_equal(
mus[:, -1], np.array([0.5468632, 0.184813, 0.969489, 0.7751258, 0.9394897, 0.8948353])
)
# save and load
TestUtils.save_and_load(sol, ocp, False)
# simulate
TestUtils.simulate(sol)
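# Interactive reproduction sketch (editor's illustration, not part of the
# suite): since the parametrized argument is the solver class itself, a single
# case can be replayed directly from a Python session:
#
#     test_muscle_activations_and_states_tracking(OdeSolver.RK4)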
@pytest.mark.parametrize("ode_solver", [OdeSolver.RK4, OdeSolver.RK8, OdeSolver.IRK])
def test_muscle_activation_no_residual_torque_and_markers_tracking(ode_solver):
# Load muscle_activations_tracker
bioptim_folder = TestUtils.bioptim_folder()
tracker = TestUtils.load_module(bioptim_folder + "/examples/muscle_driven_ocp/muscle_activations_tracker.py")
ode_solver = ode_solver()
# Define the problem
model_path = bioptim_folder + "/examples/muscle_driven_ocp/arm26.bioMod"
biorbd_model = biorbd.Model(model_path)
final_time = 2
n_shooting = 9
use_residual_torque = False
# Generate random data to fit
np.random.seed(42)
t, markers_ref, x_ref, muscle_activations_ref = tracker.generate_data(
biorbd_model, final_time, n_shooting, use_residual_torque=use_residual_torque
)
biorbd_model = biorbd.Model(model_path)  # reload the model so it does not share free variables with the data generated above
ocp = tracker.prepare_ocp(
biorbd_model,
final_time,
n_shooting,
markers_ref,
muscle_activations_ref,
x_ref[: biorbd_model.nbQ(), :],
use_residual_torque=use_residual_torque,
kin_data_to_track="q",
ode_solver=ode_solver,
)
sol = ocp.solve()
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
if isinstance(ode_solver, OdeSolver.RK8):
np.testing.assert_almost_equal(f[0, 0], 6.39401362889915e-06)
else:
np.testing.assert_almost_equal(f[0, 0], 6.5736277330517424e-06)
# Check constraints
g = np.array(sol.constraints)
np.testing.assert_equal(g.shape, (36, 1))
np.testing.assert_almost_equal(g, np.zeros((36, 1)), decimal=6)
# Check some of the results
q, qdot, mus = sol.states["q"], sol.states["qdot"], sol.controls["muscles"]
if isinstance(ode_solver, OdeSolver.IRK):
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-9.17149105e-06, -1.00592773e-05]))
np.testing.assert_almost_equal(q[:, -1], np.array([-0.49387979, -1.44924811]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([-1.58831625e-04, -6.69127853e-05]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.87809239, -2.64744482]))
# initial and final controls
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.37442688, 0.95074176, 0.73202184, 0.59858414, 0.15595162, 0.155966])
)
np.testing.assert_almost_equal(
mus[:, -1], np.array([0.5468617, 0.18481307, 0.96948995, 0.77512646, 0.93949036, 0.89483428])
)
elif isinstance(ode_solver, OdeSolver.RK8):
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.20797525e-05, -1.44483833e-05]))
np.testing.assert_almost_equal(q[:, -1], np.array([-0.4938794, -1.44924789]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([-6.79694854e-05, -1.59565906e-04]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.87809444, -2.64744336]))
# initial and final controls
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.37438663, 0.95075271, 0.7320346, 0.5985466, 0.15591717, 0.15595102])
)
np.testing.assert_almost_equal(
mus[:, -1], np.array([0.5468624, 0.18481347, 0.9694895, 0.77512549, 0.93948945, 0.89483576])
)
else:
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-1.1123547e-05, -1.2705707e-05]))
np.testing.assert_almost_equal(q[:, -1], np.array([-0.49387905, -1.4492478]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([-9.07884121e-05, -1.34382832e-04]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.87808434, -2.64742889]))
# initial and final controls
np.testing.assert_almost_equal(
mus[:, 0], np.array([0.37439988, 0.95074914, 0.73202991, 0.598561, 0.15593039, 0.15595677])
)
np.testing.assert_almost_equal(
mus[:, -1], np.array([0.54686681, 0.18481157, 0.969487, 0.7751264, 0.9394903, 0.89483438])
)
# save and load
TestUtils.save_and_load(sol, ocp, False)
# simulate
TestUtils.simulate(sol)
@pytest.mark.parametrize("ode_solver", [OdeSolver.RK4, OdeSolver.RK8, OdeSolver.IRK])
def test_muscle_excitation_with_residual_torque_and_markers_tracking(ode_solver):
# Load muscle_excitations_tracker
bioptim_folder = TestUtils.bioptim_folder()
tracker = TestUtils.load_module(bioptim_folder + "/examples/muscle_driven_ocp/muscle_excitations_tracker.py")
ode_solver = ode_solver()
# Define the problem
model_path = bioptim_folder + "/examples/muscle_driven_ocp/arm26.bioMod"
biorbd_model = biorbd.Model(model_path)
final_time = 0.5
n_shooting = 9
# Generate random data to fit
np.random.seed(42)
t, markers_ref, x_ref, muscle_excitations_ref = tracker.generate_data(biorbd_model, final_time, n_shooting)
biorbd_model = biorbd.Model(model_path)  # reload the model so it does not share free variables with the data generated above
ocp = tracker.prepare_ocp(
biorbd_model,
final_time,
n_shooting,
markers_ref,
muscle_excitations_ref,
x_ref[: biorbd_model.nbQ(), :].T,
use_residual_torque=True,
kin_data_to_track="markers",
ode_solver=ode_solver,
)
sol = ocp.solve()
# Check constraints
g = np.array(sol.constraints)
np.testing.assert_equal(g.shape, (90, 1))
np.testing.assert_almost_equal(g, np.zeros((90, 1)), decimal=6)
# Check some of the results
q, qdot, mus_states, tau, mus_controls = (
sol.states["q"],
sol.states["qdot"],
sol.states["muscles"],
sol.controls["tau"],
sol.controls["muscles"],
)
if isinstance(ode_solver, OdeSolver.IRK):
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
np.testing.assert_almost_equal(f[0, 0], 7.972968350373634e-07)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00025738, 0.00155432]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.08502663, -0.49682756]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.0091607, -0.08174147]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.13524112, -1.55868503]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.54298176, 0.310865, 0.94645053, 0.7714009, 0.91816808, 0.88114152])
)
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([-2.91199924e-06, -1.34810801e-06]))
np.testing.assert_almost_equal(tau[:, -1], np.array([-5.50139682e-07, -4.73229437e-07]))
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37477829, 0.95063176, 0.73196614, 0.59867481, 0.1560593, 0.15600768])
)
np.testing.assert_almost_equal(
mus_controls[:, -1], np.array([0.546718, 0.18485758, 0.96954554, 0.7751266, 0.93947678, 0.89481784])
)
elif isinstance(ode_solver, OdeSolver.RK8):
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
np.testing.assert_almost_equal(f[0, 0], 7.972968350373634e-07)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00027084, 0.00158996]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.08501423, -0.4967964]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.00929371, -0.08205146]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.13526892, -1.55864048]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.54288542, 0.31087161, 0.94651896, 0.77142083, 0.91824438, 0.88120091])
)
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([-2.86811951e-06, -1.41200803e-06]))
np.testing.assert_almost_equal(tau[:, -1], np.array([-4.91632371e-07, -5.53045415e-07]))
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37477774, 0.95063188, 0.73196591, 0.5986757, 0.15606055, 0.15600778])
)
np.testing.assert_almost_equal(
mus_controls[:, -1], np.array([0.54671643, 0.18485788, 0.96954568, 0.77512639, 0.93947659, 0.89481833])
)
else:
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
np.testing.assert_almost_equal(f[0, 0], 3.5086270922948964e-07)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00019766, 0.00078078]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.08521152, -0.49746311]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.00915609, -0.07268497]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.13455099, -1.56043294]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.54337288, 0.31120388, 0.94682111, 0.77137861, 0.91864248, 0.88108659])
)
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([-4.62544381e-07, -9.75433210e-07]))
np.testing.assert_almost_equal(tau[:, -1], np.array([-7.93739618e-07, 8.51675280e-07]))
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37458886, 0.95067258, 0.73198315, 0.59866926, 0.15604832, 0.15600496])
)
np.testing.assert_almost_equal(
mus_controls[:, -1], np.array([0.54673199, 0.18485512, 0.9695433, 0.77513005, 0.93947984, 0.89480958])
)
# save and load
TestUtils.save_and_load(sol, ocp, False)
# simulate
TestUtils.simulate(sol)
@pytest.mark.parametrize("ode_solver", [OdeSolver.RK4, OdeSolver.RK8, OdeSolver.IRK])
def test_muscle_excitation_no_residual_torque_and_markers_tracking(ode_solver):
# Load muscle_excitations_tracker
bioptim_folder = TestUtils.bioptim_folder()
tracker = TestUtils.load_module(bioptim_folder + "/examples/muscle_driven_ocp/muscle_excitations_tracker.py")
ode_solver = ode_solver()
# Define the problem
model_path = bioptim_folder + "/examples/muscle_driven_ocp/arm26.bioMod"
biorbd_model = biorbd.Model(model_path)
final_time = 0.5
n_shooting = 9
# Generate random data to fit
np.random.seed(42)
t, markers_ref, x_ref, muscle_excitations_ref = tracker.generate_data(biorbd_model, final_time, n_shooting)
biorbd_model = biorbd.Model(model_path)  # reload the model so it does not share free variables with the data generated above
ocp = tracker.prepare_ocp(
biorbd_model,
final_time,
n_shooting,
markers_ref,
muscle_excitations_ref,
x_ref[: biorbd_model.nbQ(), :].T,
use_residual_torque=False,
kin_data_to_track="markers",
ode_solver=ode_solver,
)
sol = ocp.solve()
# Check constraints
g = np.array(sol.constraints)
np.testing.assert_equal(g.shape, (90, 1))
np.testing.assert_almost_equal(g, np.zeros((90, 1)), decimal=6)
# Check some of the results
q, qdot, mus_states, mus_controls = (
sol.states["q"],
sol.states["qdot"],
sol.states["muscles"],
sol.controls["muscles"],
)
if isinstance(ode_solver, OdeSolver.IRK):
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
np.testing.assert_almost_equal(f[0, 0], 7.973265397440505e-07)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00025737, 0.00155433]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.08502664, -0.49682755]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.00916055, -0.08174164]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.13524117, -1.55868483]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.54298176, 0.310865, 0.94645053, 0.7714009, 0.91816808, 0.88114152])
)
# initial and final controls
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37477831, 0.95063176, 0.73196614, 0.59867481, 0.1560593, 0.15600768])
)
np.testing.assert_almost_equal(
mus_controls[:, -1], np.array([0.546718, 0.18485758, 0.96954554, 0.7751266, 0.93947678, 0.89481784])
)
elif isinstance(ode_solver, OdeSolver.RK8):
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
np.testing.assert_almost_equal(f[0, 0], 7.973265397440505e-07)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00027084, 0.00158998]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.08501423, -0.49679638]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.0092936, -0.08205169]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.13526894, -1.55864023]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.54288542, 0.31087161, 0.94651896, 0.77142083, 0.91824438, 0.88120091])
)
# initial and final controls
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37477776, 0.95063188, 0.73196591, 0.5986757, 0.15606055, 0.15600778])
)
np.testing.assert_almost_equal(
mus_controls[:, -1], np.array([0.54671643, 0.18485788, 0.96954568, 0.77512639, 0.93947659, 0.89481833])
)
else:
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
np.testing.assert_almost_equal(f[0, 0], 3.5087093735149467e-07)
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([-0.00019764, 0.00078075]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.08521155, -0.49746317]))
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.009156, -0.07268483]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.13455129, -1.56043348]))
# initial and final muscle state
np.testing.assert_almost_equal(
mus_states[:, 0], np.array([0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864, 0.15599452])
)
np.testing.assert_almost_equal(
mus_states[:, -1], np.array([0.54337289, 0.31120388, 0.94682111, 0.77137861, 0.91864248, 0.88108659])
)
# initial and final controls
np.testing.assert_almost_equal(
mus_controls[:, 0], np.array([0.37458887, 0.95067258, 0.73198315, 0.59866926, 0.15604832, 0.15600496])
)
np.testing.assert_almost_equal(
mus_controls[:, -1], np.array([0.54673199, 0.18485512, 0.9695433, 0.77513005, 0.93947984, 0.89480958])
)
# save and load
TestUtils.save_and_load(sol, ocp, False)
# simulate
TestUtils.simulate(sol)
@pytest.mark.parametrize("ode_solver", [OdeSolver.RK4, OdeSolver.RK8, OdeSolver.IRK])
def test_muscle_activation_and_contacts_tracking(ode_solver):
# Load muscle_activations_contact_tracker
bioptim_folder = TestUtils.bioptim_folder()
contact = TestUtils.load_module(
bioptim_folder + "/examples/muscle_driven_with_contact/muscle_activations_contacts_tracker.py"
)
ode_solver = ode_solver()
# Define the problem
model_path = bioptim_folder + "/examples/muscle_driven_with_contact/2segments_4dof_2contacts_1muscle.bioMod"
biorbd_model = biorbd.Model(model_path)
final_time = 0.1
n_shooting = 5
# Generate random data to fit
np.random.seed(42)
contact_forces_ref = np.random.rand(biorbd_model.nbContacts(), n_shooting)
muscle_activations_ref = np.random.rand(biorbd_model.nbMuscles(), n_shooting + 1)
ocp = contact.prepare_ocp(
model_path,
final_time,
n_shooting,
muscle_activations_ref[:, :-1],
contact_forces_ref,
ode_solver=ode_solver,
)
sol = ocp.solve()
# Check objective function value
f = np.array(sol.cost)
np.testing.assert_equal(f.shape, (1, 1))
np.testing.assert_almost_equal(f[0, 0], 1.2080146471135251)
# Check constraints
g = np.array(sol.constraints)
np.testing.assert_equal(g.shape, (40, 1))
np.testing.assert_almost_equal(g, np.zeros((40, 1)), decimal=6)
# Check some of the results
q, qdot, tau, mus_controls = sol.states["q"], sol.states["qdot"], sol.controls["tau"], sol.controls["muscles"]
# initial and final position
np.testing.assert_almost_equal(q[:, 0], np.array([0.0, 0.0, -0.75, 0.75]))
np.testing.assert_almost_equal(q[:, -1], np.array([0.01785865, -0.01749107, -0.8, 0.8]), decimal=5)
# initial and final velocities
np.testing.assert_almost_equal(qdot[:, 0], np.array([0.0, 0.0, 0.0, 0.0]))
np.testing.assert_almost_equal(qdot[:, -1], np.array([0.5199767, -0.535388, -1.49267023, 1.4926703]), decimal=5)
# initial and final controls
np.testing.assert_almost_equal(tau[:, 0], np.array([5.3773376, 127.6205162, -21.9933179, 1.3644034]), decimal=5)
np.testing.assert_almost_equal(tau[:, -1], np.array([57.203734, 72.3153286, -7.4076227, 1.2641681]), decimal=5)
np.testing.assert_almost_equal(mus_controls[:, 0], np.array([0.18722964]), decimal=5)
np.testing.assert_almost_equal(mus_controls[:, -1], np.array([0.29591125]), decimal=5)
# save and load
TestUtils.save_and_load(sol, ocp, False)
# simulate
TestUtils.simulate(sol)
# --- next file: Jupyter Notebooks/data_augmentation.py (ian-flores/Deep-Learning-Species-Identification @ 7e56482229ec587c1001aeecdf02f96f08bb9269, MIT license, Python, 12,828 bytes, 3 stars) ---
import numpy as np
import itertools
import tensorflow as tf
from six.moves import cPickle as pickle
from six.moves import range
import random
import matplotlib.pyplot as plt
from tensorflow.contrib.layers import flatten
from PIL import Image, ImageOps
from scipy.ndimage.interpolation import shift
from IPython.display import Image as Im
from sklearn.utils import shuffle
import sklearn
import pandas
### Linear Augmentation
def linear_augmentation(dataset, num_shifts, labels):
augmented_dataset = []
augmented_labels = []
for idx, image in enumerate(dataset):
idx = labels[idx]  # reuse idx to hold this image's label; it is appended with every shifted copy
the_image = np.asarray(image)
for i in range(num_shifts+1):
pre_image = the_image.reshape((28,28))
# shift up
shifted_image_up = shift(pre_image, [(i*(-1)), 0])
augmented_dataset.append(shifted_image_up)
augmented_labels.append(idx)
del shifted_image_up
# shift down
shifted_image_down = shift(pre_image, [i, 0])
augmented_dataset.append(shifted_image_down)
augmented_labels.append(idx)
del shifted_image_down
# shift left
shifted_image_left = shift(pre_image, [0, (i*(-1))])
augmented_dataset.append(shifted_image_left)
augmented_labels.append(idx)
del shifted_image_left
# shift right
shifted_image_right = shift(pre_image, [0, i])
augmented_dataset.append(shifted_image_right)
augmented_labels.append(idx)
del shifted_image_right
del pre_image
del the_image
return np.asarray(augmented_dataset), np.asarray(augmented_labels)
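### Usage sketch (editor's illustration)
# With flattened 28x28 inputs, linear_augmentation yields 4 * (num_shifts + 1)
# shifted copies per image (including four unshifted copies from i == 0).
# The random arrays below are stand-in data, not from the original notebook:
#
#     images = np.random.rand(10, 784)
#     labels = np.arange(10)
#     aug_x, aug_y = linear_augmentation(images, num_shifts=2, labels=labels)
#     assert aug_x.shape == (10 * 12, 28, 28)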
### Single Linear Augmentation
def linear_augmentation_single(dataset, num_shifts, labels):
augmented_dataset = []
augmented_labels = []
for idx, image in enumerate(dataset):
idx = labels[idx]  # reuse idx to hold this image's label; it is appended with every shifted copy
the_image = np.asarray(image)
i = num_shifts + 1  # single fixed shift magnitude instead of the loop used in linear_augmentation
pre_image = the_image.reshape((28,28))
# shift up
shifted_image_up = shift(pre_image, [(i*(-1)), 0])
augmented_dataset.append(shifted_image_up)
augmented_labels.append(idx)
del shifted_image_up
# shift down
shifted_image_down = shift(pre_image, [i, 0])
augmented_dataset.append(shifted_image_down)
augmented_labels.append(idx)
del shifted_image_down
# shift left
shifted_image_left = shift(pre_image, [0, (i*(-1))])
augmented_dataset.append(shifted_image_left)
augmented_labels.append(idx)
del shifted_image_left
# shift right
shifted_image_right = shift(pre_image, [0, i])
augmented_dataset.append(shifted_image_right)
augmented_labels.append(idx)
del shifted_image_right
del pre_image
del the_image
return np.asarray(augmented_dataset), np.asarray(augmented_labels)
### Diagonal Augmentation
def diagonal_augmentation(dataset, num_shifts, labels):
augmented_dataset = []
augmented_labels = []
for idx, image in enumerate(dataset):
idx = labels[idx]  # reuse idx to hold this image's label; it is appended with every shifted copy
the_image = np.asarray(image)
for i in range(num_shifts+1):
pre_image = the_image.reshape((28,28))
# shift diagonal left down
shifted_image_diagonal_left_down = shift(pre_image, [(i*(-1)), (i*(-1))])
augmented_dataset.append(shifted_image_diagonal_left_down)
augmented_labels.append(idx)
del shifted_image_diagonal_left_down
# shift diagonal right down
shifted_image_diagonal_right_down = shift(pre_image, [i, (i*(-1))])
augmented_dataset.append(shifted_image_diagonal_right_down)
augmented_labels.append(idx)
del shifted_image_diagonal_right_down
# shift diagonal left up
shifted_image_diagonal_left_up = shift(pre_image, [(i*(-1)), i])
augmented_dataset.append(shifted_image_diagonal_left_up)
augmented_labels.append(idx)
del shifted_image_diagonal_left_up
# shift diagonal right up
shifted_image_diagonal_right_up = shift(pre_image, [i, i])
augmented_dataset.append(shifted_image_diagonal_right_up)
augmented_labels.append(idx)
del shifted_image_diagonal_right_up
del pre_image
del the_image
return np.asarray(augmented_dataset), np.asarray(augmented_labels)
### Single Diagonal Augmentation
def diagonal_augmentation_single(dataset, num_shifts, labels):
augmented_dataset = []
augmented_labels = []
for idx, image in enumerate(dataset):
idx = labels[idx]  # reuse idx to hold this image's label; it is appended with every shifted copy
the_image = np.asarray(image)
i = num_shifts + 1  # single fixed shift magnitude instead of the loop used in diagonal_augmentation
pre_image = the_image.reshape((28,28))
# shift diagonal left down
shifted_image_diagonal_left_down = shift(pre_image, [(i*(-1)), (i*(-1))])
augmented_dataset.append(shifted_image_diagonal_left_down)
augmented_labels.append(idx)
del shifted_image_diagonal_left_down
# shift diagonal right down
shifted_image_diagonal_right_down = shift(pre_image, [i, (i*(-1))])
augmented_dataset.append(shifted_image_diagonal_right_down)
augmented_labels.append(idx)
del shifted_image_diagonal_right_down
# shift diagonal left up
shifted_image_diagonal_left_up = shift(pre_image, [(i*(-1)), i])
augmented_dataset.append(shifted_image_diagonal_left_up)
augmented_labels.append(idx)
del shifted_image_diagonal_left_up
# shift diagonal right up
shifted_image_diagonal_right_up = shift(pre_image, [i, i])
augmented_dataset.append(shifted_image_diagonal_right_up)
augmented_labels.append(idx)
del shifted_image_diagonal_right_up
del pre_image
del the_image
return np.asarray(augmented_dataset), np.asarray(augmented_labels)
### Combined Augmentation
def combined_augmentation(dataset, num_shifts, labels):
augmented_dataset = []
augmented_labels = []
for idx, image in enumerate(dataset):
idx = labels[idx]
the_image = np.asarray(image)
for i in range(num_shifts+1):
pre_image = the_image.reshape((28,28))
# shift up
shifted_image_up = shift(pre_image, [(i*(-1)), 0])
augmented_dataset.append(shifted_image_up)
augmented_labels.append(idx)
del shifted_image_up
# shift_down
shifted_image_down = shift(pre_image, [i, 0])
augmented_dataset.append(shifted_image_down)
augmented_labels.append(idx)
del shifted_image_down
#shift_left
shifted_image_left = shift(pre_image, [0, (i*(-1))])
augmented_dataset.append(shifted_image_left)
augmented_labels.append(idx)
del shifted_image_left
#shift_right
shifted_image_right = shift(pre_image, [0, i])
augmented_dataset.append(shifted_image_right)
augmented_labels.append(idx)
del shifted_image_right
# shift diagonal left down
shifted_image_diagonal_left_down = shift(pre_image, [(i*(-1)), (i*(-1))])
augmented_dataset.append(shifted_image_diagonal_left_down)
augmented_labels.append(idx)
del shifted_image_diagonal_left_down
# shift diagonal right down
shifted_image_diagonal_right_down = shift(pre_image, [i, (i*(-1))])
augmented_dataset.append(shifted_image_diagonal_right_down)
augmented_labels.append(idx)
del shifted_image_diagonal_right_down
#shift diagonal left up
shifted_image_diagonal_left_up = shift(pre_image, [(i*(-1)), i])
augmented_dataset.append(shifted_image_diagonal_left_up)
augmented_labels.append(idx)
del shifted_image_diagonal_left_up
# shift diagonal right up
shifted_image_diagonal_right_up = shift(pre_image, [i, i])
augmented_dataset.append(shifted_image_diagonal_right_up)
augmented_labels.append(idx)
del shifted_image_diagonal_right_up
del pre_image
del the_image
return np.asarray(augmented_dataset), np.asarray(augmented_labels)
### Single Combined Augmentation
def combined_augmentation_single(dataset, num_shifts, labels):
augmented_dataset = []
augmented_labels = []
for idx, image in enumerate(dataset):
idx = labels[idx]
the_image = np.asarray(image)
i = num_shifts + 1
# for i in range(num_shifts+1):
pre_image = the_image.reshape((28,28))
# shift up
shifted_image_up = shift(pre_image, [(i*(-1)), 0])
augmented_dataset.append(shifted_image_up)
augmented_labels.append(idx)
del shifted_image_up
# shift_down
shifted_image_down = shift(pre_image, [i, 0])
augmented_dataset.append(shifted_image_down)
augmented_labels.append(idx)
del shifted_image_down
#shift_left
shifted_image_left = shift(pre_image, [0, (i*(-1))])
augmented_dataset.append(shifted_image_left)
augmented_labels.append(idx)
del shifted_image_left
#shift_right
shifted_image_right = shift(pre_image, [0, i])
augmented_dataset.append(shifted_image_right)
augmented_labels.append(idx)
del shifted_image_right
# shift diagonal left down
shifted_image_diagonal_left_down = shift(pre_image, [(i*(-1)), (i*(-1))])
augmented_dataset.append(shifted_image_diagonal_left_down)
augmented_labels.append(idx)
del shifted_image_diagonal_left_down
# shift diagonal right down
shifted_image_diagonal_right_down = shift(pre_image, [i, (i*(-1))])
augmented_dataset.append(shifted_image_diagonal_right_down)
augmented_labels.append(idx)
del shifted_image_diagonal_right_down
#shift diagonal left up
shifted_image_diagonal_left_up = shift(pre_image, [(i*(-1)), i])
augmented_dataset.append(shifted_image_diagonal_left_up)
augmented_labels.append(idx)
del shifted_image_diagonal_left_up
# shift diagonal right up
shifted_image_diagonal_right_up = shift(pre_image, [i, i])
augmented_dataset.append(shifted_image_diagonal_right_up)
augmented_labels.append(idx)
del shifted_image_diagonal_right_up
del pre_image
del the_image
return np.asarray(augmented_dataset), np.asarray(augmented_labels)
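# A minimal usage sketch for the augmentation helpers above, assuming
# ``numpy as np`` and ``from scipy.ndimage import shift`` are imported at the
# top of this file and that rows are flattened 28x28 images; the ``train_x``/
# ``train_y`` names are hypothetical:
#
#   lin_x, lin_y = linear_augmentation(train_x, 2, train_y)     # shifts 0..2, 4 directions
#   diag_x, diag_y = diagonal_augmentation(train_x, 2, train_y) # 4 diagonal directions
#   aug_x = np.concatenate([lin_x, diag_x])
#   aug_y = np.concatenate([lin_y, diag_y])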
### CNN Reformat
def reformat(dataset):
dataset = dataset.reshape((-1, 28, 28, 1, )).astype(np.float32)
return dataset
### SVM Reformat
def svm_reformat(dataset):
dataset = dataset.reshape((len(dataset), -1)).astype(np.float32)
return dataset
### CNN Evaluate
def evaluate(X_data, y_data):
sess = tf.get_default_session()
accuracy = sess.run(accuracy_operation, feed_dict={x: X_data, y: y_data})
return accuracy
### CNN Le-Net5
def LeNet(x):
mu = 0
sigma = 0.1
# Note: the 400-unit flatten below (5*5*16) implies 32x32 inputs, as in the
# standard LeNet-5; ``reformat`` above yields 28x28 images, so they are
# presumably padded to 32x32 upstream before reaching this graph.
# Layer 1: 5x5 convolution (depth 6) + ReLU, then 2x2 max-pooling.
W = tf.Variable(tf.truncated_normal([5, 5, 1, 6], mean = mu, stddev = sigma))
b = tf.Variable(tf.zeros(6))
layer1 = tf.nn.conv2d(x, W, strides=[1,1,1,1], padding="VALID")
layer1 = tf.nn.bias_add(layer1, b)
layer1 = tf.nn.relu(layer1)
pool1 = tf.nn.max_pool(layer1, ksize=[1,2,2,1], strides=[1,2,2,1], padding="VALID")
# Layer 2: 5x5 convolution (depth 16) + ReLU, then 2x2 max-pooling.
W = tf.Variable(tf.truncated_normal([5, 5, 6, 16], mean = mu, stddev = sigma))
b = tf.Variable(tf.zeros(16))
layer2 = tf.nn.conv2d(pool1, W, strides=[1,1,1,1], padding="VALID")
layer2 = tf.nn.bias_add(layer2, b)
layer2 = tf.nn.relu(layer2)
pool2 = tf.nn.max_pool(layer2, ksize=[1,2,2,1], strides=[1,2,2,1], padding="VALID")
# Fully connected head: flatten (400) -> 120 -> 84 -> 21 logits.
fc = flatten(pool2)
W = tf.Variable(tf.truncated_normal([400, 120], mean = mu, stddev = sigma))
b = tf.Variable(tf.zeros(120))
fc1 = tf.nn.relu(tf.add(tf.matmul(fc, W), b))
W = tf.Variable(tf.truncated_normal([120, 84], mean = mu, stddev = sigma))
b = tf.Variable(tf.zeros(84))
fc2 = tf.nn.relu(tf.add(tf.matmul(fc1, W), b))
W = tf.Variable(tf.truncated_normal([84, 21], mean = mu, stddev = sigma))
b = tf.Variable(tf.zeros(21))
logits = tf.add(tf.matmul(fc2, W), b)
return logits
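# A minimal sketch of the TF1.x graph that ``evaluate`` above relies on:
# ``x``, ``y`` and ``accuracy_operation`` are module-level names built
# elsewhere in this file, so the shapes and hyperparameters below are
# assumptions (32x32 inputs and 21 classes follow the LeNet weights):
#
#   x = tf.placeholder(tf.float32, (None, 32, 32, 1))
#   y = tf.placeholder(tf.int32, (None,))
#   one_hot_y = tf.one_hot(y, 21)
#   logits = LeNet(x)
#   loss = tf.reduce_mean(
#       tf.nn.softmax_cross_entropy_with_logits_v2(labels=one_hot_y, logits=logits))
#   training_operation = tf.train.AdamOptimizer(0.001).minimize(loss)
#   correct = tf.equal(tf.argmax(logits, 1), tf.argmax(one_hot_y, 1))
#   accuracy_operation = tf.reduce_mean(tf.cast(correct, tf.float32))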
| 36.135211
| 87
| 0.647334
| 1,636
| 12,828
| 4.775061
| 0.076406
| 0.147465
| 0.122888
| 0.118792
| 0.898233
| 0.866231
| 0.851382
| 0.846006
| 0.824501
| 0.802099
| 0
| 0.018081
| 0.258419
| 12,828
| 354
| 88
| 36.237288
| 0.803112
| 0.069613
| 0
| 0.778689
| 0
| 0
| 0.001685
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040984
| false
| 0
| 0.057377
| 0
| 0.139344
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7b06f14dff0ffc382cd7d294d18f018bcc5cae5
| 21,837
|
py
|
Python
|
openprocurement/relocation/api/tests/award_complaint.py
|
Leits/openprocurement.relocation.api
|
5b55ff0e748ebbde032dabcf75ad9da358e600aa
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/relocation/api/tests/award_complaint.py
|
Leits/openprocurement.relocation.api
|
5b55ff0e748ebbde032dabcf75ad9da358e600aa
|
[
"Apache-2.0"
] | 15
|
2016-08-19T13:05:39.000Z
|
2017-02-14T10:38:52.000Z
|
openprocurement/relocation/api/tests/award_complaint.py
|
leits/openprocurement.relocation.api
|
5b55ff0e748ebbde032dabcf75ad9da358e600aa
|
[
"Apache-2.0"
] | 4
|
2016-08-19T11:47:04.000Z
|
2018-02-13T08:16:04.000Z
|
# -*- coding: utf-8 -*-
import unittest
from openprocurement.relocation.api.tests.base import OwnershipWebTest, OpenUAOwnershipWebTest, OpenEUOwnershipWebTest
from openprocurement.relocation.api.tests.base import (
test_tender_data,
test_ua_tender_data,
test_uadefense_tender_data,
test_eu_tender_data,
test_tender_reporting_data,
test_tender_negotiation_data,
test_tender_negotiation_quick_data,
test_transfer_data)
from openprocurement.relocation.api.tests.base import (
test_bid_data,
test_ua_bid_data,
test_uadefense_bid_data,
test_eu_bid_data,
test_organization)
class AwardComplaintOwnershipChangeTest(OwnershipWebTest):
initial_data = test_tender_data
initial_bid = test_bid_data
first_owner = 'broker'
second_owner = 'broker1'
test_owner = 'broker1t'
invalid_owner = 'broker3'
first_provider = 'broker'
second_provider = 'broker2'
invalid_provider = 'broker4'
initial_auth = ('Basic', (first_owner, ''))
def test_change_award_complaint_ownership(self):
self.set_tendering_status()
authorization = self.app.authorization
self.app.authorization = ('Basic', (self.first_provider, ''))
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), self.initial_bid)
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
first_bid = response.json['data']
self.app.authorization = ('Basic', (self.second_provider, ''))
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), self.initial_bid)
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
bid = response.json['data']
bid_token = response.json['access']['token']
# submit award
self.app.authorization = authorization
self.set_qualification_status()
self.app.authorization = ('Basic', ('token', ''))
response = self.app.post_json('/tenders/{}/awards'.format(
self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': first_bid['id']}})
award = response.json['data']
self.award_id = award['id']
# submit complaint from broker
self.app.authorization = ('Basic', (self.second_provider, ''))
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(
self.tender_id, self.award_id, bid_token), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization, 'status': 'claim'}})
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
complaint = response.json['data']
complaint_token = response.json['access']['token']
complaint_transfer = response.json['access']['transfer']
self.assertEqual(complaint['author']['name'], test_organization['name'])
self.assertIn('id', complaint)
self.assertIn(complaint['id'], response.headers['Location'])
# check complaint owner
tender_doc = self.db.get(self.tender_id)
self.assertEqual(tender_doc['awards'][0]['complaints'][0]['owner'], self.second_provider)
self.app.authorization = ('Basic', (self.second_provider, ''))
# create Transfer
response = self.app.post_json('/transfers', {"data": test_transfer_data})
self.assertEqual(response.status, '201 Created')
transfer = response.json['data']
transfer_tokens = response.json['access']
# try to change ownership with invalid transfer token
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
{"data": {"id": transfer['id'], 'transfer': "fake_transfer_token"}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'], [
{u'description': u'Invalid transfer', u'location': u'body', u'name': u'transfer'}
])
# change complaint ownership
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
{"data": {"id": transfer['id'], 'transfer': complaint_transfer}})
self.assertEqual(response.status, '200 OK')
complaint_transfer = transfer_tokens['transfer']
# check complaint owner
tender_doc = self.db.get(self.tender_id)
self.assertEqual(tender_doc['awards'][0]['complaints'][0]['owner'], self.second_provider)
self.app.authorization = ('Basic', (self.invalid_provider, ''))
# create Transfer
response = self.app.post_json('/transfers', {"data": test_transfer_data})
self.assertEqual(response.status, '201 Created')
transfer2 = response.json['data']
# change complaint ownership
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
{"data": {"id": transfer['id'], 'transfer': complaint_transfer}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'], [
{u'description': u'Broker Accreditation level does not permit ownership change',
u'location': u'procurementMethodType', u'name': u'accreditation'}
])
# try to use already applied transfer
self.app.authorization = ('Basic', (self.second_provider, ''))
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(
self.tender_id, self.award_id, bid_token), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization, 'status': 'claim'}})
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
complaint2 = response.json['data']
complaint2_transfer = response.json['access']['transfer']
self.assertNotEqual(complaint['id'], complaint2['id'])
self.app.authorization = ('Basic', (self.first_provider, ''))
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint2['id']),
{"data": {"id": transfer['id'], 'transfer': complaint2_transfer}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'], [
{u'description': u'Transfer already used', u'location': u'body', u'name': u'transfer'}
])
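# The ownership-change flow exercised above, in brief (a reading of this test,
# not separate API documentation): the prospective owner creates a Transfer via
# POST /transfers and receives its access tokens; ownership then changes by
# POSTing {"id": <transfer id>, "transfer": <the complaint's transfer token>}
# to .../complaints/{id}/ownership. A Transfer can be applied only once, and
# the new broker needs accreditation for the tender's procurementMethodType.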
class OpenUAAwardComplaintOwnershipChangeTest(OpenUAOwnershipWebTest, AwardComplaintOwnershipChangeTest):
tender_type = "aboveThresholdUA"
initial_data = test_ua_tender_data
initial_bid = test_ua_bid_data
first_owner = 'broker'
second_owner = 'broker3'
test_owner = 'broker3t'
invalid_owner = 'broker1'
first_provider = 'broker'
second_provider = 'broker4'
invalid_provider = 'broker2'
def test_change_award_complaint_ownership(self):
super(OpenUAAwardComplaintOwnershipChangeTest, self).test_change_award_complaint_ownership()
class OpenUADefenseAwardComplaintOwnershipChangeTest(OpenUAOwnershipWebTest, AwardComplaintOwnershipChangeTest):
tender_type = "aboveThresholdUA.defense"
initial_data = test_uadefense_tender_data
initial_bid = test_uadefense_bid_data
first_owner = 'broker'
second_owner = 'broker3'
test_owner = 'broker3t'
invalid_owner = 'broker1'
first_provider = 'broker'
second_provider = 'broker4'
invalid_provider = 'broker2'
def test_change_award_complaint_ownership(self):
super(OpenUADefenseAwardComplaintOwnershipChangeTest, self).test_change_award_complaint_ownership()
class OpenEUAwardComplaintOwnershipChangeTest(OpenEUOwnershipWebTest):
tender_type = "aboveThresholdEU"
initial_data = test_eu_tender_data
initial_bid = test_eu_bid_data
first_owner = 'broker'
second_owner = 'broker3'
test_owner = 'broker3t'
invalid_owner = 'broker1'
first_provider = 'broker'
second_provider = 'broker4'
invalid_provider = 'broker2'
def test_change_award_complaint_ownership(self):
self.set_tendering_status()
authorization = self.app.authorization
self.app.authorization = ('Basic', ('broker', ''))
# create bids
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), self.initial_bid)
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
first_bid = response.json['data']
self.app.authorization = ('Basic', ('broker4', ''))
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), self.initial_bid)
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
bid = response.json['data']
bid_token = response.json['access']['token']
# switch to active.pre-qualification
self.set_pre_qualification_status({"id": self.tender_id, 'status': 'active.tendering'})
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(
self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'active.pre-qualification')
# qualify bids
response = self.app.get('/tenders/{}/qualifications'.format(self.tender_id))
self.app.authorization = authorization
for qualification in response.json['data']:
response = self.app.patch_json('/tenders/{}/qualifications/{}?acc_token={}'.format(
self.tender_id, qualification['id'], self.tender_token), {"data": {"status": "active", "qualified": True, "eligible": True}})
self.assertEqual(response.status, "200 OK")
# switch to active.pre-qualification.stand-still
response = self.app.patch_json('/tenders/{}?acc_token={}'.format(
self.tender_id, self.tender_token), {"data": {"status": 'active.pre-qualification.stand-still'}})
self.assertEqual(response.json['data']['status'], 'active.pre-qualification.stand-still')
# switch to active.auction
self.set_auction_status({"id": self.tender_id, 'status': 'active.pre-qualification.stand-still'})
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(
self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], "active.auction")
# submit award
self.app.authorization = authorization
self.set_qualification_status()
self.app.authorization = ('Basic', ('token', ''))
response = self.app.post_json('/tenders/{}/awards'.format(
self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': first_bid['id']}})
award = response.json['data']
self.award_id = award['id']
# submit complaint from broker
self.app.authorization = ('Basic', ('broker4', ''))
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(
self.tender_id, self.award_id, bid_token), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization, 'status': 'claim'}})
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
complaint = response.json['data']
complaint_token = response.json['access']['token']
complaint_transfer = response.json['access']['transfer']
self.assertEqual(complaint['author']['name'], test_organization['name'])
self.assertIn('id', complaint)
self.assertIn(complaint['id'], response.headers['Location'])
# check complaint owner
tender_doc = self.db.get(self.tender_id)
self.assertEqual(tender_doc['awards'][0]['complaints'][0]['owner'], self.second_provider)
self.app.authorization = ('Basic', ('broker4', ''))
# create Transfer
response = self.app.post_json('/transfers', {"data": test_transfer_data})
self.assertEqual(response.status, '201 Created')
transfer = response.json['data']
transfer_tokens = response.json['access']
# try to change ownership with invalid transfer token
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
{"data": {"id": transfer['id'], 'transfer': "fake_transfer_token"}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'], [
{u'description': u'Invalid transfer', u'location': u'body', u'name': u'transfer'}
])
# change complaint ownership
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
{"data": {"id": transfer['id'], 'transfer': complaint_transfer}})
self.assertEqual(response.status, '200 OK')
complaint_transfer = transfer_tokens['transfer']
# check complaint owner
tender_doc = self.db.get(self.tender_id)
self.assertEqual(tender_doc['awards'][0]['complaints'][0]['owner'], self.second_provider)
self.app.authorization = ('Basic', ('broker2', ''))
# create Transfer
response = self.app.post_json('/transfers', {"data": test_transfer_data})
self.assertEqual(response.status, '201 Created')
transfer2 = response.json['data']
# change complaint ownership
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
{"data": {"id": transfer['id'], 'transfer': complaint_transfer}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'], [
{u'description': u'Broker Accreditation level does not permit ownership change',
u'location': u'procurementMethodType', u'name': u'accreditation'}
])
# try to use already applied transfer
self.app.authorization = ('Basic', ('broker4', ''))
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(
self.tender_id, self.award_id, bid_token), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization, 'status': 'claim'}})
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
complaint2 = response.json['data']
complaint2_transfer = response.json['access']['transfer']
self.assertNotEqual(complaint['id'], complaint2['id'])
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint2['id']),
{"data": {"id": transfer['id'], 'transfer': complaint2_transfer}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'], [
{u'description': u'Transfer already used', u'location': u'body', u'name': u'transfer'}
])
class NegotiationAwardComplaintOwnershipChangeTest(OpenUAOwnershipWebTest):
tender_type = "negotioation"
initial_data = test_tender_negotiation_data
def test_change_award_complaint_ownership(self):
# Create award
request_path = '/tenders/{}/awards?acc_token={}'.format(self.tender_id, self.tender_token)
response = self.app.post_json(request_path, {'data': {'suppliers': [test_organization], 'qualified': True,
'status': 'pending'}})
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
award = response.json['data']
self.award_id = award['id']
# submit complaint from broker
self.app.authorization = ('Basic', ('broker4', ''))
response = self.app.post_json('/tenders/{}/awards/{}/complaints'.format(
self.tender_id, self.award_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization, 'status': 'claim'}})
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
complaint = response.json['data']
complaint_token = response.json['access']['token']
complaint_transfer = response.json['access']['transfer']
self.assertEqual(complaint['author']['name'], test_organization['name'])
self.assertIn('id', complaint)
self.assertIn(complaint['id'], response.headers['Location'])
# check complaint owner
tender_doc = self.db.get(self.tender_id)
self.assertEqual(tender_doc['awards'][0]['complaints'][0]['owner'], 'broker4')
self.app.authorization = ('Basic', ('broker', ''))
# create Transfer
response = self.app.post_json('/transfers', {"data": test_transfer_data})
self.assertEqual(response.status, '201 Created')
transfer = response.json['data']
transfer_tokens = response.json['access']
# try to change ownership with invalid transfer token
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
{"data": {"id": transfer['id'], 'transfer': "fake_transfer_token"}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'], [
{u'description': u'Invalid transfer', u'location': u'body', u'name': u'transfer'}
])
# change complaint ownership
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
{"data": {"id": transfer['id'], 'transfer': complaint_transfer}})
self.assertEqual(response.status, '200 OK')
complaint_transfer = transfer_tokens['transfer']
# check complaint owner
tender_doc = self.db.get(self.tender_id)
self.assertEqual(tender_doc['awards'][0]['complaints'][0]['owner'], 'broker')
self.app.authorization = ('Basic', ('broker2', ''))
# create Transfer
response = self.app.post_json('/transfers', {"data": test_transfer_data})
self.assertEqual(response.status, '201 Created')
transfer2 = response.json['data']
# change complaint ownership
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
{"data": {"id": transfer['id'], 'transfer': complaint_transfer}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'], [
{u'description': u'Broker Accreditation level does not permit ownership change',
u'location': u'procurementMethodType', u'name': u'accreditation'}
])
# try to use already applied transfer
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/awards/{}/complaints'.format(
self.tender_id, self.award_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization, 'status': 'claim'}})
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
complaint2 = response.json['data']
complaint2_transfer = response.json['access']['transfer']
self.assertNotEqual(complaint['id'], complaint2['id'])
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint2['id']),
{"data": {"id": transfer['id'], 'transfer': complaint2_transfer}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.json['errors'], [
{u'description': u'Transfer already used', u'location': u'body', u'name': u'transfer'}
])
class NegotiationQuickAwardComplaintOwnershipChangeTest(NegotiationAwardComplaintOwnershipChangeTest):
tender_type = "negotioation.quick"
initial_data = test_tender_negotiation_quick_data
def test_change_award_complaint_ownership(self):
super(NegotiationQuickAwardComplaintOwnershipChangeTest, self).test_change_award_complaint_ownership()
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(AwardComplaintOwnershipChangeTest))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| 50.548611
| 182
| 0.650089
| 2,294
| 21,837
| 6.022232
| 0.065824
| 0.032429
| 0.088237
| 0.038219
| 0.897575
| 0.857329
| 0.83641
| 0.811364
| 0.797249
| 0.785885
| 0
| 0.010068
| 0.199478
| 21,837
| 431
| 183
| 50.665893
| 0.780219
| 0.042726
| 0
| 0.784375
| 0
| 0
| 0.212632
| 0.053961
| 0
| 0
| 0
| 0
| 0.221875
| 1
| 0.021875
| false
| 0
| 0.0125
| 0
| 0.19375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7b429a6fa05f5101c410f6baf91f4fd5ea31cfa
| 6,194
|
py
|
Python
|
chaospy/descriptives/sensitivity/nataf.py
|
agonzs11/Polinomio-del-caos
|
5a415ece07e6535488174bac69a6c0fcc2ca272d
|
[
"MIT"
] | 1
|
2020-04-29T20:53:25.000Z
|
2020-04-29T20:53:25.000Z
|
chaospy/descriptives/sensitivity/nataf.py
|
agonzs11/Polinomio-del-caos
|
5a415ece07e6535488174bac69a6c0fcc2ca272d
|
[
"MIT"
] | null | null | null |
chaospy/descriptives/sensitivity/nataf.py
|
agonzs11/Polinomio-del-caos
|
5a415ece07e6535488174bac69a6c0fcc2ca272d
|
[
"MIT"
] | null | null | null |
"""Variance based decomposition on the Nataf Copula."""
import numpy
import chaospy
from ..conditional import E_cond
from ..variance import Var
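# Both routines below estimate the classic Sobol indices from a polynomial
# chaos surrogate u, refit once per rotation of the Nataf transform:
#
#   first order:  S_i  = Var(E[u | x_i]) / Var(u)
#   total order:  S_Ti = 1 - Var(E[u | x_{~i}]) / Var(u)
#
# The recurring ``/(V+(V == 0))*(V != 0)`` idiom returns 0 instead of dividing
# by zero whenever the total variance V vanishes.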
def Sens_m_nataf(order, dist, samples, vals, **kws):
"""
Variance-based decomposition through the Nataf distribution.
Generates first order sensitivity indices.
Args:
order (int):
Polynomial order used in ``orth_ttr``.
dist (chaospy.distributions.copula.nataf.Nataf):
Assumed to be Nataf with independent components
samples (numpy.ndarray):
Samples used for evaluation (typically generated from ``dist``.)
vals (numpy.ndarray):
Evaluations of the model for given samples.
Returns:
(numpy.ndarray):
Sensitivity indices with shape ``(len(dist),) + vals.shape[1:]``.
"""
assert dist.__class__.__name__ == "Copula"
trans = dist.prm["trans"]
assert trans.__class__.__name__ == "nataf"
vals = numpy.array(vals)
cov = trans.prm["C"]
cov = numpy.dot(cov, cov.T)
marginal = dist.prm["dist"]
dim = len(dist)
orth = chaospy.orthogonal.orth_ttr(order, marginal, sort="GR")
r = list(range(dim))  # materialize so the rotation below works on Python 3
index = [1] + [0]*(dim-1)
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
V = Var(poly, marginal, **kws)
out = numpy.zeros((dim,) + poly.shape)
out[0] = Var(E_cond(poly, index, marginal, **kws),
marginal, **kws)/(V+(V == 0))*(V != 0)
for i in range(1, dim):
r = r[1:] + r[:1]
index = index[-1:] + index[:-1]
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
out[i] = Var(E_cond(poly, index, marginal, **kws),
marginal, **kws)/(V+(V == 0))*(V != 0)
return out
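# A hypothetical usage sketch, assuming the legacy chaospy API this module is
# written against (constructor names and signatures here are illustrative,
# not documented):
#
#   marginal = chaospy.Iid(chaospy.Normal(0, 1), 2)
#   dist = chaospy.dist.Nataf(marginal, correlation)  # Copula wrapping a nataf trans
#   samples = dist.sample(10**4)
#   vals = [model(sample) for sample in samples.T]    # user-supplied model
#   first_order = Sens_m_nataf(3, dist, samples, vals)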
def Sens_t_nataf(order, dist, samples, vals, **kws):
"""
Variance-based decomposition through the Nataf distribution.
Generates total order sensitivity indices.
Args:
order (int):
Polynomial order used in ``orth_ttr``.
dist (Copula):
Assumed to be Nataf with independent components
samples (numpy.ndarray):
Samples used for evaluation (typically generated from ``dist``.)
vals (numpy.ndarray):
Evaluations of the model for given samples.
Returns:
(numpy.ndarray):
Sensitivity indices with shape ``(len(dist),)+vals.shape[1:]``.
"""
assert dist.__class__.__name__ == "Copula"
trans = dist.prm["trans"]
assert trans.__class__.__name__ == "nataf"
vals = numpy.array(vals)
cov = trans.prm["C"]
cov = numpy.dot(cov, cov.T)
marginal = dist.prm["dist"]
dim = len(dist)
orth = chaospy.orthogonal.orth_ttr(order, marginal, sort="GR")
r = list(range(dim))
index = [0] + [1]*(dim-1)
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
V = Var(poly, marginal, **kws)
out = numpy.zeros((dim,) + poly.shape)
out[0] = (V-Var(E_cond(poly, index, marginal, **kws),
marginal, **kws))/(V+(V == 0))*(V != 0)
for i in range(1, dim):
r = r[1:] + r[:1]
index = index[-1:] + index[:-1]
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
out[i] = (V-Var(E_cond(poly, index, marginal, **kws),
marginal, **kws))/(V+(V == 0))*(V != 0)
return out
def Sens_nataf(order, dist, samples, vals, **kws):
"""
Variance-based decomposition through the Nataf distribution.
Main and total order sensitivity indices
Args:
order (int):
Polynomial order used in ``orth_ttr``.
dist (Copula):
Assumed to be Nataf with independent components
samples (numpy.ndarray):
Samples used for evaluation (typically generated from ``dist``.)
vals (numpy.ndarray):
Evaluations of the model for given samples.
Returns:
(numpy.ndarray):
Sensitivity indices with shape
``(2, len(dist)) + vals.shape[1:]``. First component is main
and second is total.
"""
assert dist.__class__.__name__ == "Copula"
trans = dist.prm["trans"]
assert trans.__class__.__name__ == "nataf"
vals = numpy.array(vals)
cov = trans.prm["C"]
cov = numpy.dot(cov, cov.T)
marginal = dist.prm["dist"]
dim = len(dist)
orth = chaospy.orthogonal.orth_ttr(order, marginal, sort="GR")
r = list(range(dim))
index0 = [0] + [1]*(dim-1)
index1 = [1] + [0]*(dim-1)
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
V = Var(poly, marginal, **kws)
out = numpy.zeros((2, dim,) + poly.shape)
out[0, 0] = (V - Var(E_cond(poly, index0, marginal, **kws),
marginal, **kws))/(V+(V == 0))*(V != 0)
out[1, 0] = Var(E_cond(poly, index1, marginal, **kws),
marginal, **kws)/(V+(V == 0))*(V != 0)
for i in range(1, dim):
r = r[1:] + r[:1]
index0 = index0[-1:] + index0[:-1]
index1 = index1[-1:] + index1[:-1]  # rotate in step with r, mirroring Sens_m_nataf
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
out[0, i] = (V-Var(E_cond(poly, index0, marginal, **kws),
marginal, **kws))/(V+(V == 0))*(V != 0)
out[1, i] = Var(E_cond(poly, index1, marginal, **kws),
marginal, **kws)/(V+(V == 0))*(V != 0)
return out[::-1]
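# Note on the return value: row 0 of ``out`` holds the total-order indices and
# row 1 the main-order indices, so the final ``out[::-1]`` delivers
# (main, total) in the order the docstring promises.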
| 29.778846
| 77
| 0.571359
| 776
| 6,194
| 4.449742
| 0.128866
| 0.060527
| 0.018535
| 0.027802
| 0.91225
| 0.902114
| 0.89082
| 0.883869
| 0.883869
| 0.868231
| 0
| 0.016086
| 0.277365
| 6,194
| 207
| 78
| 29.922705
| 0.755362
| 0.28011
| 0
| 0.737374
| 0
| 0
| 0.016258
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 1
| 0.030303
| false
| 0
| 0.040404
| 0
| 0.10101
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7bca371ccb1b3ed8ec1c4c66f80d05c4280d231
| 3,317
|
py
|
Python
|
tests/metrics/test_accuracy.py
|
JiaqiLiu/PyTorch-NLP
|
71d2ce1e8b8da5ab4e7732d1ebf971150986e6c8
|
[
"BSD-3-Clause"
] | 2,125
|
2018-03-17T23:31:03.000Z
|
2022-03-31T12:20:14.000Z
|
tests/metrics/test_accuracy.py
|
zhengmingzhang/PyTorch-NLP
|
34a98b5fa5d2e13761546b94770aed12388528f2
|
[
"BSD-3-Clause"
] | 109
|
2018-03-21T00:38:51.000Z
|
2021-12-24T08:34:55.000Z
|
tests/metrics/test_accuracy.py
|
zhengmingzhang/PyTorch-NLP
|
34a98b5fa5d2e13761546b94770aed12388528f2
|
[
"BSD-3-Clause"
] | 270
|
2018-03-18T16:47:01.000Z
|
2022-03-17T08:01:58.000Z
|
import torch
from torchnlp.metrics import get_accuracy
from torchnlp.metrics import get_token_accuracy
def test_get_accuracy():
targets = torch.LongTensor([1, 2, 3, 4])
outputs = torch.LongTensor([1, 2, 3, 3])
accuracy, _, _ = get_accuracy(targets, outputs)
assert accuracy == 0.75
def test_get_token_accuracy():
targets = torch.LongTensor([1, 2, 3, 4])
outputs = torch.LongTensor([1, 2, 3, 3])
accuracy, _, _ = get_token_accuracy(targets, outputs)
assert accuracy == 0.75
def test_get_accuracy_2d_2d():
targets = torch.LongTensor([[1], [2], [3], [4]])
outputs = torch.LongTensor([[1], [2], [3], [3]])
accuracy, _, _ = get_accuracy(targets, outputs)
assert accuracy == 0.75
def test_get_token_accuracy_2d_2d():
targets = torch.LongTensor([[1], [2], [3], [4]])
outputs = torch.LongTensor([[1], [2], [3], [3]])
accuracy, _, _ = get_token_accuracy(targets, outputs)
assert accuracy == 0.75
def test_get_token_accuracy_2d_2d_ignore_index():
targets = torch.LongTensor([[1, 1], [2, 2], [3, 3]])
outputs = torch.LongTensor([[1, 1], [2, 3], [4, 4]])
accuracy, _, _ = get_token_accuracy(targets, outputs, ignore_index=3)
assert accuracy == 0.75
def test_get_accuracy_2d_3d():
targets = torch.LongTensor([[1, 1], [2, 2], [3, 3], [4, 4]])
outputs = torch.LongTensor([[[1, 1], [1, 1]], [[2, 2], [2, 2]], [[3, 3], [3, 3]],
[[3, 3], [3, 3]]])
accuracy, _, _ = get_accuracy(targets, outputs)
assert accuracy == 0.75
def test_get_token_accuracy_2d_3d():
targets = torch.LongTensor([[1, 1], [2, 2], [3, 3], [4, 4]])
outputs = torch.LongTensor([[[1, 1], [1, 1]], [[2, 2], [2, 2]], [[3, 3], [3, 3]],
[[3, 3], [3, 3]]])
accuracy, _, _ = get_token_accuracy(targets, outputs)
assert accuracy == 0.75
def test_get_accuracy_2d_3d_top_k():
targets = torch.LongTensor([[1, 1], [2, 2], [3, 3], [4, 4]])
outputs = torch.LongTensor([[[1, 1], [1, 1]], [[2, 2], [2, 2]], [[3, 3], [3, 3]],
[[3, 3], [4, 4]]])
accuracy, _, _ = get_accuracy(targets, outputs, k=3)
assert accuracy == 1.0
def test_get_accuracy_1d_2d():
targets = torch.LongTensor([1, 2, 3, 4])
outputs = torch.LongTensor([[1], [2], [3], [3]])
accuracy, _, _ = get_accuracy(targets, outputs)
assert accuracy == 0.75
def test_get_token_accuracy_1d_2d():
targets = torch.LongTensor([1, 2, 3, 4])
outputs = torch.LongTensor([[1], [2], [3], [3]])
accuracy, _, _ = get_token_accuracy(targets, outputs)
assert accuracy == 0.75
def test_get_accuracy_1d_2d_top_k():
targets = torch.LongTensor([1, 2, 3, 4])
outputs = torch.LongTensor([[1, 1], [2, 2], [3, 3], [3, 4]])
accuracy, _, _ = get_accuracy(targets, outputs, k=3)
assert accuracy == 1.0
def test_get_accuracy_ignore_index():
targets = torch.LongTensor([1, 2, 3, 4])
outputs = torch.LongTensor([1, 2, 3, 3])
accuracy, _, _ = get_accuracy(targets, outputs, ignore_index=4)
assert accuracy == 1.0
def test_get_token_accuracy_ignore_index():
targets = torch.LongTensor([1, 2, 3, 4])
outputs = torch.LongTensor([1, 2, 3, 3])
accuracy, _, _ = get_token_accuracy(targets, outputs, ignore_index=4)
assert accuracy == 1.0
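# A reading of the cases above (inferred from the tests, not from library
# documentation): when ``outputs`` carries one extra trailing dimension, that
# dimension holds candidate predictions and ``k`` counts a prediction as
# correct if the target appears among the top-k candidates, while
# ``ignore_index`` removes matching targets from the calculation entirely.
# For example:
#
#   targets = torch.LongTensor([1, 2, 3, 4])
#   outputs = torch.LongTensor([[1, 1], [2, 2], [3, 3], [3, 4]])
#   accuracy, _, _ = get_accuracy(targets, outputs, k=3)  # -> 1.0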
| 33.505051
| 85
| 0.597226
| 475
| 3,317
| 3.936842
| 0.056842
| 0.035294
| 0.22246
| 0.154545
| 0.977005
| 0.941711
| 0.919786
| 0.914439
| 0.885562
| 0.870588
| 0
| 0.084518
| 0.215255
| 3,317
| 98
| 86
| 33.846939
| 0.633884
| 0
| 0
| 0.676056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183099
| 1
| 0.183099
| false
| 0
| 0.042254
| 0
| 0.225352
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7cd600feb9972de09a3cd4528bb68a95680eebe
| 10,058
|
py
|
Python
|
tools/gcs-bucket-mover/tests/test_configuration.py
|
ruchirjain86/professional-services
|
739ac0f5ffc8237f750804fa9f0f14d4d918a0fa
|
[
"Apache-2.0"
] | 2,116
|
2017-05-18T19:33:05.000Z
|
2022-03-31T13:34:48.000Z
|
tools/gcs-bucket-mover/tests/test_configuration.py
|
ruchirjain86/professional-services
|
739ac0f5ffc8237f750804fa9f0f14d4d918a0fa
|
[
"Apache-2.0"
] | 548
|
2017-05-20T05:05:35.000Z
|
2022-03-28T16:38:12.000Z
|
tools/gcs-bucket-mover/tests/test_configuration.py
|
ruchirjain86/professional-services
|
739ac0f5ffc8237f750804fa9f0f14d4d918a0fa
|
[
"Apache-2.0"
] | 1,095
|
2017-05-19T00:02:36.000Z
|
2022-03-31T05:21:39.000Z
|
# Copyright 2018 Google LLC. All rights reserved. Licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
#
# Any software provided by Google hereunder is distributed "AS IS", WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, and is not intended for production use.
"""Tests for the configuration.py file"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import unittest
import mock
from google.auth import environment_vars
from gcs_bucket_mover import configuration
from tests import common
@mock.patch('google.cloud.storage.Client', mock.MagicMock())
@mock.patch('google.cloud.logging.Client', mock.MagicMock())
@mock.patch(
'google.oauth2.service_account.Credentials.from_service_account_file')
class TestConfiguration(unittest.TestCase):
"""Tests for the logic in the Configuration class."""
def setUp(self):
self.parsed_args = common.get_mock_args()
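# A hypothetical sketch of the mock namespace these tests expect from
# ``common.get_mock_args()`` (attribute names inferred from the assertions
# below; every value is a placeholder):
#
#   import argparse
#   def get_mock_args():
#       return argparse.Namespace(
#           source_project='mock-source-project',
#           target_project='mock-target-project',
#           bucket_name='mock-bucket',
#           temp_bucket_name=None,
#           rename_bucket_to=None,
#           lock_file_name='lock.txt',
#           gcp_source_project_service_account_key='source-key.json',
#           gcp_target_project_service_account_key='target-key.json')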
def test_default_constructor(self, mock_from_service_account_file):
"""Tests the default object is successfully created."""
config = configuration.Configuration.from_conf(self.parsed_args)
# Test that the properties are set to the parsed_args values
self.assertEqual(self.parsed_args.source_project, config.source_project)
self.assertEqual(self.parsed_args.target_project, config.target_project)
self.assertEqual(self.parsed_args.bucket_name, config.bucket_name)
self.assertEqual(self.parsed_args.bucket_name,
config.target_bucket_name)
self.assertEqual(self.parsed_args.bucket_name + '-temp',
config.temp_bucket_name)
self.assertFalse(config.is_rename)
self.assertFalse(config.disable_bucket_lock)
self.assertEqual(self.parsed_args.lock_file_name, config.lock_file_name)
calls = [
mock.call(self.parsed_args.gcp_source_project_service_account_key),
mock.call(self.parsed_args.gcp_target_project_service_account_key)
]
mock_from_service_account_file.assert_has_calls(calls)
def test_temp_bucket_name(self, mock_from_service_account_file):
"""Test that a specific temp_bucket_name is correctly set."""
self.parsed_args.temp_bucket_name = 'temp'
config = configuration.Configuration.from_conf(self.parsed_args)
self.assertEqual(self.parsed_args.source_project, config.source_project)
self.assertEqual(self.parsed_args.target_project, config.target_project)
self.assertEqual(self.parsed_args.bucket_name, config.bucket_name)
self.assertEqual(self.parsed_args.bucket_name,
config.target_bucket_name)
self.assertEqual('temp', config.temp_bucket_name)
self.assertFalse(config.is_rename)
self.assertFalse(config.disable_bucket_lock)
self.assertEqual(self.parsed_args.lock_file_name, config.lock_file_name)
calls = [
mock.call(self.parsed_args.gcp_source_project_service_account_key),
mock.call(self.parsed_args.gcp_target_project_service_account_key)
]
mock_from_service_account_file.assert_has_calls(calls)
def test_target_bucket_name(self, mock_from_service_account_file):
"""Test that the target_bucket_name is correctly set."""
self.parsed_args.rename_bucket_to = 'target'
config = configuration.Configuration.from_conf(self.parsed_args)
self.assertEqual(self.parsed_args.source_project, config.source_project)
self.assertEqual(self.parsed_args.target_project, config.target_project)
self.assertEqual(self.parsed_args.bucket_name, config.bucket_name)
self.assertEqual('target', config.target_bucket_name)
self.assertEqual(self.parsed_args.bucket_name + '-temp',
config.temp_bucket_name)
self.assertTrue(config.is_rename)
self.assertFalse(config.disable_bucket_lock)
self.assertEqual(self.parsed_args.lock_file_name, config.lock_file_name)
calls = [
mock.call(self.parsed_args.gcp_source_project_service_account_key),
mock.call(self.parsed_args.gcp_target_project_service_account_key)
]
mock_from_service_account_file.assert_has_calls(calls)
@mock.patch.dict(os.environ, {environment_vars.CREDENTIALS: 'env_key_path'})
def test_source_key_is_none(self, mock_from_service_account_file):
"""Test that the source project credentials are set from the environment
when the config value is not supplied variable."""
self.parsed_args.gcp_source_project_service_account_key = None
config = configuration.Configuration.from_conf(self.parsed_args)
self.assertEqual(self.parsed_args.source_project, config.source_project)
self.assertEqual(self.parsed_args.target_project, config.target_project)
self.assertEqual(self.parsed_args.bucket_name, config.bucket_name)
self.assertEqual(self.parsed_args.bucket_name,
config.target_bucket_name)
self.assertEqual(self.parsed_args.bucket_name + '-temp',
config.temp_bucket_name)
self.assertFalse(config.is_rename)
self.assertFalse(config.disable_bucket_lock)
self.assertEqual(self.parsed_args.lock_file_name, config.lock_file_name)
calls = [
mock.call('env_key_path'),
mock.call(self.parsed_args.gcp_target_project_service_account_key)
]
mock_from_service_account_file.assert_has_calls(calls)
@mock.patch.dict(os.environ, {environment_vars.CREDENTIALS: 'env_key_path'})
def test_source_key_is_none_string(self, mock_from_service_account_file):
"""Test that the source project credentials are set from the environment when
the config value provided is empty (and parsed as 'None' by the parser)."""
self.parsed_args.gcp_source_project_service_account_key = 'None'
config = configuration.Configuration.from_conf(self.parsed_args)
self.assertEqual(self.parsed_args.source_project, config.source_project)
self.assertEqual(self.parsed_args.target_project, config.target_project)
self.assertEqual(self.parsed_args.bucket_name, config.bucket_name)
self.assertEqual(self.parsed_args.bucket_name,
config.target_bucket_name)
self.assertEqual(self.parsed_args.bucket_name + '-temp',
config.temp_bucket_name)
self.assertFalse(config.is_rename)
self.assertFalse(config.disable_bucket_lock)
self.assertEqual(self.parsed_args.lock_file_name, config.lock_file_name)
calls = [
mock.call('env_key_path'),
mock.call(self.parsed_args.gcp_target_project_service_account_key)
]
mock_from_service_account_file.assert_has_calls(calls)
@mock.patch.dict(os.environ, {environment_vars.CREDENTIALS: 'env_key_path'})
def test_target_key_is_none(self, mock_from_service_account_file):
"""Test that the target project credentials are set from the environment
when the config value is not supplied variable."""
self.parsed_args.gcp_target_project_service_account_key = None
config = configuration.Configuration.from_conf(self.parsed_args)
self.assertEqual(self.parsed_args.source_project, config.source_project)
self.assertEqual(self.parsed_args.target_project, config.target_project)
self.assertEqual(self.parsed_args.bucket_name, config.bucket_name)
self.assertEqual(self.parsed_args.bucket_name,
config.target_bucket_name)
self.assertEqual(self.parsed_args.bucket_name + '-temp',
config.temp_bucket_name)
self.assertFalse(config.is_rename)
self.assertFalse(config.disable_bucket_lock)
self.assertEqual(self.parsed_args.lock_file_name, config.lock_file_name)
calls = [
mock.call(self.parsed_args.gcp_source_project_service_account_key),
mock.call('env_key_path')
]
mock_from_service_account_file.assert_has_calls(calls)
@mock.patch.dict(os.environ, {environment_vars.CREDENTIALS: 'env_key_path'})
def test_target_key_is_none_string(self, mock_from_service_account_file):
"""Test that the target project credentials are set from the environment when
the config value provided is empty (and parsed as 'None' by the parser)."""
self.parsed_args.gcp_target_project_service_account_key = 'None'
config = configuration.Configuration.from_conf(self.parsed_args)
self.assertEqual(self.parsed_args.source_project, config.source_project)
self.assertEqual(self.parsed_args.target_project, config.target_project)
self.assertEqual(self.parsed_args.bucket_name, config.bucket_name)
self.assertEqual(self.parsed_args.bucket_name,
config.target_bucket_name)
self.assertEqual(self.parsed_args.bucket_name + '-temp',
config.temp_bucket_name)
self.assertFalse(config.is_rename)
self.assertFalse(config.disable_bucket_lock)
self.assertEqual(self.parsed_args.lock_file_name, config.lock_file_name)
calls = [
mock.call(self.parsed_args.gcp_source_project_service_account_key),
mock.call('env_key_path')
]
mock_from_service_account_file.assert_has_calls(calls)
if __name__ == '__main__':
unittest.main()
| 51.579487
| 99
| 0.728873
| 1,297
| 10,058
| 5.322282
| 0.119507
| 0.094162
| 0.129799
| 0.144865
| 0.830219
| 0.830219
| 0.816022
| 0.805012
| 0.78618
| 0.78618
| 0
| 0.001105
| 0.190197
| 10,058
| 194
| 100
| 51.845361
| 0.846409
| 0.154106
| 0
| 0.731034
| 0
| 0
| 0.033638
| 0.014383
| 0
| 0
| 0
| 0
| 0.434483
| 1
| 0.055172
| false
| 0
| 0.062069
| 0
| 0.124138
| 0.006897
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|